feat: embedding router

This commit is contained in:
Ran
2026-01-26 05:10:53 +07:00
parent c332ce7749
commit 3ff0e2c4dd
22 changed files with 2572 additions and 392 deletions
+142
View File
@@ -0,0 +1,142 @@
package embeddings
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
)
// DashScope multimodal embedding endpoint defaults.
const (
	// DefaultDashScopeBaseURL is used when no base URL is configured.
	DefaultDashScopeBaseURL = "https://dashscope.aliyuncs.com"
	// DashScopeEmbeddingPath is the multimodal embedding API path, appended
	// to the (trailing-slash-trimmed) base URL.
	DashScopeEmbeddingPath = "/api/v1/services/embeddings/multimodal-embedding/multimodal-embedding"
)
// DashScopeEmbedder calls the DashScope multimodal embedding API over HTTP.
type DashScopeEmbedder struct {
	apiKey  string       // bearer token sent in the Authorization header
	baseURL string       // API origin, stored without a trailing slash
	model   string       // DashScope model identifier sent in each request
	http    *http.Client // shared client with the configured timeout
}
// DashScopeUsage mirrors the usage section of a DashScope embedding response.
type DashScopeUsage struct {
	InputTokens int `json:"input_tokens"`
	ImageTokens int `json:"image_tokens"`
	ImageCount  int `json:"image_count,omitempty"`
	// Duration is reported by DashScope for video inputs — presumably the
	// billed media duration rather than a token count; confirm units against
	// the DashScope API docs.
	Duration int `json:"duration,omitempty"`
}
// dashScopeRequest is the JSON body posted to the embedding endpoint.
type dashScopeRequest struct {
	Model string                `json:"model"`
	Input dashScopeRequestInput `json:"input"`
}

// dashScopeRequestInput wraps the content items of a request. Each item is a
// single-key map: {"text": ...}, {"image": ...}, or {"video": ...}.
type dashScopeRequestInput struct {
	Contents []map[string]string `json:"contents"`
}

// dashScopeResponse is the JSON body returned by the embedding endpoint.
// Code/Message carry API-level errors that can arrive with HTTP 200.
type dashScopeResponse struct {
	Output struct {
		Embeddings []struct {
			Index     int       `json:"index"`
			Embedding []float32 `json:"embedding"`
			Type      string    `json:"type"` // "text", "image", or "video"
		} `json:"embeddings"`
	} `json:"output"`
	Usage     DashScopeUsage `json:"usage"`
	RequestID string         `json:"request_id"`
	Code      string         `json:"code"`
	Message   string         `json:"message"`
}
// NewDashScopeEmbedder builds a DashScope embedding client. An empty baseURL
// falls back to DefaultDashScopeBaseURL and a non-positive timeout falls back
// to 10 seconds. Trailing slashes on the base URL are trimmed.
func NewDashScopeEmbedder(apiKey, baseURL, model string, timeout time.Duration) *DashScopeEmbedder {
	base := baseURL
	if base == "" {
		base = DefaultDashScopeBaseURL
	}
	wait := timeout
	if wait <= 0 {
		wait = 10 * time.Second
	}
	client := &http.Client{Timeout: wait}
	return &DashScopeEmbedder{
		apiKey:  apiKey,
		baseURL: strings.TrimRight(base, "/"),
		model:   model,
		http:    client,
	}
}
// Embed requests a DashScope multimodal embedding for any combination of
// text, image URL, and video URL; at least one non-blank input is required.
//
// When the response contains multiple embeddings, the one whose reported
// type matches the first populated input (text, then image, then video) is
// preferred; otherwise the first embedding is returned. On API-level errors
// (non-empty Code) and on empty responses the parsed usage is still returned
// so callers can account for consumed tokens.
func (e *DashScopeEmbedder) Embed(ctx context.Context, text string, imageURL string, videoURL string) ([]float32, DashScopeUsage, error) {
	contents := make([]map[string]string, 0, 3)
	if strings.TrimSpace(text) != "" {
		contents = append(contents, map[string]string{"text": text})
	}
	if strings.TrimSpace(imageURL) != "" {
		contents = append(contents, map[string]string{"image": imageURL})
	}
	if strings.TrimSpace(videoURL) != "" {
		contents = append(contents, map[string]string{"video": videoURL})
	}
	if len(contents) == 0 {
		return nil, DashScopeUsage{}, fmt.Errorf("dashscope input is required")
	}
	payload, err := json.Marshal(dashScopeRequest{
		Model: e.model,
		Input: dashScopeRequestInput{Contents: contents},
	})
	if err != nil {
		return nil, DashScopeUsage{}, err
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, e.baseURL+DashScopeEmbeddingPath, bytes.NewReader(payload))
	if err != nil {
		return nil, DashScopeUsage{}, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+e.apiKey)
	resp, err := e.http.Do(req)
	if err != nil {
		return nil, DashScopeUsage{}, err
	}
	defer resp.Body.Close()
	// Read the full body up front: it is needed both for error reporting and
	// for decoding. FIX: the read error was previously discarded, which could
	// surface later as a confusing JSON decode failure on a truncated body.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, DashScopeUsage{}, fmt.Errorf("dashscope embeddings read response: %w", err)
	}
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		// FIX: include the status code so an empty error body is still diagnosable.
		return nil, DashScopeUsage{}, fmt.Errorf("dashscope embeddings error (status %d): %s", resp.StatusCode, strings.TrimSpace(string(body)))
	}
	var parsed dashScopeResponse
	if err := json.Unmarshal(body, &parsed); err != nil {
		return nil, DashScopeUsage{}, err
	}
	// DashScope can report errors with HTTP 200 via the Code field.
	if parsed.Code != "" {
		return nil, parsed.Usage, fmt.Errorf("dashscope embeddings error: %s", parsed.Message)
	}
	if len(parsed.Output.Embeddings) == 0 {
		return nil, parsed.Usage, fmt.Errorf("dashscope embeddings empty response")
	}
	// Prefer the embedding whose type matches the first populated input.
	preferredType := ""
	if strings.TrimSpace(text) != "" {
		preferredType = "text"
	} else if strings.TrimSpace(imageURL) != "" {
		preferredType = "image"
	} else if strings.TrimSpace(videoURL) != "" {
		preferredType = "video"
	}
	if preferredType != "" {
		for _, item := range parsed.Output.Embeddings {
			if strings.EqualFold(item.Type, preferredType) && len(item.Embedding) > 0 {
				return item.Embedding, parsed.Usage, nil
			}
		}
	}
	return parsed.Output.Embeddings[0].Embedding, parsed.Usage, nil
}
+102
View File
@@ -0,0 +1,102 @@
package embeddings
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"time"
)
// Embedder produces fixed-size vector embeddings for text input.
type Embedder interface {
	// Embed returns the embedding vector for input.
	Embed(ctx context.Context, input string) ([]float32, error)
	// Dimensions reports the length of vectors returned by Embed.
	Dimensions() int
}
// OpenAIEmbedder calls an OpenAI-compatible /v1/embeddings endpoint.
type OpenAIEmbedder struct {
	apiKey  string       // optional bearer token; omitted from requests when empty
	baseURL string       // API origin, stored without a trailing slash
	model   string       // embedding model identifier
	dims    int          // expected embedding vector length
	http    *http.Client // shared client with the configured timeout
}
// openAIEmbeddingRequest is the JSON body posted to /v1/embeddings.
type openAIEmbeddingRequest struct {
	Input string `json:"input"`
	Model string `json:"model"`
}

// openAIEmbeddingResponse holds the subset of the embeddings response that
// this client consumes (only the embedding vectors).
type openAIEmbeddingResponse struct {
	Data []struct {
		Embedding []float32 `json:"embedding"`
	} `json:"data"`
}
// NewOpenAIEmbedder builds an OpenAI-compatible embedding client.
// Zero-value arguments fall back to defaults: the public OpenAI base URL,
// the text-embedding-3-small model, 1536 dimensions, and a 10-second
// HTTP timeout. Trailing slashes on the base URL are trimmed.
func NewOpenAIEmbedder(apiKey, baseURL, model string, dims int, timeout time.Duration) *OpenAIEmbedder {
	base := baseURL
	if base == "" {
		base = "https://api.openai.com"
	}
	chosenModel := model
	if chosenModel == "" {
		chosenModel = "text-embedding-3-small"
	}
	dimensions := dims
	if dimensions <= 0 {
		dimensions = 1536
	}
	wait := timeout
	if wait <= 0 {
		wait = 10 * time.Second
	}
	return &OpenAIEmbedder{
		apiKey:  apiKey,
		baseURL: strings.TrimRight(base, "/"),
		model:   chosenModel,
		dims:    dimensions,
		http:    &http.Client{Timeout: wait},
	}
}
// Dimensions reports the configured embedding vector length.
func (e *OpenAIEmbedder) Dimensions() int {
	return e.dims
}
// Embed requests a text embedding for input from the OpenAI-compatible
// /v1/embeddings endpoint and returns the first embedding vector.
func (e *OpenAIEmbedder) Embed(ctx context.Context, input string) ([]float32, error) {
	payload, err := json.Marshal(openAIEmbeddingRequest{
		Input: input,
		Model: e.model,
	})
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, e.baseURL+"/v1/embeddings", bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	// Some OpenAI-compatible servers (e.g. local deployments) take no API key.
	if e.apiKey != "" {
		req.Header.Set("Authorization", "Bearer "+e.apiKey)
	}
	resp, err := e.http.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		// Best-effort read: the body is only used to enrich the error message.
		body, _ := io.ReadAll(resp.Body)
		// FIX: include the status code so an empty error body is still diagnosable.
		return nil, fmt.Errorf("openai embeddings error (status %d): %s", resp.StatusCode, strings.TrimSpace(string(body)))
	}
	var parsed openAIEmbeddingResponse
	if err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil {
		// FIX: wrap so callers can tell a decode failure from a transport error.
		return nil, fmt.Errorf("openai embeddings decode response: %w", err)
	}
	if len(parsed.Data) == 0 {
		return nil, fmt.Errorf("openai embeddings empty response")
	}
	return parsed.Data[0].Embedding, nil
}
+228
View File
@@ -0,0 +1,228 @@
package embeddings
import (
"context"
"errors"
"strings"
"time"
"github.com/google/uuid"
"github.com/jackc/pgx/v5/pgtype"
"github.com/memohai/memoh/internal/db/sqlc"
"github.com/memohai/memoh/internal/models"
)
// Embedding types and provider client-type identifiers recognized by the
// Resolver. Values are compared after lowercasing/trimming request fields.
const (
	TypeText       = "text"
	TypeMultimodal = "multimodal"

	ProviderOpenAI    = "openai"
	ProviderBedrock   = "bedrock"
	ProviderDashScope = "dashscope"
)
// Request describes an embedding request to be routed to a provider.
// Provider and Model act as optional filters; when empty the Resolver picks
// the first eligible configured model.
type Request struct {
	Type       string // TypeText or TypeMultimodal
	Provider   string // optional provider client type filter
	Model      string // optional model ID filter; overwritten with the resolved model
	Dimensions int    // filled in from the resolved model configuration
	Input      Input
}

// Input carries the raw inputs to embed; multimodal requests may set any
// combination, text requests use Text only.
type Input struct {
	Text     string
	ImageURL string
	VideoURL string
}

// Usage aggregates provider-reported consumption for an embedding call.
type Usage struct {
	InputTokens int
	ImageTokens int
	VideoTokens int
}

// Result is the routed embedding plus the model/provider it came from.
type Result struct {
	Type       string
	Provider   string
	Model      string
	Dimensions int
	Embedding  []float32
	Usage      Usage
}

// Resolver selects a configured embedding model/provider for each request
// and dispatches to the matching embedder client.
type Resolver struct {
	modelsService *models.Service // model catalog used for selection
	queries       *sqlc.Queries   // DB access for provider credentials
	timeout       time.Duration   // per-request HTTP timeout; <=0 means 10s
}
// NewResolver wires a Resolver with its model catalog, provider queries,
// and per-request HTTP timeout.
func NewResolver(modelsService *models.Service, queries *sqlc.Queries, timeout time.Duration) *Resolver {
	r := Resolver{
		modelsService: modelsService,
		queries:       queries,
		timeout:       timeout,
	}
	return &r
}
// Embed normalizes and validates the request, resolves a concrete embedding
// model and its provider from configuration, and dispatches to the matching
// embedder client. Provider/Model on req are optional filters; the resolved
// model ID, dimensions, and provider client type are written back into req
// before dispatch.
func (r *Resolver) Embed(ctx context.Context, req Request) (Result, error) {
	// Normalize user-supplied fields so comparisons below are case- and
	// whitespace-insensitive.
	req.Type = strings.ToLower(strings.TrimSpace(req.Type))
	req.Provider = strings.ToLower(strings.TrimSpace(req.Provider))
	req.Model = strings.TrimSpace(req.Model)
	req.Input.Text = strings.TrimSpace(req.Input.Text)
	req.Input.ImageURL = strings.TrimSpace(req.Input.ImageURL)
	req.Input.VideoURL = strings.TrimSpace(req.Input.VideoURL)
	if req.Type == "" {
		return Result{}, errors.New("type is required")
	}
	// Validate provider/input combinations per embedding type.
	switch req.Type {
	case TypeText:
		// Text embeddings currently support OpenAI-compatible providers only.
		if req.Provider != "" && req.Provider != ProviderOpenAI {
			return Result{}, errors.New("invalid provider for text embeddings")
		}
		if req.Input.Text == "" {
			return Result{}, errors.New("text input is required")
		}
	case TypeMultimodal:
		if req.Provider != "" && req.Provider != ProviderBedrock && req.Provider != ProviderDashScope {
			return Result{}, errors.New("invalid provider for multimodal embeddings")
		}
		// Multimodal requests need at least one of text/image/video.
		if req.Input.Text == "" && req.Input.ImageURL == "" && req.Input.VideoURL == "" {
			return Result{}, errors.New("multimodal input is required")
		}
	default:
		return Result{}, errors.New("invalid embeddings type")
	}
	// Pick a configured model matching the type/provider/model filters.
	selected, err := r.selectEmbeddingModel(ctx, req)
	if err != nil {
		return Result{}, err
	}
	provider, err := r.fetchProvider(ctx, selected.LlmProviderID)
	if err != nil {
		return Result{}, err
	}
	// Overwrite request fields with the resolved model/provider details.
	req.Model = selected.ModelID
	req.Dimensions = selected.Dimensions
	req.Provider = strings.ToLower(strings.TrimSpace(provider.ClientType))
	if req.Model == "" {
		return Result{}, errors.New("embedding model id not configured")
	}
	if req.Dimensions <= 0 {
		return Result{}, errors.New("embedding model dimensions not configured")
	}
	timeout := r.timeout
	if timeout <= 0 {
		timeout = 10 * time.Second
	}
	// Dispatch to the concrete embedder for the resolved provider.
	switch req.Type {
	case TypeText:
		if req.Provider != ProviderOpenAI {
			return Result{}, errors.New("provider not implemented")
		}
		if strings.TrimSpace(provider.ApiKey) == "" {
			return Result{}, errors.New("openai api key is required")
		}
		embedder := NewOpenAIEmbedder(provider.ApiKey, provider.BaseUrl, req.Model, req.Dimensions, timeout)
		vector, err := embedder.Embed(ctx, req.Input.Text)
		if err != nil {
			return Result{}, err
		}
		// NOTE: Usage is left zero for the OpenAI path — this client does not
		// report token consumption.
		return Result{
			Type:       req.Type,
			Provider:   req.Provider,
			Model:      req.Model,
			Dimensions: req.Dimensions,
			Embedding:  vector,
		}, nil
	case TypeMultimodal:
		if req.Provider == ProviderDashScope {
			if strings.TrimSpace(provider.ApiKey) == "" {
				return Result{}, errors.New("dashscope api key is required")
			}
			dashscope := NewDashScopeEmbedder(provider.ApiKey, provider.BaseUrl, req.Model, timeout)
			vector, usage, err := dashscope.Embed(ctx, req.Input.Text, req.Input.ImageURL, req.Input.VideoURL)
			if err != nil {
				return Result{}, err
			}
			return Result{
				Type:       req.Type,
				Provider:   req.Provider,
				Model:      req.Model,
				Dimensions: req.Dimensions,
				Embedding:  vector,
				Usage: Usage{
					InputTokens: usage.InputTokens,
					ImageTokens: usage.ImageTokens,
					// NOTE(review): DashScope reports video cost as a duration,
					// which is stored here as VideoTokens — confirm this mapping
					// is intentional (units may differ from token counts).
					VideoTokens: usage.Duration,
				},
			}, nil
		}
		// Bedrock passes validation above but has no client implementation yet.
		return Result{}, errors.New("provider not implemented")
	default:
		// Unreachable in practice: req.Type was validated above.
		return Result{}, errors.New("invalid embeddings type")
	}
}
// selectEmbeddingModel returns a configured embedding model matching the
// request. Candidates are listed by provider client type when req.Provider is
// set, otherwise by the embedding model type; they are then filtered to
// embedding models whose multimodality matches req.Type. When req.Model is
// set it must match a candidate's ModelID exactly; otherwise the first
// eligible candidate wins.
func (r *Resolver) selectEmbeddingModel(ctx context.Context, req Request) (models.GetResponse, error) {
	if r.modelsService == nil {
		return models.GetResponse{}, errors.New("models service not configured")
	}
	var (
		candidates []models.GetResponse
		err        error
	)
	if req.Provider == "" {
		candidates, err = r.modelsService.ListByType(ctx, models.ModelTypeEmbedding)
	} else {
		candidates, err = r.modelsService.ListByClientType(ctx, models.ClientType(req.Provider))
	}
	if err != nil {
		return models.GetResponse{}, err
	}
	eligible := make([]models.GetResponse, 0, len(candidates))
	for _, candidate := range candidates {
		if candidate.Type != models.ModelTypeEmbedding {
			continue
		}
		// Multimodal requests need multimodal models; text requests need
		// text-only models.
		if req.Type == TypeMultimodal && !candidate.IsMultimodal {
			continue
		}
		if req.Type == TypeText && candidate.IsMultimodal {
			continue
		}
		eligible = append(eligible, candidate)
	}
	if len(eligible) == 0 {
		return models.GetResponse{}, errors.New("no embedding models available")
	}
	if req.Model == "" {
		return eligible[0], nil
	}
	for _, candidate := range eligible {
		if candidate.ModelID == req.Model {
			return candidate, nil
		}
	}
	return models.GetResponse{}, errors.New("embedding model not found")
}
// fetchProvider loads the LLM provider row identified by the given UUID
// string from the database.
func (r *Resolver) fetchProvider(ctx context.Context, providerID string) (sqlc.LlmProvider, error) {
	var none sqlc.LlmProvider
	if r.queries == nil {
		return none, errors.New("llm provider queries not configured")
	}
	if strings.TrimSpace(providerID) == "" {
		return none, errors.New("llm provider id missing")
	}
	parsed, err := uuid.Parse(providerID)
	if err != nil {
		return none, err
	}
	// uuid.UUID's underlying type is [16]byte, so it assigns directly into
	// the pgtype.UUID byte array.
	return r.queries.GetLlmProviderByID(ctx, pgtype.UUID{Bytes: parsed, Valid: true})
}