From 8d5c38f0e56e8eee4a915cb49d3c336497f509c2 Mon Sep 17 00:00:00 2001 From: Acbox Liu Date: Wed, 8 Apr 2026 01:03:44 +0800 Subject: [PATCH] refactor: unify providers and models tables (#338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: unify providers and models tables - Rename `llm_providers` → `providers`, `llm_provider_oauth_tokens` → `provider_oauth_tokens` - Remove `tts_providers` and `tts_models` tables; speech models now live in the unified `models` table with `type = 'speech'` - Replace top-level `api_key`/`base_url` columns with a JSONB `config` field on `providers` - Rename `llm_provider_id` → `provider_id` across all references - Add `edge-speech` client type and `conf/providers/edge.yaml` default provider - Create new read-only speech endpoints (`/speech-providers`, `/speech-models`) backed by filtered views of the unified tables - Remove old TTS CRUD handlers; simplify speech page to read-only + test - Update registry loader to skip malformed YAML files instead of failing entirely - Fix YAML quoting for model names containing colons in openrouter.yaml - Regenerate sqlc, swagger, and TypeScript SDK * fix: exclude speech providers from providers list endpoint ListProviders now filters out client_type matching '%-speech' so Edge and future speech providers no longer appear on the Providers page. ListSpeechProviders uses the same pattern match instead of hard-coding 'edge-speech'. * fix: use explicit client_type list instead of LIKE pattern Replace '%-speech' pattern with explicit IN ('edge-speech') for both ListProviders (exclusion) and ListSpeechProviders (inclusion). New speech client types must be added to both queries. * fix: use EXECUTE for dynamic SQL in migrations referencing old schema PL/pgSQL pre-validates column/table references in static SQL statements inside DO blocks before evaluating IF/RETURN guards. 
This caused migrations 0010-0061 to fail on fresh databases where the canonical schema uses `providers`/`provider_id` instead of `llm_providers`/ `llm_provider_id`. Wrap all SQL that references potentially non-existent old schema objects (llm_providers, llm_provider_id, tts_providers, tts_models, etc.) in EXECUTE strings so they are only parsed at runtime when actually reached. * fix: revert canonical schema to use llm_providers for migration compatibility The CI migrations workflow (up → down → up) failed because 0061 down renames `providers` back to `llm_providers`, but 0001 down only dropped `providers` — leaving `llm_providers` as a remnant. On the second migrate up, 0010 found the stale `llm_providers` and tried to reference `models.llm_provider_id` which no longer existed. Revert 0001 canonical schema to use original names (llm_providers, tts_providers, tts_models) so incremental migrations work naturally and 0061 handles the final rename. Remove EXECUTE wrappers and unnecessary guards from migrations that now always operate on llm_providers. * fix: icons * fix: sync canonical schema with 0061 migration to fix sqlc column mismatch 0001_init.up.sql still used old names (llm_providers, llm_provider_id) and included dropped tts_providers/tts_models tables. sqlc could not parse the PL/pgSQL EXECUTE in migration 0061, so generated code retained stale columns (input_modalities, supports_reasoning) causing runtime "column does not exist" errors when adding models. - Update 0001_init.up.sql to current schema (providers, provider_id, no tts tables, add provider_oauth_tokens) - Use ALTER TABLE IF EXISTS in 0010/0041/0042 for backward compat - Regenerate sqlc * fix: guard all legacy migrations against fresh schema for CI compat On fresh databases, 0001_init.up.sql creates providers/provider_id (not llm_providers/llm_provider_id). Migrations 0013, 0041, 0046, 0047 referenced the old names without guards, causing CI migration failures. 
- 0013: check llm_provider_id column exists before adding old constraint - 0041: check llm_providers table exists before backfill/constraint DDL - 0046: wrap CREATE TABLE in DO block with llm_providers existence check - 0047: use ALTER TABLE IF EXISTS + DO block guard --- .../web/src/components/add-provider/index.vue | 7 +- .../web/src/components/create-model/index.vue | 2 +- apps/web/src/constants/client-types.ts | 5 + .../pages/bots/components/bot-settings.vue | 19 +- .../pages/bots/components/model-options.vue | 2 +- .../bots/components/tts-model-select.vue | 8 +- .../pages/providers/components/model-list.vue | 2 +- .../providers/components/provider-form.vue | 19 +- .../pages/speech/components/add-tts-model.vue | 130 --- .../speech/components/add-tts-provider.vue | 142 --- .../speech/components/model-config-editor.vue | 2 +- .../speech/components/provider-setting.vue | 336 ++----- apps/web/src/pages/speech/index.vue | 34 +- cmd/agent/main.go | 21 +- cmd/memoh/serve.go | 21 +- conf/providers/edge.yaml | 8 + conf/providers/openrouter.yaml | 666 ++++++------ db/migrations/0001_init.up.sql | 59 +- .../0010_client_type_to_model.up.sql | 6 +- .../0013_model_id_unique_per_provider.up.sql | 6 +- .../0041_provider_model_refactor.up.sql | 17 +- db/migrations/0042_provider_enable.up.sql | 2 +- db/migrations/0046_llm_provider_oauth.up.sql | 38 +- .../0047_add_openai_codex_client_type.up.sql | 23 +- db/migrations/0061_unify_providers.down.sql | 79 ++ db/migrations/0061_unify_providers.up.sql | 94 ++ db/queries/llm_provider_oauth.sql | 52 - db/queries/models.sql | 102 +- db/queries/provider_oauth.sql | 52 + db/queries/settings.sql | 4 +- db/queries/token_usage.sql | 2 +- db/queries/tts_models.sql | 50 - db/queries/tts_providers.sql | 38 - devenv/docker-compose.yml | 28 +- internal/agent/tools/image_gen.go | 4 +- internal/agent/tools/subagent.go | 4 +- internal/command/model.go | 2 +- internal/command/settings.go | 2 +- internal/conversation/flow/resolver.go | 12 +- 
.../conversation/flow/resolver_compaction.go | 4 +- .../flow/resolver_model_selection.go | 28 +- internal/conversation/flow/resolver_title.go | 4 +- internal/db/sqlc/llm_provider_oauth.sql.go | 163 --- internal/db/sqlc/models.go | 71 +- internal/db/sqlc/models.sql.go | 700 ++++++++----- internal/db/sqlc/provider_oauth.sql.go | 163 +++ internal/db/sqlc/settings.sql.go | 4 +- internal/db/sqlc/token_usage.sql.go | 2 +- internal/db/sqlc/tts_models.sql.go | 248 ----- internal/db/sqlc/tts_providers.sql.go | 205 ---- internal/handlers/providers.go | 11 +- internal/handlers/tts_providers.go | 306 +----- .../memory/adapters/builtin/dense_runtime.go | 14 +- internal/models/config.go | 16 + internal/models/models.go | 91 +- internal/models/models_test.go | 58 +- internal/models/probe.go | 12 +- internal/models/types.go | 22 +- internal/providers/credentials.go | 7 +- internal/providers/oauth.go | 30 +- internal/providers/service.go | 127 ++- internal/providers/service_test.go | 40 +- internal/providers/types.go | 13 +- internal/registry/registry.go | 33 +- internal/registry/types.go | 18 +- internal/tts/service.go | 476 +++------ internal/tts/types.go | 68 +- packages/icons/src/icons/Brave.vue | 18 +- packages/icons/src/icons/Duckduckgo.vue | 26 +- packages/icons/src/icons/Sogou.vue | 10 +- packages/icons/src/icons/Yandex.vue | 10 +- packages/sdk/src/@pinia/colada.gen.ts | 352 ++----- packages/sdk/src/index.ts | 4 +- packages/sdk/src/sdk.gen.ts | 167 +--- packages/sdk/src/types.gen.ts | 696 ++++--------- spec/docs.go | 946 +++++------------- spec/swagger.json | 946 +++++------------- spec/swagger.yaml | 622 ++++-------- 78 files changed, 3163 insertions(+), 5668 deletions(-) delete mode 100644 apps/web/src/pages/speech/components/add-tts-model.vue delete mode 100644 apps/web/src/pages/speech/components/add-tts-provider.vue create mode 100644 conf/providers/edge.yaml create mode 100644 db/migrations/0061_unify_providers.down.sql create mode 100644 
db/migrations/0061_unify_providers.up.sql delete mode 100644 db/queries/llm_provider_oauth.sql create mode 100644 db/queries/provider_oauth.sql delete mode 100644 db/queries/tts_models.sql delete mode 100644 db/queries/tts_providers.sql delete mode 100644 internal/db/sqlc/llm_provider_oauth.sql.go create mode 100644 internal/db/sqlc/provider_oauth.sql.go delete mode 100644 internal/db/sqlc/tts_models.sql.go delete mode 100644 internal/db/sqlc/tts_providers.sql.go create mode 100644 internal/models/config.go diff --git a/apps/web/src/components/add-provider/index.vue b/apps/web/src/components/add-provider/index.vue index 5b1e0679..b3d47bc8 100644 --- a/apps/web/src/components/add-provider/index.vue +++ b/apps/web/src/components/add-provider/index.vue @@ -186,8 +186,13 @@ const clientTypeOptions = computed(() => const queryCache = useQueryCache() const { mutateAsync: createProviderMutation, isLoading } = useMutation({ mutation: async (data: Record) => { + const config: Record = {} + if (data.base_url) config.base_url = data.base_url + if (data.api_key) config.api_key = data.api_key const payload = { - ...data, + name: data.name, + client_type: data.client_type, + config, metadata: { additionalProp1: {} }, } const { data: result } = await postProviders({ body: payload as ProvidersCreateRequest, throwOnError: true }) diff --git a/apps/web/src/components/create-model/index.vue b/apps/web/src/components/create-model/index.vue index 28ffefef..dd82e571 100644 --- a/apps/web/src/components/create-model/index.vue +++ b/apps/web/src/components/create-model/index.vue @@ -297,7 +297,7 @@ async function addModel() { const payload: Record = { type, model_id, - llm_provider_id: id, + provider_id: id, config, } diff --git a/apps/web/src/constants/client-types.ts b/apps/web/src/constants/client-types.ts index 318a8d1c..57111650 100644 --- a/apps/web/src/constants/client-types.ts +++ b/apps/web/src/constants/client-types.ts @@ -30,6 +30,11 @@ export const CLIENT_TYPE_META: Record = { 
label: 'Google Generative AI', hint: 'Gemini API', }, + 'edge-speech': { + value: 'edge-speech', + label: 'Edge Speech', + hint: 'Microsoft Edge Read Aloud TTS', + }, } export const CLIENT_TYPE_LIST: ClientTypeMeta[] = Object.values(CLIENT_TYPE_META) diff --git a/apps/web/src/pages/bots/components/bot-settings.vue b/apps/web/src/pages/bots/components/bot-settings.vue index 96ab75a1..5132249e 100644 --- a/apps/web/src/pages/bots/components/bot-settings.vue +++ b/apps/web/src/pages/bots/components/bot-settings.vue @@ -330,7 +330,7 @@ import MemoryProviderSelect from './memory-provider-select.vue' import TtsModelSelect from './tts-model-select.vue' import BrowserContextSelect from './browser-context-select.vue' import { useQuery, useMutation, useQueryCache } from '@pinia/colada' -import { getBotsByBotIdSettings, putBotsByBotIdSettings, deleteBotsById, getModels, getProviders, getSearchProviders, getMemoryProviders, getTtsProviders, getBrowserContexts, getBotsByBotIdMemoryStatus, postBotsByBotIdMemoryRebuild } from '@memohai/sdk' +import { getBotsByBotIdSettings, putBotsByBotIdSettings, deleteBotsById, getModels, getProviders, getSearchProviders, getMemoryProviders, getSpeechProviders, getSpeechModels, getBrowserContexts, getBotsByBotIdMemoryStatus, postBotsByBotIdMemoryRebuild } from '@memohai/sdk' import type { SettingsSettings } from '@memohai/sdk' import type { Ref } from 'vue' import { resolveApiErrorMessage } from '@/utils/api-error' @@ -389,23 +389,18 @@ const { data: memoryProviderData } = useQuery({ }) const { data: ttsProviderData } = useQuery({ - key: ['tts-providers'], + key: ['speech-providers'], query: async () => { - const { data } = await getTtsProviders({ throwOnError: true }) + const { data } = await getSpeechProviders({ throwOnError: true }) return data }, }) const { data: ttsModelData } = useQuery({ - key: ['tts-models'], + key: ['speech-models'], query: async () => { - const apiBase = import.meta.env.VITE_API_URL?.trim() || '/api' - const token = 
localStorage.getItem('token') - const resp = await fetch(`${apiBase}/tts-models`, { - headers: token ? { Authorization: `Bearer ${token}` } : {}, - }) - if (!resp.ok) throw new Error('Failed to fetch TTS models') - return resp.json() + const { data } = await getSpeechModels({ throwOnError: true }) + return data }, }) @@ -448,7 +443,7 @@ const searchProviders = computed(() => (searchProviderData.value ?? []).filter(( const memoryProviders = computed(() => memoryProviderData.value ?? []) const ttsProviders = computed(() => (ttsProviderData.value ?? []).filter((p) => p.enable !== false)) const enabledTtsProviderIds = computed(() => new Set(ttsProviders.value.map((p) => p.id))) -const ttsModels = computed(() => (ttsModelData.value ?? []).filter((m: Record) => enabledTtsProviderIds.value.has(m.tts_provider_id as string))) +const ttsModels = computed(() => (ttsModelData.value ?? []).filter((m: Record) => enabledTtsProviderIds.value.has(m.provider_id as string))) const browserContexts = computed(() => browserContextData.value ?? []) // ---- Form ---- diff --git a/apps/web/src/pages/bots/components/model-options.vue b/apps/web/src/pages/bots/components/model-options.vue index 3c3b2609..71f00d0c 100644 --- a/apps/web/src/pages/bots/components/model-options.vue +++ b/apps/web/src/pages/bots/components/model-options.vue @@ -122,7 +122,7 @@ const typeFilteredModels = computed(() => const options = computed(() => typeFilteredModels.value.map((model) => { - const providerId = model.llm_provider_id ?? '' + const providerId = model.provider_id ?? 
'' const config = model.config as { compatibilities?: string[]; context_window?: number } | undefined return { value: model.id || model.model_id || '', diff --git a/apps/web/src/pages/bots/components/tts-model-select.vue b/apps/web/src/pages/bots/components/tts-model-select.vue index 27a3f7a8..68e0870b 100644 --- a/apps/web/src/pages/bots/components/tts-model-select.vue +++ b/apps/web/src/pages/bots/components/tts-model-select.vue @@ -59,14 +59,14 @@ export interface TtsModelOption { id: string model_id: string name: string - tts_provider_id: string + provider_id: string provider_type?: string } export interface TtsProviderOption { id: string name: string - provider: string + client_type: string } const props = defineProps<{ @@ -96,8 +96,8 @@ const options = computed(() => { value: model.id || '', label: model.name || model.model_id || '', description: model.model_id, - group: model.tts_provider_id, - groupLabel: providerMap.value.get(model.tts_provider_id) ?? model.tts_provider_id, + group: model.provider_id, + groupLabel: providerMap.value.get(model.provider_id) ?? model.provider_id, keywords: [model.name ?? '', model.model_id ?? '', model.provider_type ?? ''], })) return [noneOption, ...modelOptions] diff --git a/apps/web/src/pages/providers/components/model-list.vue b/apps/web/src/pages/providers/components/model-list.vue index 9194b691..e30c10d4 100644 --- a/apps/web/src/pages/providers/components/model-list.vue +++ b/apps/web/src/pages/providers/components/model-list.vue @@ -32,7 +32,7 @@
@@ -317,7 +317,7 @@ const providerSchema = toTypedSchema(z.object({ additionalProp1: z.object({}), }), }).superRefine((value, ctx) => { - if (value.client_type !== 'openai-codex' && !value.api_key?.trim() && !providerWithAuth.value?.api_key) { + if (value.client_type !== 'openai-codex' && !value.api_key?.trim() && !(providerWithAuth.value?.config as Record | undefined)?.api_key) { ctx.addIssue({ code: z.ZodIssueCode.custom, path: ['api_key'], @@ -332,10 +332,11 @@ const form = useForm({ watch(() => props.provider, (newVal) => { if (newVal) { + const cfg = newVal.config as Record | undefined form.setValues({ enable: newVal.enable ?? true, name: newVal.name, - base_url: newVal.base_url, + base_url: (cfg?.base_url as string) ?? '', api_key: '', client_type: newVal.client_type || 'openai-completions', }) @@ -362,6 +363,7 @@ watch(() => [props.provider?.id, form.values.client_type] as const, async ([id, const hasChanges = computed(() => { const raw = props.provider + const cfg = raw?.config as Record | undefined const baseChanged = JSON.stringify({ enable: form.values.enable, name: form.values.name, @@ -371,7 +373,7 @@ const hasChanges = computed(() => { }) !== JSON.stringify({ enable: raw?.enable ?? true, name: raw?.name, - base_url: raw?.base_url, + base_url: (cfg?.base_url as string) ?? 
'', client_type: raw?.client_type || 'openai-completions', metadata: { additionalProp1: {} }, }) @@ -381,16 +383,17 @@ const hasChanges = computed(() => { }) const editProvider = form.handleSubmit(async (value) => { + const config: Record = { base_url: value.base_url } + if (value.api_key && value.api_key.trim() !== '') { + config.api_key = value.api_key + } const payload: Record = { enable: value.enable, name: value.name, - base_url: value.base_url, + config, client_type: value.client_type, metadata: value.metadata, } - if (value.api_key && value.api_key.trim() !== '') { - payload.api_key = value.api_key - } emit('submit', payload) }) diff --git a/apps/web/src/pages/speech/components/add-tts-model.vue b/apps/web/src/pages/speech/components/add-tts-model.vue deleted file mode 100644 index 70f91132..00000000 --- a/apps/web/src/pages/speech/components/add-tts-model.vue +++ /dev/null @@ -1,130 +0,0 @@ - - - diff --git a/apps/web/src/pages/speech/components/add-tts-provider.vue b/apps/web/src/pages/speech/components/add-tts-provider.vue deleted file mode 100644 index a501525e..00000000 --- a/apps/web/src/pages/speech/components/add-tts-provider.vue +++ /dev/null @@ -1,142 +0,0 @@ - - - diff --git a/apps/web/src/pages/speech/components/model-config-editor.vue b/apps/web/src/pages/speech/components/model-config-editor.vue index 46bc7190..af9ccb81 100644 --- a/apps/web/src/pages/speech/components/model-config-editor.vue +++ b/apps/web/src/pages/speech/components/model-config-editor.vue @@ -371,7 +371,7 @@ async function handleTest() { try { const apiBase = import.meta.env.VITE_API_URL?.trim() || '/api' const token = localStorage.getItem('token') - const resp = await fetch(`${apiBase}/tts-models/${props.modelId}/test`, { + const resp = await fetch(`${apiBase}/speech-models/${props.modelId}/test`, { method: 'POST', headers: { 'Content-Type': 'application/json', diff --git a/apps/web/src/pages/speech/components/provider-setting.vue 
b/apps/web/src/pages/speech/components/provider-setting.vue index 8a76b723..4e9b1f52 100644 --- a/apps/web/src/pages/speech/components/provider-setting.vue +++ b/apps/web/src/pages/speech/components/provider-setting.vue @@ -9,7 +9,7 @@ {{ curProvider?.name }}

- {{ currentMeta?.display_name ?? curProvider?.provider }} + {{ currentMeta?.display_name ?? curProvider?.client_type }}

@@ -25,179 +25,91 @@
-
-
-
- - - - - - - - -
- - - - -
-
-

- {{ $t('speech.models') }} -

-
- - - {{ $t('speech.importModels') }} - - -
-
- -
- {{ $t('speech.noModels') }} -
- -
- - -
- -
-
-
+ +
+
+

+ {{ $t('speech.models') }} +

-
- + {{ $t('speech.noModels') }} +
+ +
+
+ + + +
- {{ $t('provider.saveChanges') }} - - - + +
+ + - - diff --git a/cmd/agent/main.go b/cmd/agent/main.go index fb802138..22c80377 100644 --- a/cmd/agent/main.go +++ b/cmd/agent/main.go @@ -251,7 +251,7 @@ func runServe() { provideServerHandler(weixin.NewQRServerHandler), provideServerHandler(provideUsersHandler), provideServerHandler(handlers.NewMemoryProvidersHandler), - provideServerHandler(handlers.NewTtsProvidersHandler), + provideServerHandler(handlers.NewSpeechHandler), provideServerHandler(handlers.NewBotTtsHandler), provideServerHandler(handlers.NewEmailProvidersHandler), provideServerHandler(handlers.NewEmailBindingsHandler), @@ -274,7 +274,7 @@ func runServe() { startRegistrySync, startMemoryProviderBootstrap, startSearchProviderBootstrap, - startTtsProviderBootstrap, + startScheduleService, startHeartbeatService, startChannelManager, @@ -878,7 +878,7 @@ func provideServer(params serverParams) *server.Server { func startRegistrySync(lc fx.Lifecycle, log *slog.Logger, cfg config.Config, queries *dbsqlc.Queries) { lc.Append(fx.Hook{ OnStart: func(ctx context.Context) error { - defs, err := registry.Load(cfg.Registry.ProvidersPath()) + defs, err := registry.Load(log, cfg.Registry.ProvidersPath()) if err != nil { log.Warn("registry: failed to load provider definitions", slog.Any("error", err)) return nil @@ -910,17 +910,6 @@ func startMemoryProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, mpService * }) } -func startTtsProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, ttsService *ttspkg.Service) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - if err := ttsService.EnsureDefaults(ctx); err != nil { - log.Warn("failed to ensure default tts providers", slog.Any("error", err)) - } - return nil - }, - }) -} - func startSearchProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, spService *searchproviders.Service) { lc.Append(fx.Hook{ OnStart: func(ctx context.Context) error { @@ -1128,8 +1117,8 @@ func (c *lazyLLMClient) resolve(ctx context.Context) (memprovider.LLM, error) { } 
return memllm.New(memllm.Config{ ModelID: memoryModel.ModelID, - BaseURL: strings.TrimRight(memoryProvider.BaseUrl, "/"), - APIKey: memoryProvider.ApiKey, + BaseURL: strings.TrimRight(providers.ProviderConfigString(memoryProvider, "base_url"), "/"), + APIKey: providers.ProviderConfigString(memoryProvider, "api_key"), ClientType: memoryProvider.ClientType, Timeout: c.timeout, }), nil diff --git a/cmd/memoh/serve.go b/cmd/memoh/serve.go index 091e5628..4704f3c7 100644 --- a/cmd/memoh/serve.go +++ b/cmd/memoh/serve.go @@ -175,7 +175,7 @@ func runServe() { provideServerHandler(weixin.NewQRServerHandler), provideServerHandler(provideUsersHandler), provideServerHandler(handlers.NewMemoryProvidersHandler), - provideServerHandler(handlers.NewTtsProvidersHandler), + provideServerHandler(handlers.NewSpeechHandler), provideServerHandler(handlers.NewBotTtsHandler), provideServerHandler(handlers.NewEmailProvidersHandler), provideServerHandler(handlers.NewEmailBindingsHandler), @@ -198,7 +198,7 @@ func runServe() { startRegistrySync, startMemoryProviderBootstrap, startSearchProviderBootstrap, - startTtsProviderBootstrap, + startScheduleService, startHeartbeatService, startChannelManager, @@ -291,7 +291,7 @@ func provideMemoryProviderRegistry(log *slog.Logger, llm memprovider.LLM, chatSe func startRegistrySync(lc fx.Lifecycle, log *slog.Logger, cfg config.Config, queries *dbsqlc.Queries) { lc.Append(fx.Hook{ OnStart: func(ctx context.Context) error { - defs, err := registry.Load(cfg.Registry.ProvidersPath()) + defs, err := registry.Load(log, cfg.Registry.ProvidersPath()) if err != nil { log.Warn("registry: failed to load provider definitions", slog.Any("error", err)) return nil @@ -323,17 +323,6 @@ func startMemoryProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, mpService * }) } -func startTtsProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, ttsService *ttspkg.Service) { - lc.Append(fx.Hook{ - OnStart: func(ctx context.Context) error { - if err := 
ttsService.EnsureDefaults(ctx); err != nil { - log.Warn("failed to ensure default tts providers", slog.Any("error", err)) - } - return nil - }, - }) -} - func startSearchProviderBootstrap(lc fx.Lifecycle, log *slog.Logger, spService *searchproviders.Service) { lc.Append(fx.Hook{ OnStart: func(ctx context.Context) error { @@ -1050,8 +1039,8 @@ func (c *lazyLLMClient) resolve(ctx context.Context) (memprovider.LLM, error) { } return memllm.New(memllm.Config{ ModelID: memoryModel.ModelID, - BaseURL: strings.TrimRight(memoryProvider.BaseUrl, "/"), - APIKey: memoryProvider.ApiKey, + BaseURL: strings.TrimRight(providers.ProviderConfigString(memoryProvider, "base_url"), "/"), + APIKey: providers.ProviderConfigString(memoryProvider, "api_key"), ClientType: memoryProvider.ClientType, Timeout: c.timeout, }), nil diff --git a/conf/providers/edge.yaml b/conf/providers/edge.yaml new file mode 100644 index 00000000..7d2e267a --- /dev/null +++ b/conf/providers/edge.yaml @@ -0,0 +1,8 @@ +name: Edge +client_type: edge-speech +icon: edge + +models: + - model_id: edge-read-aloud + name: Edge Read Aloud + type: speech diff --git a/conf/providers/openrouter.yaml b/conf/providers/openrouter.yaml index d95a6ffb..224f80bb 100644 --- a/conf/providers/openrouter.yaml +++ b/conf/providers/openrouter.yaml @@ -11,36 +11,36 @@ models: context_window: 2000000 - model_id: ai21/jamba-large-1.7 - name: AI21: Jamba Large 1.7 + name: "AI21: Jamba Large 1.7" type: chat config: compatibilities: [tool-call] context_window: 256000 - model_id: aion-labs/aion-1.0 - name: AionLabs: Aion-1.0 + name: "AionLabs: Aion-1.0" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: aion-labs/aion-1.0-mini - name: AionLabs: Aion-1.0-Mini + name: "AionLabs: Aion-1.0-Mini" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: aion-labs/aion-2.0 - name: AionLabs: Aion-2.0 + name: "AionLabs: Aion-2.0" type: chat config: compatibilities: [reasoning] context_window: 
131072 - model_id: aion-labs/aion-rp-llama-3.1-8b - name: AionLabs: Aion-RP 1.0 (8B) + name: "AionLabs: Aion-RP 1.0 (8B)" type: chat config: context_window: 32768 - model_id: alfredpros/codellama-7b-instruct-solidity - name: AlfredPros: CodeLLaMa 7B Instruct Solidity + name: "AlfredPros: CodeLLaMa 7B Instruct Solidity" type: chat config: context_window: 4096 @@ -51,24 +51,24 @@ models: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: allenai/olmo-2-0325-32b-instruct - name: AllenAI: Olmo 2 32B Instruct + name: "AllenAI: Olmo 2 32B Instruct" type: chat config: context_window: 128000 - model_id: allenai/olmo-3-32b-think - name: AllenAI: Olmo 3 32B Think + name: "AllenAI: Olmo 3 32B Think" type: chat config: compatibilities: [reasoning] context_window: 65536 - model_id: allenai/olmo-3.1-32b-instruct - name: AllenAI: Olmo 3.1 32B Instruct + name: "AllenAI: Olmo 3.1 32B Instruct" type: chat config: compatibilities: [tool-call] context_window: 65536 - model_id: allenai/olmo-3.1-32b-think - name: AllenAI: Olmo 3.1 32B Think + name: "AllenAI: Olmo 3.1 32B Think" type: chat config: compatibilities: [reasoning] @@ -79,31 +79,31 @@ models: config: context_window: 6144 - model_id: amazon/nova-2-lite-v1 - name: Amazon: Nova 2 Lite + name: "Amazon: Nova 2 Lite" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: amazon/nova-lite-v1 - name: Amazon: Nova Lite 1.0 + name: "Amazon: Nova Lite 1.0" type: chat config: compatibilities: [vision, tool-call] context_window: 300000 - model_id: amazon/nova-micro-v1 - name: Amazon: Nova Micro 1.0 + name: "Amazon: Nova Micro 1.0" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: amazon/nova-premier-v1 - name: Amazon: Nova Premier 1.0 + name: "Amazon: Nova Premier 1.0" type: chat config: compatibilities: [vision, tool-call] context_window: 1000000 - model_id: amazon/nova-pro-v1 - name: Amazon: Nova Pro 1.0 + name: "Amazon: Nova Pro 1.0" 
type: chat config: compatibilities: [vision, tool-call] @@ -114,457 +114,457 @@ models: config: context_window: 16384 - model_id: anthropic/claude-3-haiku - name: Anthropic: Claude 3 Haiku + name: "Anthropic: Claude 3 Haiku" type: chat config: compatibilities: [vision, tool-call] context_window: 200000 - model_id: anthropic/claude-3.5-haiku - name: Anthropic: Claude 3.5 Haiku + name: "Anthropic: Claude 3.5 Haiku" type: chat config: compatibilities: [vision, tool-call] context_window: 200000 - model_id: anthropic/claude-3.5-sonnet - name: Anthropic: Claude 3.5 Sonnet + name: "Anthropic: Claude 3.5 Sonnet" type: chat config: compatibilities: [vision, tool-call] context_window: 200000 - model_id: anthropic/claude-3.7-sonnet - name: Anthropic: Claude 3.7 Sonnet + name: "Anthropic: Claude 3.7 Sonnet" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: "anthropic/claude-3.7-sonnet:thinking" - name: Anthropic: Claude 3.7 Sonnet (thinking) + name: "Anthropic: Claude 3.7 Sonnet (thinking)" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-haiku-4.5 - name: Anthropic: Claude Haiku 4.5 + name: "Anthropic: Claude Haiku 4.5" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-opus-4 - name: Anthropic: Claude Opus 4 + name: "Anthropic: Claude Opus 4" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-opus-4.1 - name: Anthropic: Claude Opus 4.1 + name: "Anthropic: Claude Opus 4.1" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-opus-4.5 - name: Anthropic: Claude Opus 4.5 + name: "Anthropic: Claude Opus 4.5" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-opus-4.6 - name: Anthropic: Claude 
Opus 4.6 + name: "Anthropic: Claude Opus 4.6" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: anthropic/claude-sonnet-4 - name: Anthropic: Claude Sonnet 4 + name: "Anthropic: Claude Sonnet 4" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: anthropic/claude-sonnet-4.5 - name: Anthropic: Claude Sonnet 4.5 + name: "Anthropic: Claude Sonnet 4.5" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: anthropic/claude-sonnet-4.6 - name: Anthropic: Claude Sonnet 4.6 + name: "Anthropic: Claude Sonnet 4.6" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: arcee-ai/coder-large - name: Arcee AI: Coder Large + name: "Arcee AI: Coder Large" type: chat config: context_window: 32768 - model_id: arcee-ai/maestro-reasoning - name: Arcee AI: Maestro Reasoning + name: "Arcee AI: Maestro Reasoning" type: chat config: context_window: 131072 - model_id: arcee-ai/spotlight - name: Arcee AI: Spotlight + name: "Arcee AI: Spotlight" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: "arcee-ai/trinity-large-preview:free" - name: Arcee AI: Trinity Large Preview (free) + name: "Arcee AI: Trinity Large Preview (free)" type: chat config: compatibilities: [tool-call] context_window: 131000 - model_id: arcee-ai/trinity-large-thinking - name: Arcee AI: Trinity Large Thinking + name: "Arcee AI: Trinity Large Thinking" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: arcee-ai/trinity-mini - name: Arcee AI: Trinity Mini + name: "Arcee AI: Trinity Mini" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: "arcee-ai/trinity-mini:free" - name: Arcee AI: Trinity Mini (free) + name: "Arcee AI: Trinity Mini (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 
131072 - model_id: arcee-ai/virtuoso-large - name: Arcee AI: Virtuoso Large + name: "Arcee AI: Virtuoso Large" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: baidu/ernie-4.5-21b-a3b - name: Baidu: ERNIE 4.5 21B A3B + name: "Baidu: ERNIE 4.5 21B A3B" type: chat config: compatibilities: [tool-call] context_window: 120000 - model_id: baidu/ernie-4.5-21b-a3b-thinking - name: Baidu: ERNIE 4.5 21B A3B Thinking + name: "Baidu: ERNIE 4.5 21B A3B Thinking" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: baidu/ernie-4.5-300b-a47b - name: Baidu: ERNIE 4.5 300B A47B + name: "Baidu: ERNIE 4.5 300B A47B " type: chat config: context_window: 123000 - model_id: baidu/ernie-4.5-vl-28b-a3b - name: Baidu: ERNIE 4.5 VL 28B A3B + name: "Baidu: ERNIE 4.5 VL 28B A3B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 30000 - model_id: baidu/ernie-4.5-vl-424b-a47b - name: Baidu: ERNIE 4.5 VL 424B A47B + name: "Baidu: ERNIE 4.5 VL 424B A47B " type: chat config: compatibilities: [vision, reasoning] context_window: 123000 - model_id: bytedance-seed/seed-1.6 - name: ByteDance Seed: Seed 1.6 + name: "ByteDance Seed: Seed 1.6" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: bytedance-seed/seed-1.6-flash - name: ByteDance Seed: Seed 1.6 Flash + name: "ByteDance Seed: Seed 1.6 Flash" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: bytedance-seed/seed-2.0-lite - name: ByteDance Seed: Seed-2.0-Lite + name: "ByteDance Seed: Seed-2.0-Lite" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: bytedance-seed/seed-2.0-mini - name: ByteDance Seed: Seed-2.0-Mini + name: "ByteDance Seed: Seed-2.0-Mini" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: bytedance/ui-tars-1.5-7b - name: ByteDance: 
UI-TARS 7B + name: "ByteDance: UI-TARS 7B " type: chat config: compatibilities: [vision] context_window: 128000 - model_id: "cognitivecomputations/dolphin-mistral-24b-venice-edition:free" - name: Venice: Uncensored (free) + name: "Venice: Uncensored (free)" type: chat config: context_window: 32768 - model_id: cohere/command-a - name: Cohere: Command A + name: "Cohere: Command A" type: chat config: context_window: 256000 - model_id: cohere/command-r-08-2024 - name: Cohere: Command R (08-2024) + name: "Cohere: Command R (08-2024)" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: cohere/command-r-plus-08-2024 - name: Cohere: Command R+ (08-2024) + name: "Cohere: Command R+ (08-2024)" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: cohere/command-r7b-12-2024 - name: Cohere: Command R7B (12-2024) + name: "Cohere: Command R7B (12-2024)" type: chat config: context_window: 128000 - model_id: deepcogito/cogito-v2.1-671b - name: Deep Cogito: Cogito v2.1 671B + name: "Deep Cogito: Cogito v2.1 671B" type: chat config: compatibilities: [reasoning] context_window: 128000 - model_id: deepseek/deepseek-chat - name: DeepSeek: DeepSeek V3 + name: "DeepSeek: DeepSeek V3" type: chat config: compatibilities: [tool-call] context_window: 163840 - model_id: deepseek/deepseek-chat-v3-0324 - name: DeepSeek: DeepSeek V3 0324 + name: "DeepSeek: DeepSeek V3 0324" type: chat config: compatibilities: [tool-call, reasoning] context_window: 163840 - model_id: deepseek/deepseek-chat-v3.1 - name: DeepSeek: DeepSeek V3.1 + name: "DeepSeek: DeepSeek V3.1" type: chat config: compatibilities: [tool-call, reasoning] context_window: 32768 - model_id: deepseek/deepseek-r1 - name: DeepSeek: R1 + name: "DeepSeek: R1" type: chat config: compatibilities: [tool-call, reasoning] context_window: 64000 - model_id: deepseek/deepseek-r1-0528 - name: DeepSeek: R1 0528 + name: "DeepSeek: R1 0528" type: chat config: compatibilities: [tool-call, 
reasoning] context_window: 163840 - model_id: deepseek/deepseek-r1-distill-llama-70b - name: DeepSeek: R1 Distill Llama 70B + name: "DeepSeek: R1 Distill Llama 70B" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: deepseek/deepseek-r1-distill-qwen-32b - name: DeepSeek: R1 Distill Qwen 32B + name: "DeepSeek: R1 Distill Qwen 32B" type: chat config: compatibilities: [reasoning] context_window: 32768 - model_id: deepseek/deepseek-v3.1-terminus - name: DeepSeek: DeepSeek V3.1 Terminus + name: "DeepSeek: DeepSeek V3.1 Terminus" type: chat config: compatibilities: [tool-call, reasoning] context_window: 163840 - model_id: deepseek/deepseek-v3.2 - name: DeepSeek: DeepSeek V3.2 + name: "DeepSeek: DeepSeek V3.2" type: chat config: compatibilities: [tool-call, reasoning] context_window: 163840 - model_id: deepseek/deepseek-v3.2-exp - name: DeepSeek: DeepSeek V3.2 Exp + name: "DeepSeek: DeepSeek V3.2 Exp" type: chat config: compatibilities: [tool-call, reasoning] context_window: 163840 - model_id: deepseek/deepseek-v3.2-speciale - name: DeepSeek: DeepSeek V3.2 Speciale + name: "DeepSeek: DeepSeek V3.2 Speciale" type: chat config: compatibilities: [reasoning] context_window: 163840 - model_id: eleutherai/llemma_7b - name: EleutherAI: Llemma 7b + name: "EleutherAI: Llemma 7b" type: chat config: context_window: 4096 - model_id: essentialai/rnj-1-instruct - name: EssentialAI: Rnj 1 Instruct + name: "EssentialAI: Rnj 1 Instruct" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: google/gemini-2.0-flash-001 - name: Google: Gemini 2.0 Flash + name: "Google: Gemini 2.0 Flash" type: chat config: compatibilities: [vision, tool-call] context_window: 1048576 - model_id: google/gemini-2.0-flash-lite-001 - name: Google: Gemini 2.0 Flash Lite + name: "Google: Gemini 2.0 Flash Lite" type: chat config: compatibilities: [vision, tool-call] context_window: 1048576 - model_id: google/gemini-2.5-flash - name: Google: Gemini 2.5 Flash 
+ name: "Google: Gemini 2.5 Flash" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-2.5-flash-image - name: Google: Nano Banana (Gemini 2.5 Flash Image) + name: "Google: Nano Banana (Gemini 2.5 Flash Image)" type: chat config: compatibilities: [vision, image-output] context_window: 32768 - model_id: google/gemini-2.5-flash-lite - name: Google: Gemini 2.5 Flash Lite + name: "Google: Gemini 2.5 Flash Lite" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-2.5-flash-lite-preview-09-2025 - name: Google: Gemini 2.5 Flash Lite Preview 09-2025 + name: "Google: Gemini 2.5 Flash Lite Preview 09-2025" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-2.5-pro - name: Google: Gemini 2.5 Pro + name: "Google: Gemini 2.5 Pro" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-2.5-pro-preview - name: Google: Gemini 2.5 Pro Preview 06-05 + name: "Google: Gemini 2.5 Pro Preview 06-05" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-2.5-pro-preview-05-06 - name: Google: Gemini 2.5 Pro Preview 05-06 + name: "Google: Gemini 2.5 Pro Preview 05-06" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-3-flash-preview - name: Google: Gemini 3 Flash Preview + name: "Google: Gemini 3 Flash Preview" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-3-pro-image-preview - name: Google: Nano Banana Pro (Gemini 3 Pro Image Preview) + name: "Google: Nano Banana Pro (Gemini 3 Pro Image Preview)" type: chat config: compatibilities: [vision, image-output, reasoning] context_window: 65536 - model_id: google/gemini-3.1-flash-image-preview - 
name: Google: Nano Banana 2 (Gemini 3.1 Flash Image Preview) + name: "Google: Nano Banana 2 (Gemini 3.1 Flash Image Preview)" type: chat config: compatibilities: [vision, image-output, reasoning] context_window: 65536 - model_id: google/gemini-3.1-flash-lite-preview - name: Google: Gemini 3.1 Flash Lite Preview + name: "Google: Gemini 3.1 Flash Lite Preview" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-3.1-pro-preview - name: Google: Gemini 3.1 Pro Preview + name: "Google: Gemini 3.1 Pro Preview" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemini-3.1-pro-preview-customtools - name: Google: Gemini 3.1 Pro Preview Custom Tools + name: "Google: Gemini 3.1 Pro Preview Custom Tools" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1048576 - model_id: google/gemma-2-27b-it - name: Google: Gemma 2 27B + name: "Google: Gemma 2 27B" type: chat config: context_window: 8192 - model_id: google/gemma-2-9b-it - name: Google: Gemma 2 9B + name: "Google: Gemma 2 9B" type: chat config: context_window: 8192 - model_id: google/gemma-3-12b-it - name: Google: Gemma 3 12B + name: "Google: Gemma 3 12B" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: "google/gemma-3-12b-it:free" - name: Google: Gemma 3 12B (free) + name: "Google: Gemma 3 12B (free)" type: chat config: compatibilities: [vision] context_window: 32768 - model_id: google/gemma-3-27b-it - name: Google: Gemma 3 27B + name: "Google: Gemma 3 27B" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: "google/gemma-3-27b-it:free" - name: Google: Gemma 3 27B (free) + name: "Google: Gemma 3 27B (free)" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: google/gemma-3-4b-it - name: Google: Gemma 3 4B + name: "Google: Gemma 3 4B" type: chat config: compatibilities: [vision] 
context_window: 131072 - model_id: "google/gemma-3-4b-it:free" - name: Google: Gemma 3 4B (free) + name: "Google: Gemma 3 4B (free)" type: chat config: compatibilities: [vision] context_window: 32768 - model_id: "google/gemma-3n-e2b-it:free" - name: Google: Gemma 3n 2B (free) + name: "Google: Gemma 3n 2B (free)" type: chat config: context_window: 8192 - model_id: google/gemma-3n-e4b-it - name: Google: Gemma 3n 4B + name: "Google: Gemma 3n 4B" type: chat config: context_window: 32768 - model_id: "google/gemma-3n-e4b-it:free" - name: Google: Gemma 3n 4B (free) + name: "Google: Gemma 3n 4B (free)" type: chat config: context_window: 8192 - model_id: google/lyria-3-clip-preview - name: Google: Lyria 3 Clip Preview + name: "Google: Lyria 3 Clip Preview" type: chat config: compatibilities: [vision] context_window: 1048576 - model_id: google/lyria-3-pro-preview - name: Google: Lyria 3 Pro Preview + name: "Google: Lyria 3 Pro Preview" type: chat config: compatibilities: [vision] @@ -575,145 +575,145 @@ models: config: context_window: 4096 - model_id: ibm-granite/granite-4.0-h-micro - name: IBM: Granite 4.0 Micro + name: "IBM: Granite 4.0 Micro" type: chat config: context_window: 131000 - model_id: inception/mercury - name: Inception: Mercury + name: "Inception: Mercury" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: inception/mercury-2 - name: Inception: Mercury 2 + name: "Inception: Mercury 2" type: chat config: compatibilities: [tool-call, reasoning] context_window: 128000 - model_id: inception/mercury-coder - name: Inception: Mercury Coder + name: "Inception: Mercury Coder" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: inflection/inflection-3-pi - name: Inflection: Inflection 3 Pi + name: "Inflection: Inflection 3 Pi" type: chat config: context_window: 8000 - model_id: inflection/inflection-3-productivity - name: Inflection: Inflection 3 Productivity + name: "Inflection: Inflection 3 Productivity" 
type: chat config: context_window: 8000 - model_id: kwaipilot/kat-coder-pro-v2 - name: Kwaipilot: KAT-Coder-Pro V2 + name: "Kwaipilot: KAT-Coder-Pro V2" type: chat config: compatibilities: [tool-call] context_window: 256000 - model_id: liquid/lfm-2-24b-a2b - name: LiquidAI: LFM2-24B-A2B + name: "LiquidAI: LFM2-24B-A2B" type: chat config: context_window: 32768 - model_id: liquid/lfm-2.2-6b - name: LiquidAI: LFM2-2.6B + name: "LiquidAI: LFM2-2.6B" type: chat config: context_window: 32768 - model_id: "liquid/lfm-2.5-1.2b-instruct:free" - name: LiquidAI: LFM2.5-1.2B-Instruct (free) + name: "LiquidAI: LFM2.5-1.2B-Instruct (free)" type: chat config: context_window: 32768 - model_id: "liquid/lfm-2.5-1.2b-thinking:free" - name: LiquidAI: LFM2.5-1.2B-Thinking (free) + name: "LiquidAI: LFM2.5-1.2B-Thinking (free)" type: chat config: compatibilities: [reasoning] context_window: 32768 - model_id: liquid/lfm2-8b-a1b - name: LiquidAI: LFM2-8B-A1B + name: "LiquidAI: LFM2-8B-A1B" type: chat config: context_window: 32768 - model_id: mancer/weaver - name: Mancer: Weaver (alpha) + name: "Mancer: Weaver (alpha)" type: chat config: context_window: 8000 - model_id: meituan/longcat-flash-chat - name: Meituan: LongCat Flash Chat + name: "Meituan: LongCat Flash Chat" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: meta-llama/llama-3-70b-instruct - name: Meta: Llama 3 70B Instruct + name: "Meta: Llama 3 70B Instruct" type: chat config: context_window: 8192 - model_id: meta-llama/llama-3-8b-instruct - name: Meta: Llama 3 8B Instruct + name: "Meta: Llama 3 8B Instruct" type: chat config: compatibilities: [tool-call] context_window: 8192 - model_id: meta-llama/llama-3.1-70b-instruct - name: Meta: Llama 3.1 70B Instruct + name: "Meta: Llama 3.1 70B Instruct" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: meta-llama/llama-3.1-8b-instruct - name: Meta: Llama 3.1 8B Instruct + name: "Meta: Llama 3.1 8B Instruct" type: chat 
config: compatibilities: [tool-call] context_window: 16384 - model_id: meta-llama/llama-3.2-11b-vision-instruct - name: Meta: Llama 3.2 11B Vision Instruct + name: "Meta: Llama 3.2 11B Vision Instruct" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: meta-llama/llama-3.2-1b-instruct - name: Meta: Llama 3.2 1B Instruct + name: "Meta: Llama 3.2 1B Instruct" type: chat config: context_window: 60000 - model_id: meta-llama/llama-3.2-3b-instruct - name: Meta: Llama 3.2 3B Instruct + name: "Meta: Llama 3.2 3B Instruct" type: chat config: context_window: 80000 - model_id: "meta-llama/llama-3.2-3b-instruct:free" - name: Meta: Llama 3.2 3B Instruct (free) + name: "Meta: Llama 3.2 3B Instruct (free)" type: chat config: context_window: 131072 - model_id: meta-llama/llama-3.3-70b-instruct - name: Meta: Llama 3.3 70B Instruct + name: "Meta: Llama 3.3 70B Instruct" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: "meta-llama/llama-3.3-70b-instruct:free" - name: Meta: Llama 3.3 70B Instruct (free) + name: "Meta: Llama 3.3 70B Instruct (free)" type: chat config: compatibilities: [tool-call] context_window: 65536 - model_id: meta-llama/llama-4-maverick - name: Meta: Llama 4 Maverick + name: "Meta: Llama 4 Maverick" type: chat config: compatibilities: [vision, tool-call] context_window: 1048576 - model_id: meta-llama/llama-4-scout - name: Meta: Llama 4 Scout + name: "Meta: Llama 4 Scout" type: chat config: compatibilities: [vision, tool-call] @@ -724,13 +724,13 @@ models: config: context_window: 131072 - model_id: meta-llama/llama-guard-4-12b - name: Meta: Llama Guard 4 12B + name: "Meta: Llama Guard 4 12B" type: chat config: compatibilities: [vision] context_window: 163840 - model_id: microsoft/phi-4 - name: Microsoft: Phi 4 + name: "Microsoft: Phi 4" type: chat config: context_window: 16384 @@ -740,96 +740,96 @@ models: config: context_window: 65535 - model_id: minimax/minimax-01 - name: MiniMax: MiniMax-01 + name: 
"MiniMax: MiniMax-01" type: chat config: compatibilities: [vision] context_window: 1000192 - model_id: minimax/minimax-m1 - name: MiniMax: MiniMax M1 + name: "MiniMax: MiniMax M1" type: chat config: compatibilities: [tool-call, reasoning] context_window: 1000000 - model_id: minimax/minimax-m2 - name: MiniMax: MiniMax M2 + name: "MiniMax: MiniMax M2" type: chat config: compatibilities: [tool-call, reasoning] context_window: 196608 - model_id: minimax/minimax-m2-her - name: MiniMax: MiniMax M2-her + name: "MiniMax: MiniMax M2-her" type: chat config: context_window: 65536 - model_id: minimax/minimax-m2.1 - name: MiniMax: MiniMax M2.1 + name: "MiniMax: MiniMax M2.1" type: chat config: compatibilities: [tool-call, reasoning] context_window: 196608 - model_id: minimax/minimax-m2.5 - name: MiniMax: MiniMax M2.5 + name: "MiniMax: MiniMax M2.5" type: chat config: compatibilities: [tool-call, reasoning] context_window: 196600 - model_id: "minimax/minimax-m2.5:free" - name: MiniMax: MiniMax M2.5 (free) + name: "MiniMax: MiniMax M2.5 (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 196608 - model_id: minimax/minimax-m2.7 - name: MiniMax: MiniMax M2.7 + name: "MiniMax: MiniMax M2.7" type: chat config: compatibilities: [tool-call, reasoning] context_window: 204800 - model_id: mistralai/codestral-2508 - name: Mistral: Codestral 2508 + name: "Mistral: Codestral 2508" type: chat config: compatibilities: [tool-call] context_window: 256000 - model_id: mistralai/devstral-2512 - name: Mistral: Devstral 2 2512 + name: "Mistral: Devstral 2 2512" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: mistralai/devstral-medium - name: Mistral: Devstral Medium + name: "Mistral: Devstral Medium" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: mistralai/devstral-small - name: Mistral: Devstral Small 1.1 + name: "Mistral: Devstral Small 1.1" type: chat config: compatibilities: [tool-call] 
context_window: 131072 - model_id: mistralai/ministral-14b-2512 - name: Mistral: Ministral 3 14B 2512 + name: "Mistral: Ministral 3 14B 2512" type: chat config: compatibilities: [vision, tool-call] context_window: 262144 - model_id: mistralai/ministral-3b-2512 - name: Mistral: Ministral 3 3B 2512 + name: "Mistral: Ministral 3 3B 2512" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: mistralai/ministral-8b-2512 - name: Mistral: Ministral 3 8B 2512 + name: "Mistral: Ministral 3 8B 2512" type: chat config: compatibilities: [vision, tool-call] context_window: 262144 - model_id: mistralai/mistral-7b-instruct-v0.1 - name: Mistral: Mistral 7B Instruct v0.1 + name: "Mistral: Mistral 7B Instruct v0.1" type: chat config: context_window: 2824 @@ -852,591 +852,591 @@ models: compatibilities: [tool-call] context_window: 131072 - model_id: mistralai/mistral-large-2512 - name: Mistral: Mistral Large 3 2512 + name: "Mistral: Mistral Large 3 2512" type: chat config: compatibilities: [vision, tool-call] context_window: 262144 - model_id: mistralai/mistral-medium-3 - name: Mistral: Mistral Medium 3 + name: "Mistral: Mistral Medium 3" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: mistralai/mistral-medium-3.1 - name: Mistral: Mistral Medium 3.1 + name: "Mistral: Mistral Medium 3.1" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: mistralai/mistral-nemo - name: Mistral: Mistral Nemo + name: "Mistral: Mistral Nemo" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: mistralai/mistral-saba - name: Mistral: Saba + name: "Mistral: Saba" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: mistralai/mistral-small-24b-instruct-2501 - name: Mistral: Mistral Small 3 + name: "Mistral: Mistral Small 3" type: chat config: context_window: 32768 - model_id: mistralai/mistral-small-2603 - name: Mistral: Mistral 
Small 4 + name: "Mistral: Mistral Small 4" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: mistralai/mistral-small-3.1-24b-instruct - name: Mistral: Mistral Small 3.1 24B + name: "Mistral: Mistral Small 3.1 24B" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: mistralai/mistral-small-3.2-24b-instruct - name: Mistral: Mistral Small 3.2 24B + name: "Mistral: Mistral Small 3.2 24B" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: mistralai/mistral-small-creative - name: Mistral: Mistral Small Creative + name: "Mistral: Mistral Small Creative" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: mistralai/mixtral-8x22b-instruct - name: Mistral: Mixtral 8x22B Instruct + name: "Mistral: Mixtral 8x22B Instruct" type: chat config: compatibilities: [tool-call] context_window: 65536 - model_id: mistralai/mixtral-8x7b-instruct - name: Mistral: Mixtral 8x7B Instruct + name: "Mistral: Mixtral 8x7B Instruct" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: mistralai/pixtral-large-2411 - name: Mistral: Pixtral Large 2411 + name: "Mistral: Pixtral Large 2411" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: mistralai/voxtral-small-24b-2507 - name: Mistral: Voxtral Small 24B 2507 + name: "Mistral: Voxtral Small 24B 2507" type: chat config: compatibilities: [tool-call] context_window: 32000 - model_id: moonshotai/kimi-k2 - name: MoonshotAI: Kimi K2 0711 + name: "MoonshotAI: Kimi K2 0711" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: moonshotai/kimi-k2-0905 - name: MoonshotAI: Kimi K2 0905 + name: "MoonshotAI: Kimi K2 0905" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: moonshotai/kimi-k2-thinking - name: MoonshotAI: Kimi K2 Thinking + name: "MoonshotAI: Kimi K2 Thinking" type: chat 
config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: moonshotai/kimi-k2.5 - name: MoonshotAI: Kimi K2.5 + name: "MoonshotAI: Kimi K2.5" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: morph/morph-v3-fast - name: Morph: Morph V3 Fast + name: "Morph: Morph V3 Fast" type: chat config: context_window: 81920 - model_id: morph/morph-v3-large - name: Morph: Morph V3 Large + name: "Morph: Morph V3 Large" type: chat config: context_window: 262144 - model_id: nex-agi/deepseek-v3.1-nex-n1 - name: Nex AGI: DeepSeek V3.1 Nex N1 + name: "Nex AGI: DeepSeek V3.1 Nex N1" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: nousresearch/hermes-2-pro-llama-3-8b - name: NousResearch: Hermes 2 Pro - Llama-3 8B + name: "NousResearch: Hermes 2 Pro - Llama-3 8B" type: chat config: context_window: 8192 - model_id: nousresearch/hermes-3-llama-3.1-405b - name: Nous: Hermes 3 405B Instruct + name: "Nous: Hermes 3 405B Instruct" type: chat config: context_window: 131072 - model_id: "nousresearch/hermes-3-llama-3.1-405b:free" - name: Nous: Hermes 3 405B Instruct (free) + name: "Nous: Hermes 3 405B Instruct (free)" type: chat config: context_window: 131072 - model_id: nousresearch/hermes-3-llama-3.1-70b - name: Nous: Hermes 3 70B Instruct + name: "Nous: Hermes 3 70B Instruct" type: chat config: context_window: 131072 - model_id: nousresearch/hermes-4-405b - name: Nous: Hermes 4 405B + name: "Nous: Hermes 4 405B" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: nousresearch/hermes-4-70b - name: Nous: Hermes 4 70B + name: "Nous: Hermes 4 70B" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: nvidia/llama-3.1-nemotron-70b-instruct - name: NVIDIA: Llama 3.1 Nemotron 70B Instruct + name: "NVIDIA: Llama 3.1 Nemotron 70B Instruct" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: 
nvidia/llama-3.1-nemotron-ultra-253b-v1 - name: NVIDIA: Llama 3.1 Nemotron Ultra 253B v1 + name: "NVIDIA: Llama 3.1 Nemotron Ultra 253B v1" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: nvidia/llama-3.3-nemotron-super-49b-v1.5 - name: NVIDIA: Llama 3.3 Nemotron Super 49B V1.5 + name: "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: nvidia/nemotron-3-nano-30b-a3b - name: NVIDIA: Nemotron 3 Nano 30B A3B + name: "NVIDIA: Nemotron 3 Nano 30B A3B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: "nvidia/nemotron-3-nano-30b-a3b:free" - name: NVIDIA: Nemotron 3 Nano 30B A3B (free) + name: "NVIDIA: Nemotron 3 Nano 30B A3B (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 256000 - model_id: nvidia/nemotron-3-super-120b-a12b - name: NVIDIA: Nemotron 3 Super + name: "NVIDIA: Nemotron 3 Super" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: "nvidia/nemotron-3-super-120b-a12b:free" - name: NVIDIA: Nemotron 3 Super (free) + name: "NVIDIA: Nemotron 3 Super (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: nvidia/nemotron-nano-12b-v2-vl - name: NVIDIA: Nemotron Nano 12B 2 VL + name: "NVIDIA: Nemotron Nano 12B 2 VL" type: chat config: compatibilities: [vision, reasoning] context_window: 131072 - model_id: "nvidia/nemotron-nano-12b-v2-vl:free" - name: NVIDIA: Nemotron Nano 12B 2 VL (free) + name: "NVIDIA: Nemotron Nano 12B 2 VL (free)" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 128000 - model_id: nvidia/nemotron-nano-9b-v2 - name: NVIDIA: Nemotron Nano 9B V2 + name: "NVIDIA: Nemotron Nano 9B V2" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: "nvidia/nemotron-nano-9b-v2:free" - name: NVIDIA: Nemotron 
Nano 9B V2 (free) + name: "NVIDIA: Nemotron Nano 9B V2 (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 128000 - model_id: openai/gpt-3.5-turbo - name: OpenAI: GPT-3.5 Turbo + name: "OpenAI: GPT-3.5 Turbo" type: chat config: compatibilities: [tool-call] context_window: 16385 - model_id: openai/gpt-3.5-turbo-0613 - name: OpenAI: GPT-3.5 Turbo (older v0613) + name: "OpenAI: GPT-3.5 Turbo (older v0613)" type: chat config: compatibilities: [tool-call] context_window: 4095 - model_id: openai/gpt-3.5-turbo-16k - name: OpenAI: GPT-3.5 Turbo 16k + name: "OpenAI: GPT-3.5 Turbo 16k" type: chat config: compatibilities: [tool-call] context_window: 16385 - model_id: openai/gpt-3.5-turbo-instruct - name: OpenAI: GPT-3.5 Turbo Instruct + name: "OpenAI: GPT-3.5 Turbo Instruct" type: chat config: context_window: 4095 - model_id: openai/gpt-4 - name: OpenAI: GPT-4 + name: "OpenAI: GPT-4" type: chat config: compatibilities: [tool-call] context_window: 8191 - model_id: openai/gpt-4-0314 - name: OpenAI: GPT-4 (older v0314) + name: "OpenAI: GPT-4 (older v0314)" type: chat config: compatibilities: [tool-call] context_window: 8191 - model_id: openai/gpt-4-1106-preview - name: OpenAI: GPT-4 Turbo (older v1106) + name: "OpenAI: GPT-4 Turbo (older v1106)" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: openai/gpt-4-turbo - name: OpenAI: GPT-4 Turbo + name: "OpenAI: GPT-4 Turbo" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4-turbo-preview - name: OpenAI: GPT-4 Turbo Preview + name: "OpenAI: GPT-4 Turbo Preview" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: openai/gpt-4.1 - name: OpenAI: GPT-4.1 + name: "OpenAI: GPT-4.1" type: chat config: compatibilities: [vision, tool-call] context_window: 1047576 - model_id: openai/gpt-4.1-mini - name: OpenAI: GPT-4.1 Mini + name: "OpenAI: GPT-4.1 Mini" type: chat config: compatibilities: [vision, 
tool-call] context_window: 1047576 - model_id: openai/gpt-4.1-nano - name: OpenAI: GPT-4.1 Nano + name: "OpenAI: GPT-4.1 Nano" type: chat config: compatibilities: [vision, tool-call] context_window: 1047576 - model_id: openai/gpt-4o - name: OpenAI: GPT-4o + name: "OpenAI: GPT-4o" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-2024-05-13 - name: OpenAI: GPT-4o (2024-05-13) + name: "OpenAI: GPT-4o (2024-05-13)" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-2024-08-06 - name: OpenAI: GPT-4o (2024-08-06) + name: "OpenAI: GPT-4o (2024-08-06)" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-2024-11-20 - name: OpenAI: GPT-4o (2024-11-20) + name: "OpenAI: GPT-4o (2024-11-20)" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-audio-preview - name: OpenAI: GPT-4o Audio + name: "OpenAI: GPT-4o Audio" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: openai/gpt-4o-mini - name: OpenAI: GPT-4o-mini + name: "OpenAI: GPT-4o-mini" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-mini-2024-07-18 - name: OpenAI: GPT-4o-mini (2024-07-18) + name: "OpenAI: GPT-4o-mini (2024-07-18)" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-4o-mini-search-preview - name: OpenAI: GPT-4o-mini Search Preview + name: "OpenAI: GPT-4o-mini Search Preview" type: chat config: context_window: 128000 - model_id: openai/gpt-4o-search-preview - name: OpenAI: GPT-4o Search Preview + name: "OpenAI: GPT-4o Search Preview" type: chat config: context_window: 128000 - model_id: "openai/gpt-4o:extended" - name: OpenAI: GPT-4o (extended) + name: "OpenAI: GPT-4o (extended)" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - 
model_id: openai/gpt-5 - name: OpenAI: GPT-5 + name: "OpenAI: GPT-5" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5-chat - name: OpenAI: GPT-5 Chat + name: "OpenAI: GPT-5 Chat" type: chat config: compatibilities: [vision] context_window: 128000 - model_id: openai/gpt-5-codex - name: OpenAI: GPT-5 Codex + name: "OpenAI: GPT-5 Codex" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5-image - name: OpenAI: GPT-5 Image + name: "OpenAI: GPT-5 Image" type: chat config: compatibilities: [vision, tool-call, image-output, reasoning] context_window: 400000 - model_id: openai/gpt-5-image-mini - name: OpenAI: GPT-5 Image Mini + name: "OpenAI: GPT-5 Image Mini" type: chat config: compatibilities: [vision, tool-call, image-output, reasoning] context_window: 400000 - model_id: openai/gpt-5-mini - name: OpenAI: GPT-5 Mini + name: "OpenAI: GPT-5 Mini" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5-nano - name: OpenAI: GPT-5 Nano + name: "OpenAI: GPT-5 Nano" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5-pro - name: OpenAI: GPT-5 Pro + name: "OpenAI: GPT-5 Pro" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.1 - name: OpenAI: GPT-5.1 + name: "OpenAI: GPT-5.1" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.1-chat - name: OpenAI: GPT-5.1 Chat + name: "OpenAI: GPT-5.1 Chat" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-5.1-codex - name: OpenAI: GPT-5.1-Codex + name: "OpenAI: GPT-5.1-Codex" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.1-codex-max - name: OpenAI: 
GPT-5.1-Codex-Max + name: "OpenAI: GPT-5.1-Codex-Max" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.1-codex-mini - name: OpenAI: GPT-5.1-Codex-Mini + name: "OpenAI: GPT-5.1-Codex-Mini" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.2 - name: OpenAI: GPT-5.2 + name: "OpenAI: GPT-5.2" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.2-chat - name: OpenAI: GPT-5.2 Chat + name: "OpenAI: GPT-5.2 Chat" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-5.2-codex - name: OpenAI: GPT-5.2-Codex + name: "OpenAI: GPT-5.2-Codex" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.2-pro - name: OpenAI: GPT-5.2 Pro + name: "OpenAI: GPT-5.2 Pro" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.3-chat - name: OpenAI: GPT-5.3 Chat + name: "OpenAI: GPT-5.3 Chat" type: chat config: compatibilities: [vision, tool-call] context_window: 128000 - model_id: openai/gpt-5.3-codex - name: OpenAI: GPT-5.3-Codex + name: "OpenAI: GPT-5.3-Codex" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.4 - name: OpenAI: GPT-5.4 + name: "OpenAI: GPT-5.4" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1050000 - model_id: openai/gpt-5.4-mini - name: OpenAI: GPT-5.4 Mini + name: "OpenAI: GPT-5.4 Mini" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.4-nano - name: OpenAI: GPT-5.4 Nano + name: "OpenAI: GPT-5.4 Nano" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 400000 - model_id: openai/gpt-5.4-pro - name: OpenAI: GPT-5.4 Pro + 
name: "OpenAI: GPT-5.4 Pro" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1050000 - model_id: openai/gpt-audio - name: OpenAI: GPT Audio + name: "OpenAI: GPT Audio" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: openai/gpt-audio-mini - name: OpenAI: GPT Audio Mini + name: "OpenAI: GPT Audio Mini" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: openai/gpt-oss-120b - name: OpenAI: gpt-oss-120b + name: "OpenAI: gpt-oss-120b" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: "openai/gpt-oss-120b:free" - name: OpenAI: gpt-oss-120b (free) + name: "OpenAI: gpt-oss-120b (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: openai/gpt-oss-20b - name: OpenAI: gpt-oss-20b + name: "OpenAI: gpt-oss-20b" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: "openai/gpt-oss-20b:free" - name: OpenAI: gpt-oss-20b (free) + name: "OpenAI: gpt-oss-20b (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: openai/gpt-oss-safeguard-20b - name: OpenAI: gpt-oss-safeguard-20b + name: "OpenAI: gpt-oss-safeguard-20b" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: openai/o1 - name: OpenAI: o1 + name: "OpenAI: o1" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: openai/o1-pro - name: OpenAI: o1-pro + name: "OpenAI: o1-pro" type: chat config: compatibilities: [vision, reasoning] context_window: 200000 - model_id: openai/o3 - name: OpenAI: o3 + name: "OpenAI: o3" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: openai/o3-deep-research - name: OpenAI: o3 Deep Research + name: "OpenAI: o3 Deep Research" type: chat config: compatibilities: [vision, tool-call, 
reasoning] context_window: 200000 - model_id: openai/o3-mini - name: OpenAI: o3 Mini + name: "OpenAI: o3 Mini" type: chat config: compatibilities: [tool-call, reasoning] context_window: 200000 - model_id: openai/o3-mini-high - name: OpenAI: o3 Mini High + name: "OpenAI: o3 Mini High" type: chat config: compatibilities: [tool-call, reasoning] context_window: 200000 - model_id: openai/o3-pro - name: OpenAI: o3 Pro + name: "OpenAI: o3 Pro" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: openai/o4-mini - name: OpenAI: o4 Mini + name: "OpenAI: o4 Mini" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: openai/o4-mini-deep-research - name: OpenAI: o4 Mini Deep Research + name: "OpenAI: o4 Mini Deep Research" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: openai/o4-mini-high - name: OpenAI: o4 Mini High + name: "OpenAI: o4 Mini High" type: chat config: compatibilities: [vision, tool-call, reasoning] @@ -1459,37 +1459,37 @@ models: compatibilities: [vision, tool-call, reasoning] context_window: 200000 - model_id: perplexity/sonar - name: Perplexity: Sonar + name: "Perplexity: Sonar" type: chat config: compatibilities: [vision] context_window: 127072 - model_id: perplexity/sonar-deep-research - name: Perplexity: Sonar Deep Research + name: "Perplexity: Sonar Deep Research" type: chat config: compatibilities: [reasoning] context_window: 128000 - model_id: perplexity/sonar-pro - name: Perplexity: Sonar Pro + name: "Perplexity: Sonar Pro" type: chat config: compatibilities: [vision] context_window: 200000 - model_id: perplexity/sonar-pro-search - name: Perplexity: Sonar Pro Search + name: "Perplexity: Sonar Pro Search" type: chat config: compatibilities: [vision, reasoning] context_window: 200000 - model_id: perplexity/sonar-reasoning-pro - name: Perplexity: Sonar Reasoning Pro + name: "Perplexity: Sonar Reasoning Pro" 
type: chat config: compatibilities: [vision, reasoning] context_window: 128000 - model_id: prime-intellect/intellect-3 - name: Prime Intellect: INTELLECT-3 + name: "Prime Intellect: INTELLECT-3" type: chat config: compatibilities: [tool-call, reasoning] @@ -1501,7 +1501,7 @@ models: compatibilities: [tool-call] context_window: 32768 - model_id: qwen/qwen-2.5-7b-instruct - name: Qwen: Qwen2.5 7B Instruct + name: "Qwen: Qwen2.5 7B Instruct" type: chat config: compatibilities: [tool-call] @@ -1512,282 +1512,282 @@ models: config: context_window: 32768 - model_id: qwen/qwen-max - name: Qwen: Qwen-Max + name: "Qwen: Qwen-Max" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: qwen/qwen-plus - name: Qwen: Qwen-Plus + name: "Qwen: Qwen-Plus" type: chat config: compatibilities: [tool-call] context_window: 1000000 - model_id: qwen/qwen-plus-2025-07-28 - name: Qwen: Qwen Plus 0728 + name: "Qwen: Qwen Plus 0728" type: chat config: compatibilities: [tool-call] context_window: 1000000 - model_id: "qwen/qwen-plus-2025-07-28:thinking" - name: Qwen: Qwen Plus 0728 (thinking) + name: "Qwen: Qwen Plus 0728 (thinking)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 1000000 - model_id: qwen/qwen-turbo - name: Qwen: Qwen-Turbo + name: "Qwen: Qwen-Turbo" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: qwen/qwen-vl-max - name: Qwen: Qwen VL Max + name: "Qwen: Qwen VL Max" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: qwen/qwen-vl-plus - name: Qwen: Qwen VL Plus + name: "Qwen: Qwen VL Plus" type: chat config: compatibilities: [vision] context_window: 131072 - model_id: qwen/qwen2.5-coder-7b-instruct - name: Qwen: Qwen2.5 Coder 7B Instruct + name: "Qwen: Qwen2.5 Coder 7B Instruct" type: chat config: context_window: 32768 - model_id: qwen/qwen2.5-vl-32b-instruct - name: Qwen: Qwen2.5 VL 32B Instruct + name: "Qwen: Qwen2.5 VL 32B Instruct" type: chat
config: compatibilities: [vision] context_window: 128000 - model_id: qwen/qwen2.5-vl-72b-instruct - name: Qwen: Qwen2.5 VL 72B Instruct + name: "Qwen: Qwen2.5 VL 72B Instruct" type: chat config: compatibilities: [vision] context_window: 32768 - model_id: qwen/qwen3-14b - name: Qwen: Qwen3 14B + name: "Qwen: Qwen3 14B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 40960 - model_id: qwen/qwen3-235b-a22b - name: Qwen: Qwen3 235B A22B + name: "Qwen: Qwen3 235B A22B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3-235b-a22b-2507 - name: Qwen: Qwen3 235B A22B Instruct 2507 + name: "Qwen: Qwen3 235B A22B Instruct 2507" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: qwen/qwen3-235b-a22b-thinking-2507 - name: Qwen: Qwen3 235B A22B Thinking 2507 + name: "Qwen: Qwen3 235B A22B Thinking 2507" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3-30b-a3b - name: Qwen: Qwen3 30B A3B + name: "Qwen: Qwen3 30B A3B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 40960 - model_id: qwen/qwen3-30b-a3b-instruct-2507 - name: Qwen: Qwen3 30B A3B Instruct 2507 + name: "Qwen: Qwen3 30B A3B Instruct 2507" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: qwen/qwen3-30b-a3b-thinking-2507 - name: Qwen: Qwen3 30B A3B Thinking 2507 + name: "Qwen: Qwen3 30B A3B Thinking 2507" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3-32b - name: Qwen: Qwen3 32B + name: "Qwen: Qwen3 32B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 40960 - model_id: qwen/qwen3-8b - name: Qwen: Qwen3 8B + name: "Qwen: Qwen3 8B" type: chat config: compatibilities: [tool-call, reasoning] context_window: 40960 - model_id: qwen/qwen3-coder - name: Qwen: Qwen3 Coder 480B A35B + name: "Qwen: Qwen3 Coder 
480B A35B" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: qwen/qwen3-coder-30b-a3b-instruct - name: Qwen: Qwen3 Coder 30B A3B Instruct + name: "Qwen: Qwen3 Coder 30B A3B Instruct" type: chat config: compatibilities: [tool-call] context_window: 160000 - model_id: qwen/qwen3-coder-flash - name: Qwen: Qwen3 Coder Flash + name: "Qwen: Qwen3 Coder Flash" type: chat config: compatibilities: [tool-call] context_window: 1000000 - model_id: qwen/qwen3-coder-next - name: Qwen: Qwen3 Coder Next + name: "Qwen: Qwen3 Coder Next" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: qwen/qwen3-coder-plus - name: Qwen: Qwen3 Coder Plus + name: "Qwen: Qwen3 Coder Plus" type: chat config: compatibilities: [tool-call] context_window: 1000000 - model_id: "qwen/qwen3-coder:free" - name: Qwen: Qwen3 Coder 480B A35B (free) + name: "Qwen: Qwen3 Coder 480B A35B (free)" type: chat config: compatibilities: [tool-call] context_window: 262000 - model_id: qwen/qwen3-max - name: Qwen: Qwen3 Max + name: "Qwen: Qwen3 Max" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: qwen/qwen3-max-thinking - name: Qwen: Qwen3 Max Thinking + name: "Qwen: Qwen3 Max Thinking" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: qwen/qwen3-next-80b-a3b-instruct - name: Qwen: Qwen3 Next 80B A3B Instruct + name: "Qwen: Qwen3 Next 80B A3B Instruct" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: "qwen/qwen3-next-80b-a3b-instruct:free" - name: Qwen: Qwen3 Next 80B A3B Instruct (free) + name: "Qwen: Qwen3 Next 80B A3B Instruct (free)" type: chat config: compatibilities: [tool-call] context_window: 262144 - model_id: qwen/qwen3-next-80b-a3b-thinking - name: Qwen: Qwen3 Next 80B A3B Thinking + name: "Qwen: Qwen3 Next 80B A3B Thinking" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: 
qwen/qwen3-vl-235b-a22b-instruct - name: Qwen: Qwen3 VL 235B A22B Instruct + name: "Qwen: Qwen3 VL 235B A22B Instruct" type: chat config: compatibilities: [vision, tool-call] context_window: 262144 - model_id: qwen/qwen3-vl-235b-a22b-thinking - name: Qwen: Qwen3 VL 235B A22B Thinking + name: "Qwen: Qwen3 VL 235B A22B Thinking" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3-vl-30b-a3b-instruct - name: Qwen: Qwen3 VL 30B A3B Instruct + name: "Qwen: Qwen3 VL 30B A3B Instruct" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: qwen/qwen3-vl-30b-a3b-thinking - name: Qwen: Qwen3 VL 30B A3B Thinking + name: "Qwen: Qwen3 VL 30B A3B Thinking" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3-vl-32b-instruct - name: Qwen: Qwen3 VL 32B Instruct + name: "Qwen: Qwen3 VL 32B Instruct" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: qwen/qwen3-vl-8b-instruct - name: Qwen: Qwen3 VL 8B Instruct + name: "Qwen: Qwen3 VL 8B Instruct" type: chat config: compatibilities: [vision, tool-call] context_window: 131072 - model_id: qwen/qwen3-vl-8b-thinking - name: Qwen: Qwen3 VL 8B Thinking + name: "Qwen: Qwen3 VL 8B Thinking" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 131072 - model_id: qwen/qwen3.5-122b-a10b - name: Qwen: Qwen3.5-122B-A10B + name: "Qwen: Qwen3.5-122B-A10B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: qwen/qwen3.5-27b - name: Qwen: Qwen3.5-27B + name: "Qwen: Qwen3.5-27B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: qwen/qwen3.5-35b-a3b - name: Qwen: Qwen3.5-35B-A3B + name: "Qwen: Qwen3.5-35B-A3B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: 
qwen/qwen3.5-397b-a17b - name: Qwen: Qwen3.5 397B A17B + name: "Qwen: Qwen3.5 397B A17B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: qwen/qwen3.5-9b - name: Qwen: Qwen3.5-9B + name: "Qwen: Qwen3.5-9B" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 256000 - model_id: qwen/qwen3.5-flash-02-23 - name: Qwen: Qwen3.5-Flash + name: "Qwen: Qwen3.5-Flash" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: qwen/qwen3.5-plus-02-15 - name: Qwen: Qwen3.5 Plus 2026-02-15 + name: "Qwen: Qwen3.5 Plus 2026-02-15" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: "qwen/qwen3.6-plus-preview:free" - name: Qwen: Qwen3.6 Plus Preview (free) + name: "Qwen: Qwen3.6 Plus Preview (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 1000000 - model_id: "qwen/qwen3.6-plus:free" - name: Qwen: Qwen3.6 Plus (free) + name: "Qwen: Qwen3.6 Plus (free)" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 1000000 - model_id: qwen/qwq-32b - name: Qwen: QwQ 32B + name: "Qwen: QwQ 32B" type: chat config: compatibilities: [tool-call, reasoning] @@ -1805,51 +1805,51 @@ models: compatibilities: [reasoning] context_window: 65536 - model_id: relace/relace-apply-3 - name: Relace: Relace Apply 3 + name: "Relace: Relace Apply 3" type: chat config: context_window: 256000 - model_id: relace/relace-search - name: Relace: Relace Search + name: "Relace: Relace Search" type: chat config: compatibilities: [tool-call] context_window: 256000 - model_id: sao10k/l3-euryale-70b - name: Sao10k: Llama 3 Euryale 70B v2.1 + name: "Sao10k: Llama 3 Euryale 70B v2.1" type: chat config: compatibilities: [tool-call] context_window: 8192 - model_id: sao10k/l3-lunaris-8b - name: Sao10K: Llama 3 8B Lunaris + name: "Sao10K: Llama 3 8B Lunaris" type: chat config: context_window: 
8192 - model_id: sao10k/l3.1-70b-hanami-x1 - name: Sao10K: Llama 3.1 70B Hanami x1 + name: "Sao10K: Llama 3.1 70B Hanami x1" type: chat config: context_window: 16000 - model_id: sao10k/l3.1-euryale-70b - name: Sao10K: Llama 3.1 Euryale 70B v2.2 + name: "Sao10K: Llama 3.1 Euryale 70B v2.2" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: sao10k/l3.3-euryale-70b - name: Sao10K: Llama 3.3 Euryale 70B + name: "Sao10K: Llama 3.3 Euryale 70B" type: chat config: context_window: 131072 - model_id: stepfun/step-3.5-flash - name: StepFun: Step 3.5 Flash + name: "StepFun: Step 3.5 Flash" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: "stepfun/step-3.5-flash:free" - name: StepFun: Step 3.5 Flash (free) + name: "StepFun: Step 3.5 Flash (free)" type: chat config: compatibilities: [tool-call, reasoning] @@ -1861,35 +1861,35 @@ models: compatibilities: [reasoning] context_window: 131072 - model_id: tencent/hunyuan-a13b-instruct - name: Tencent: Hunyuan A13B Instruct + name: "Tencent: Hunyuan A13B Instruct" type: chat config: compatibilities: [reasoning] context_window: 131072 - model_id: thedrummer/cydonia-24b-v4.1 - name: TheDrummer: Cydonia 24B V4.1 + name: "TheDrummer: Cydonia 24B V4.1" type: chat config: context_window: 131072 - model_id: thedrummer/rocinante-12b - name: TheDrummer: Rocinante 12B + name: "TheDrummer: Rocinante 12B" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: thedrummer/skyfall-36b-v2 - name: TheDrummer: Skyfall 36B V2 + name: "TheDrummer: Skyfall 36B V2" type: chat config: context_window: 32768 - model_id: thedrummer/unslopnemo-12b - name: TheDrummer: UnslopNemo 12B + name: "TheDrummer: UnslopNemo 12B" type: chat config: compatibilities: [tool-call] context_window: 32768 - model_id: tngtech/deepseek-r1t2-chimera - name: TNG: DeepSeek R1T2 Chimera + name: "TNG: DeepSeek R1T2 Chimera" type: chat config: compatibilities: [tool-call, reasoning] @@ 
-1900,162 +1900,162 @@ models: config: context_window: 6144 - model_id: upstage/solar-pro-3 - name: Upstage: Solar Pro 3 + name: "Upstage: Solar Pro 3" type: chat config: compatibilities: [tool-call, reasoning] context_window: 128000 - model_id: writer/palmyra-x5 - name: Writer: Palmyra X5 + name: "Writer: Palmyra X5" type: chat config: context_window: 1040000 - model_id: x-ai/grok-3 - name: xAI: Grok 3 + name: "xAI: Grok 3" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: x-ai/grok-3-beta - name: xAI: Grok 3 Beta + name: "xAI: Grok 3 Beta" type: chat config: compatibilities: [tool-call] context_window: 131072 - model_id: x-ai/grok-3-mini - name: xAI: Grok 3 Mini + name: "xAI: Grok 3 Mini" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: x-ai/grok-3-mini-beta - name: xAI: Grok 3 Mini Beta + name: "xAI: Grok 3 Mini Beta" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: x-ai/grok-4 - name: xAI: Grok 4 + name: "xAI: Grok 4" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 256000 - model_id: x-ai/grok-4-fast - name: xAI: Grok 4 Fast + name: "xAI: Grok 4 Fast" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 2000000 - model_id: x-ai/grok-4.1-fast - name: xAI: Grok 4.1 Fast + name: "xAI: Grok 4.1 Fast" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 2000000 - model_id: x-ai/grok-4.20 - name: xAI: Grok 4.20 + name: "xAI: Grok 4.20" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 2000000 - model_id: x-ai/grok-4.20-multi-agent - name: xAI: Grok 4.20 Multi-Agent + name: "xAI: Grok 4.20 Multi-Agent" type: chat config: compatibilities: [vision, reasoning] context_window: 2000000 - model_id: x-ai/grok-code-fast-1 - name: xAI: Grok Code Fast 1 + name: "xAI: Grok Code Fast 1" type: chat config: compatibilities: [tool-call, 
reasoning] context_window: 256000 - model_id: xiaomi/mimo-v2-flash - name: Xiaomi: MiMo-V2-Flash + name: "Xiaomi: MiMo-V2-Flash" type: chat config: compatibilities: [tool-call, reasoning] context_window: 262144 - model_id: xiaomi/mimo-v2-omni - name: Xiaomi: MiMo-V2-Omni + name: "Xiaomi: MiMo-V2-Omni" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 262144 - model_id: xiaomi/mimo-v2-pro - name: Xiaomi: MiMo-V2-Pro + name: "Xiaomi: MiMo-V2-Pro" type: chat config: compatibilities: [tool-call, reasoning] context_window: 1048576 - model_id: z-ai/glm-4-32b - name: Z.ai: GLM 4 32B + name: "Z.ai: GLM 4 32B" type: chat config: compatibilities: [tool-call] context_window: 128000 - model_id: z-ai/glm-4.5 - name: Z.ai: GLM 4.5 + name: "Z.ai: GLM 4.5" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: z-ai/glm-4.5-air - name: Z.ai: GLM 4.5 Air + name: "Z.ai: GLM 4.5 Air" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: "z-ai/glm-4.5-air:free" - name: Z.ai: GLM 4.5 Air (free) + name: "Z.ai: GLM 4.5 Air (free)" type: chat config: compatibilities: [tool-call, reasoning] context_window: 131072 - model_id: z-ai/glm-4.5v - name: Z.ai: GLM 4.5V + name: "Z.ai: GLM 4.5V" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 65536 - model_id: z-ai/glm-4.6 - name: Z.ai: GLM 4.6 + name: "Z.ai: GLM 4.6" type: chat config: compatibilities: [tool-call, reasoning] context_window: 204800 - model_id: z-ai/glm-4.6v - name: Z.ai: GLM 4.6V + name: "Z.ai: GLM 4.6V" type: chat config: compatibilities: [vision, tool-call, reasoning] context_window: 131072 - model_id: z-ai/glm-4.7 - name: Z.ai: GLM 4.7 + name: "Z.ai: GLM 4.7" type: chat config: compatibilities: [tool-call, reasoning] context_window: 202752 - model_id: z-ai/glm-4.7-flash - name: Z.ai: GLM 4.7 Flash + name: "Z.ai: GLM 4.7 Flash" type: chat config: compatibilities: [tool-call, reasoning]
context_window: 202752 - model_id: z-ai/glm-5 - name: Z.ai: GLM 5 + name: "Z.ai: GLM 5" type: chat config: compatibilities: [tool-call, reasoning] context_window: 80000 - model_id: z-ai/glm-5-turbo - name: Z.ai: GLM 5 Turbo + name: "Z.ai: GLM 5 Turbo" type: chat config: compatibilities: [tool-call, reasoning] context_window: 202752 - model_id: z-ai/glm-5v-turbo - name: Z.ai: GLM 5V Turbo + name: "Z.ai: GLM 5V Turbo" type: chat config: compatibilities: [vision, tool-call, reasoning] diff --git a/db/migrations/0001_init.up.sql b/db/migrations/0001_init.up.sql index 37df88f5..7f13aa17 100644 --- a/db/migrations/0001_init.up.sql +++ b/db/migrations/0001_init.up.sql @@ -57,19 +57,18 @@ CREATE TABLE IF NOT EXISTS user_channel_bindings ( CREATE INDEX IF NOT EXISTS idx_user_channel_bindings_user_id ON user_channel_bindings(user_id); -CREATE TABLE IF NOT EXISTS llm_providers ( +CREATE TABLE IF NOT EXISTS providers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT NOT NULL, - base_url TEXT NOT NULL, - api_key TEXT NOT NULL, client_type TEXT NOT NULL DEFAULT 'openai-completions', icon TEXT, enable BOOLEAN NOT NULL DEFAULT true, + config JSONB NOT NULL DEFAULT '{}'::jsonb, metadata JSONB NOT NULL DEFAULT '{}'::jsonb, created_at TIMESTAMPTZ NOT NULL DEFAULT now(), updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), - CONSTRAINT llm_providers_name_unique UNIQUE (name), - CONSTRAINT llm_providers_client_type_check CHECK (client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai', 'openai-codex')) + CONSTRAINT providers_name_unique UNIQUE (name), + CONSTRAINT providers_client_type_check CHECK (client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai', 'openai-codex', 'edge-speech')) ); CREATE TABLE IF NOT EXISTS search_providers ( @@ -87,13 +86,13 @@ CREATE TABLE IF NOT EXISTS models ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), model_id TEXT NOT NULL, name TEXT, - llm_provider_id UUID 
NOT NULL REFERENCES llm_providers(id) ON DELETE CASCADE, + provider_id UUID NOT NULL REFERENCES providers(id) ON DELETE CASCADE, type TEXT NOT NULL DEFAULT 'chat', config JSONB NOT NULL DEFAULT '{}'::jsonb, created_at TIMESTAMPTZ NOT NULL DEFAULT now(), updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), - CONSTRAINT models_provider_model_id_unique UNIQUE (llm_provider_id, model_id), - CONSTRAINT models_type_check CHECK (type IN ('chat', 'embedding')) + CONSTRAINT models_provider_id_model_id_unique UNIQUE (provider_id, model_id), + CONSTRAINT models_type_check CHECK (type IN ('chat', 'embedding', 'speech')) ); CREATE TABLE IF NOT EXISTS model_variants ( @@ -120,31 +119,6 @@ CREATE TABLE IF NOT EXISTS memory_providers ( CONSTRAINT memory_providers_name_unique UNIQUE (name) ); --- tts_providers: pluggable TTS service backends -CREATE TABLE IF NOT EXISTS tts_providers ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - name TEXT NOT NULL, - provider TEXT NOT NULL, - config JSONB NOT NULL DEFAULT '{}'::jsonb, - enable BOOLEAN NOT NULL DEFAULT false, - created_at TIMESTAMPTZ NOT NULL DEFAULT now(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), - CONSTRAINT tts_providers_name_unique UNIQUE (name) -); - --- tts_models: available models per TTS provider with per-model configuration -CREATE TABLE IF NOT EXISTS tts_models ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - model_id TEXT NOT NULL, - name TEXT, - tts_provider_id UUID NOT NULL REFERENCES tts_providers(id) ON DELETE CASCADE, - config JSONB NOT NULL DEFAULT '{}'::jsonb, - created_at TIMESTAMPTZ NOT NULL DEFAULT now(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT now() -); - -CREATE INDEX IF NOT EXISTS idx_tts_models_provider_id ON tts_models(tts_provider_id); - CREATE TABLE IF NOT EXISTS browser_contexts ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT NOT NULL DEFAULT '', @@ -179,7 +153,7 @@ CREATE TABLE IF NOT EXISTS bots ( title_model_id UUID REFERENCES models(id) ON DELETE SET NULL, image_model_id UUID 
REFERENCES models(id) ON DELETE SET NULL, discuss_probe_model_id UUID REFERENCES models(id) ON DELETE SET NULL, - tts_model_id UUID REFERENCES tts_models(id) ON DELETE SET NULL, + tts_model_id UUID REFERENCES models(id) ON DELETE SET NULL, browser_context_id UUID REFERENCES browser_contexts(id) ON DELETE SET NULL, metadata JSONB NOT NULL DEFAULT '{}'::jsonb, created_at TIMESTAMPTZ NOT NULL DEFAULT now(), @@ -651,3 +625,20 @@ CREATE TABLE IF NOT EXISTS email_outbox ( CREATE INDEX IF NOT EXISTS idx_email_outbox_provider_id ON email_outbox(provider_id); CREATE INDEX IF NOT EXISTS idx_email_outbox_bot_id ON email_outbox(bot_id, created_at DESC); + +-- provider_oauth_tokens: OAuth2 tokens for LLM providers (e.g. OpenAI Codex OAuth) +CREATE TABLE IF NOT EXISTS provider_oauth_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + provider_id UUID NOT NULL UNIQUE REFERENCES providers(id) ON DELETE CASCADE, + access_token TEXT NOT NULL DEFAULT '', + refresh_token TEXT NOT NULL DEFAULT '', + expires_at TIMESTAMPTZ, + scope TEXT NOT NULL DEFAULT '', + token_type TEXT NOT NULL DEFAULT '', + state TEXT NOT NULL DEFAULT '', + pkce_code_verifier TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); + +CREATE INDEX IF NOT EXISTS idx_provider_oauth_tokens_state ON provider_oauth_tokens(state) WHERE state != ''; diff --git a/db/migrations/0010_client_type_to_model.up.sql b/db/migrations/0010_client_type_to_model.up.sql index c3d42622..24cd02ec 100644 --- a/db/migrations/0010_client_type_to_model.up.sql +++ b/db/migrations/0010_client_type_to_model.up.sql @@ -43,6 +43,6 @@ BEGIN END IF; END $$; --- 7) Drop client_type from llm_providers -ALTER TABLE llm_providers DROP CONSTRAINT IF EXISTS llm_providers_client_type_check; -ALTER TABLE llm_providers DROP COLUMN IF EXISTS client_type; +-- 7) Drop client_type from llm_providers (IF EXISTS for fresh-schema compat) +ALTER TABLE IF EXISTS llm_providers DROP 
CONSTRAINT IF EXISTS llm_providers_client_type_check; +ALTER TABLE IF EXISTS llm_providers DROP COLUMN IF EXISTS client_type; diff --git a/db/migrations/0013_model_id_unique_per_provider.up.sql b/db/migrations/0013_model_id_unique_per_provider.up.sql index 91612a91..160246d3 100644 --- a/db/migrations/0013_model_id_unique_per_provider.up.sql +++ b/db/migrations/0013_model_id_unique_per_provider.up.sql @@ -7,7 +7,11 @@ BEGIN ALTER TABLE models DROP CONSTRAINT models_model_id_unique; END IF; - IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'models_provider_model_id_unique') THEN + -- Only add old-style constraint when llm_provider_id column exists (pre-0061 schema). + -- Fresh databases already have provider_id with models_provider_id_model_id_unique. + IF NOT EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'models_provider_model_id_unique') + AND EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'models' AND column_name = 'llm_provider_id') + THEN ALTER TABLE models ADD CONSTRAINT models_provider_model_id_unique UNIQUE (llm_provider_id, model_id); END IF; diff --git a/db/migrations/0041_provider_model_refactor.up.sql b/db/migrations/0041_provider_model_refactor.up.sql index 1adabbfb..976627a6 100644 --- a/db/migrations/0041_provider_model_refactor.up.sql +++ b/db/migrations/0041_provider_model_refactor.up.sql @@ -1,15 +1,18 @@ -- 0041_provider_model_refactor -- Move client_type to llm_providers, add icon, replace model columns with config JSONB. --- 1. Add client_type and icon to llm_providers -ALTER TABLE llm_providers +-- 1. Add client_type and icon to llm_providers (IF EXISTS for fresh-schema compat) +ALTER TABLE IF EXISTS llm_providers ADD COLUMN IF NOT EXISTS client_type TEXT NOT NULL DEFAULT 'openai-completions', ADD COLUMN IF NOT EXISTS icon TEXT; -- 2–6. Backfill and migrate only when old columns exist (idempotent for fresh DBs). 
DO $$ BEGIN - -- Back-fill provider client_type from models.client_type (old column) - IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'models' AND column_name = 'client_type') THEN + -- Back-fill provider client_type from models.client_type (old column). + -- Only runs on pre-0061 schema where llm_providers table still exists. + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'llm_providers') + AND EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'models' AND column_name = 'client_type') + THEN UPDATE llm_providers p SET client_type = sub.client_type FROM ( @@ -21,8 +24,10 @@ DO $$ BEGIN WHERE p.id = sub.llm_provider_id; END IF; - -- Add CHECK constraint (skip if already present) - IF NOT EXISTS (SELECT 1 FROM information_schema.table_constraints WHERE constraint_name = 'llm_providers_client_type_check') THEN + -- Add CHECK constraint (skip if already present or table renamed) + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'llm_providers') + AND NOT EXISTS (SELECT 1 FROM information_schema.table_constraints WHERE constraint_name = 'llm_providers_client_type_check') + THEN ALTER TABLE llm_providers ADD CONSTRAINT llm_providers_client_type_check CHECK (client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai')); diff --git a/db/migrations/0042_provider_enable.up.sql b/db/migrations/0042_provider_enable.up.sql index 6911f311..473d1440 100644 --- a/db/migrations/0042_provider_enable.up.sql +++ b/db/migrations/0042_provider_enable.up.sql @@ -1,5 +1,5 @@ -- 0042_provider_enable -- Add enable column to llm_providers for built-in provider registry support. 
-ALTER TABLE llm_providers +ALTER TABLE IF EXISTS llm_providers ADD COLUMN IF NOT EXISTS enable BOOLEAN NOT NULL DEFAULT true; diff --git a/db/migrations/0046_llm_provider_oauth.up.sql b/db/migrations/0046_llm_provider_oauth.up.sql index e556976e..13e6e59d 100644 --- a/db/migrations/0046_llm_provider_oauth.up.sql +++ b/db/migrations/0046_llm_provider_oauth.up.sql @@ -1,18 +1,26 @@ -- 0046_llm_provider_oauth -- Add OAuth token storage for LLM providers to support OpenAI Codex OAuth. +-- On fresh databases, provider_oauth_tokens is already created by 0001_init. -CREATE TABLE IF NOT EXISTS llm_provider_oauth_tokens ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - llm_provider_id UUID NOT NULL UNIQUE REFERENCES llm_providers(id) ON DELETE CASCADE, - access_token TEXT NOT NULL DEFAULT '', - refresh_token TEXT NOT NULL DEFAULT '', - expires_at TIMESTAMPTZ, - scope TEXT NOT NULL DEFAULT '', - token_type TEXT NOT NULL DEFAULT '', - state TEXT NOT NULL DEFAULT '', - pkce_code_verifier TEXT NOT NULL DEFAULT '', - created_at TIMESTAMPTZ NOT NULL DEFAULT now(), - updated_at TIMESTAMPTZ NOT NULL DEFAULT now() -); - -CREATE INDEX IF NOT EXISTS idx_llm_provider_oauth_tokens_state ON llm_provider_oauth_tokens(state) WHERE state != ''; +DO $$ +BEGIN + -- Only create old-style table when llm_providers still exists (pre-0061 schema). 
+ IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'llm_providers') + AND NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'llm_provider_oauth_tokens') + THEN + CREATE TABLE llm_provider_oauth_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + llm_provider_id UUID NOT NULL UNIQUE REFERENCES llm_providers(id) ON DELETE CASCADE, + access_token TEXT NOT NULL DEFAULT '', + refresh_token TEXT NOT NULL DEFAULT '', + expires_at TIMESTAMPTZ, + scope TEXT NOT NULL DEFAULT '', + token_type TEXT NOT NULL DEFAULT '', + state TEXT NOT NULL DEFAULT '', + pkce_code_verifier TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() + ); + CREATE INDEX idx_llm_provider_oauth_tokens_state ON llm_provider_oauth_tokens(state) WHERE state != ''; + END IF; +END $$; diff --git a/db/migrations/0047_add_openai_codex_client_type.up.sql b/db/migrations/0047_add_openai_codex_client_type.up.sql index 37d7b527..358f3e73 100644 --- a/db/migrations/0047_add_openai_codex_client_type.up.sql +++ b/db/migrations/0047_add_openai_codex_client_type.up.sql @@ -1,12 +1,19 @@ -- 0047_add_openai_codex_client_type -- Add openai-codex as a first-class client_type and migrate existing codex-oauth providers. +-- On fresh databases, providers table already has the expanded CHECK from 0001_init. 
-ALTER TABLE llm_providers DROP CONSTRAINT IF EXISTS llm_providers_client_type_check; -ALTER TABLE llm_providers ADD CONSTRAINT llm_providers_client_type_check - CHECK (client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai', 'openai-codex')); +ALTER TABLE IF EXISTS llm_providers DROP CONSTRAINT IF EXISTS llm_providers_client_type_check; -UPDATE llm_providers -SET client_type = 'openai-codex', - updated_at = now() -WHERE client_type = 'openai-responses' - AND metadata->>'auth_type' = 'openai-codex-oauth'; +DO $$ +BEGIN + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'llm_providers') THEN + ALTER TABLE llm_providers ADD CONSTRAINT llm_providers_client_type_check + CHECK (client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai', 'openai-codex')); + + UPDATE llm_providers + SET client_type = 'openai-codex', + updated_at = now() + WHERE client_type = 'openai-responses' + AND metadata->>'auth_type' = 'openai-codex-oauth'; + END IF; +END $$; diff --git a/db/migrations/0061_unify_providers.down.sql b/db/migrations/0061_unify_providers.down.sql new file mode 100644 index 00000000..57947284 --- /dev/null +++ b/db/migrations/0061_unify_providers.down.sql @@ -0,0 +1,79 @@ +-- 0061_unify_providers (rollback) +-- Reverse the provider unification: restore llm_providers, tts_providers, tts_models. 
+ +-- Step 1: Rename provider_oauth_tokens back +ALTER INDEX IF EXISTS idx_provider_oauth_tokens_state RENAME TO idx_llm_provider_oauth_tokens_state; +ALTER TABLE provider_oauth_tokens RENAME COLUMN provider_id TO llm_provider_id; +ALTER TABLE provider_oauth_tokens RENAME TO llm_provider_oauth_tokens; + +-- Step 2: Recreate tts_providers and tts_models +CREATE TABLE IF NOT EXISTS tts_providers ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name TEXT NOT NULL, + provider TEXT NOT NULL, + config JSONB NOT NULL DEFAULT '{}'::jsonb, + enable BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT tts_providers_name_unique UNIQUE (name) +); + +CREATE TABLE IF NOT EXISTS tts_models ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + model_id TEXT NOT NULL, + name TEXT, + tts_provider_id UUID NOT NULL REFERENCES tts_providers(id) ON DELETE CASCADE, + config JSONB NOT NULL DEFAULT '{}'::jsonb, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now() +); +CREATE INDEX IF NOT EXISTS idx_tts_models_provider_id ON tts_models(tts_provider_id); + +-- Step 3: Migrate speech providers back to tts_providers +INSERT INTO tts_providers (id, name, provider, config, enable, created_at, updated_at) +SELECT id, name, + CASE WHEN client_type = 'edge-speech' THEN 'edge' ELSE client_type END, + config, enable, created_at, updated_at +FROM providers +WHERE client_type = 'edge-speech'; + +-- Step 4: Migrate speech models back to tts_models +INSERT INTO tts_models (id, model_id, name, tts_provider_id, config, created_at, updated_at) +SELECT id, model_id, name, provider_id, config, created_at, updated_at +FROM models +WHERE type = 'speech'; + +-- Step 5: Update bots FK back to tts_models +ALTER TABLE bots DROP CONSTRAINT IF EXISTS bots_tts_model_id_fkey; +ALTER TABLE bots ADD CONSTRAINT bots_tts_model_id_fkey + FOREIGN KEY (tts_model_id) REFERENCES tts_models(id) 
ON DELETE SET NULL; + +-- Step 6: Remove speech models and providers from unified tables +DELETE FROM models WHERE type = 'speech'; +DELETE FROM providers WHERE client_type = 'edge-speech'; + +-- Step 7: Restore models type CHECK +ALTER TABLE models DROP CONSTRAINT models_type_check; +ALTER TABLE models ADD CONSTRAINT models_type_check CHECK (type IN ('chat', 'embedding')); + +-- Step 8: Rename provider_id back to llm_provider_id +ALTER TABLE models RENAME CONSTRAINT models_provider_id_model_id_unique TO models_provider_model_id_unique; +ALTER TABLE models RENAME COLUMN provider_id TO llm_provider_id; + +-- Step 9: Restore client_type CHECK +ALTER TABLE providers DROP CONSTRAINT providers_client_type_check; +ALTER TABLE providers ADD CONSTRAINT llm_providers_client_type_check CHECK ( + client_type IN ('openai-responses', 'openai-completions', 'anthropic-messages', 'google-generative-ai', 'openai-codex') +); + +-- Step 10: Restore api_key and base_url columns from config +ALTER TABLE providers ADD COLUMN base_url TEXT NOT NULL DEFAULT ''; +ALTER TABLE providers ADD COLUMN api_key TEXT NOT NULL DEFAULT ''; +UPDATE providers SET + base_url = COALESCE(config->>'base_url', ''), + api_key = COALESCE(config->>'api_key', ''); +ALTER TABLE providers DROP COLUMN config; + +-- Step 11: Rename providers back to llm_providers +ALTER TABLE providers RENAME CONSTRAINT providers_name_unique TO llm_providers_name_unique; +ALTER TABLE providers RENAME TO llm_providers; diff --git a/db/migrations/0061_unify_providers.up.sql b/db/migrations/0061_unify_providers.up.sql new file mode 100644 index 00000000..7c1498b4 --- /dev/null +++ b/db/migrations/0061_unify_providers.up.sql @@ -0,0 +1,94 @@ +-- 0061_unify_providers +-- Unify llm_providers and tts_providers/tts_models into a single providers/models schema. +-- Merge api_key and base_url into a config JSONB column. Add speech model type. +-- NOTE: On fresh databases the canonical schema already applies; all guards are IF EXISTS. 
+ +DO $$ +BEGIN + -- Only run full migration if old llm_providers table still exists + IF NOT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'llm_providers') THEN + RETURN; + END IF; + + -- Step 1: Rename llm_providers → providers + EXECUTE 'ALTER TABLE llm_providers RENAME TO providers'; + EXECUTE 'ALTER TABLE providers RENAME CONSTRAINT llm_providers_name_unique TO providers_name_unique'; + EXECUTE 'ALTER TABLE providers DROP CONSTRAINT IF EXISTS llm_providers_client_type_check'; + + -- Step 2: Add config JSONB and migrate api_key + base_url into it + EXECUTE 'ALTER TABLE providers ADD COLUMN IF NOT EXISTS config JSONB NOT NULL DEFAULT ''{}''::jsonb'; + EXECUTE 'UPDATE providers SET config = jsonb_build_object(''api_key'', api_key, ''base_url'', base_url) WHERE api_key IS NOT NULL'; + EXECUTE 'ALTER TABLE providers DROP COLUMN IF EXISTS api_key'; + EXECUTE 'ALTER TABLE providers DROP COLUMN IF EXISTS base_url'; + + -- Step 3: Expand client_type CHECK + EXECUTE 'ALTER TABLE providers ADD CONSTRAINT providers_client_type_check CHECK ( + client_type IN ( + ''openai-responses'', ''openai-completions'', ''anthropic-messages'', + ''google-generative-ai'', ''openai-codex'', ''edge-speech'' + ) + )'; + + -- Step 4: Rename llm_provider_id → provider_id in models table + IF EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'models' AND column_name = 'llm_provider_id') THEN + EXECUTE 'ALTER TABLE models RENAME COLUMN llm_provider_id TO provider_id'; + END IF; + IF EXISTS (SELECT 1 FROM pg_constraint WHERE conname = 'models_provider_model_id_unique') THEN + EXECUTE 'ALTER TABLE models RENAME CONSTRAINT models_provider_model_id_unique TO models_provider_id_model_id_unique'; + END IF; + + -- Step 5: Expand models type CHECK to include speech + EXECUTE 'ALTER TABLE models DROP CONSTRAINT IF EXISTS models_type_check'; + EXECUTE 'ALTER TABLE models ADD CONSTRAINT models_type_check CHECK (type IN (''chat'', ''embedding'', ''speech''))'; + + -- 
Step 6: Migrate tts_providers into providers + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'tts_providers') THEN + EXECUTE ' + INSERT INTO providers (id, name, client_type, icon, enable, config, metadata, created_at, updated_at) + SELECT + tp.id, + tp.name, + CASE WHEN tp.provider = ''edge'' THEN ''edge-speech'' ELSE tp.provider END, + NULL, + tp.enable, + tp.config, + ''{}''::jsonb, + tp.created_at, + tp.updated_at + FROM tts_providers tp + WHERE NOT EXISTS (SELECT 1 FROM providers p WHERE p.id = tp.id)'; + END IF; + + -- Step 7: Migrate tts_models into models + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'tts_models') THEN + EXECUTE ' + INSERT INTO models (id, model_id, name, provider_id, type, config, created_at, updated_at) + SELECT + tm.id, + tm.model_id, + tm.name, + tm.tts_provider_id, + ''speech'', + tm.config, + tm.created_at, + tm.updated_at + FROM tts_models tm + WHERE NOT EXISTS (SELECT 1 FROM models m WHERE m.id = tm.id)'; + END IF; + + -- Step 8: Update bots.tts_model_id FK to reference models instead of tts_models + EXECUTE 'ALTER TABLE bots DROP CONSTRAINT IF EXISTS bots_tts_model_id_fkey'; + EXECUTE 'ALTER TABLE bots ADD CONSTRAINT bots_tts_model_id_fkey FOREIGN KEY (tts_model_id) REFERENCES models(id) ON DELETE SET NULL'; + + -- Step 9: Drop tts_models and tts_providers + EXECUTE 'DROP INDEX IF EXISTS idx_tts_models_provider_id'; + EXECUTE 'DROP TABLE IF EXISTS tts_models'; + EXECUTE 'DROP TABLE IF EXISTS tts_providers'; + + -- Step 10: Rename llm_provider_oauth_tokens → provider_oauth_tokens + IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'llm_provider_oauth_tokens') THEN + EXECUTE 'ALTER TABLE llm_provider_oauth_tokens RENAME TO provider_oauth_tokens'; + EXECUTE 'ALTER TABLE provider_oauth_tokens RENAME COLUMN llm_provider_id TO provider_id'; + EXECUTE 'ALTER INDEX IF EXISTS idx_llm_provider_oauth_tokens_state RENAME TO idx_provider_oauth_tokens_state'; + END IF; +END 
$$; diff --git a/db/queries/llm_provider_oauth.sql b/db/queries/llm_provider_oauth.sql deleted file mode 100644 index 48499714..00000000 --- a/db/queries/llm_provider_oauth.sql +++ /dev/null @@ -1,52 +0,0 @@ --- name: UpsertLlmProviderOAuthToken :one -INSERT INTO llm_provider_oauth_tokens ( - llm_provider_id, - access_token, - refresh_token, - expires_at, - scope, - token_type, - state, - pkce_code_verifier -) -VALUES ( - sqlc.arg(llm_provider_id), - sqlc.arg(access_token), - sqlc.arg(refresh_token), - sqlc.arg(expires_at), - sqlc.arg(scope), - sqlc.arg(token_type), - sqlc.arg(state), - sqlc.arg(pkce_code_verifier) -) -ON CONFLICT (llm_provider_id) DO UPDATE SET - access_token = EXCLUDED.access_token, - refresh_token = EXCLUDED.refresh_token, - expires_at = EXCLUDED.expires_at, - scope = EXCLUDED.scope, - token_type = EXCLUDED.token_type, - state = EXCLUDED.state, - pkce_code_verifier = EXCLUDED.pkce_code_verifier, - updated_at = now() -RETURNING *; - --- name: GetLlmProviderOAuthTokenByProvider :one -SELECT * FROM llm_provider_oauth_tokens WHERE llm_provider_id = sqlc.arg(llm_provider_id); - --- name: GetLlmProviderOAuthTokenByState :one -SELECT * FROM llm_provider_oauth_tokens WHERE state = sqlc.arg(state) AND state != ''; - --- name: UpdateLlmProviderOAuthState :exec -INSERT INTO llm_provider_oauth_tokens (llm_provider_id, state, pkce_code_verifier) -VALUES ( - sqlc.arg(llm_provider_id), - sqlc.arg(state), - sqlc.arg(pkce_code_verifier) -) -ON CONFLICT (llm_provider_id) DO UPDATE SET - state = EXCLUDED.state, - pkce_code_verifier = EXCLUDED.pkce_code_verifier, - updated_at = now(); - --- name: DeleteLlmProviderOAuthToken :exec -DELETE FROM llm_provider_oauth_tokens WHERE llm_provider_id = sqlc.arg(llm_provider_id); diff --git a/db/queries/models.sql b/db/queries/models.sql index bf67ca19..a767f5f0 100644 --- a/db/queries/models.sql +++ b/db/queries/models.sql @@ -1,52 +1,51 @@ --- name: CreateLlmProvider :one -INSERT INTO llm_providers (name, base_url, api_key, 
client_type, icon, enable, metadata) +-- name: CreateProvider :one +INSERT INTO providers (name, client_type, icon, enable, config, metadata) VALUES ( sqlc.arg(name), - sqlc.arg(base_url), - sqlc.arg(api_key), sqlc.arg(client_type), sqlc.arg(icon), sqlc.arg(enable), + sqlc.arg(config), sqlc.arg(metadata) ) RETURNING *; --- name: GetLlmProviderByID :one -SELECT * FROM llm_providers WHERE id = sqlc.arg(id); +-- name: GetProviderByID :one +SELECT * FROM providers WHERE id = sqlc.arg(id); --- name: GetLlmProviderByName :one -SELECT * FROM llm_providers WHERE name = sqlc.arg(name); +-- name: GetProviderByName :one +SELECT * FROM providers WHERE name = sqlc.arg(name); --- name: ListLlmProviders :many -SELECT * FROM llm_providers +-- name: ListProviders :many +SELECT * FROM providers +WHERE client_type NOT IN ('edge-speech') ORDER BY created_at DESC; --- name: UpdateLlmProvider :one -UPDATE llm_providers +-- name: UpdateProvider :one +UPDATE providers SET name = sqlc.arg(name), - base_url = sqlc.arg(base_url), - api_key = sqlc.arg(api_key), client_type = sqlc.arg(client_type), icon = sqlc.arg(icon), enable = sqlc.arg(enable), + config = sqlc.arg(config), metadata = sqlc.arg(metadata), updated_at = now() WHERE id = sqlc.arg(id) RETURNING *; --- name: DeleteLlmProvider :exec -DELETE FROM llm_providers WHERE id = sqlc.arg(id); +-- name: DeleteProvider :exec +DELETE FROM providers WHERE id = sqlc.arg(id); --- name: CountLlmProviders :one -SELECT COUNT(*) FROM llm_providers; +-- name: CountProviders :one +SELECT COUNT(*) FROM providers; -- name: CreateModel :one -INSERT INTO models (model_id, name, llm_provider_id, type, config) +INSERT INTO models (model_id, name, provider_id, type, config) VALUES ( sqlc.arg(model_id), sqlc.arg(name), - sqlc.arg(llm_provider_id), + sqlc.arg(provider_id), sqlc.arg(type), sqlc.arg(config) ) @@ -74,19 +73,19 @@ ORDER BY created_at DESC; -- name: ListModelsByProviderID :many SELECT * FROM models -WHERE llm_provider_id = sqlc.arg(llm_provider_id) 
+WHERE provider_id = sqlc.arg(provider_id) ORDER BY created_at DESC; -- name: ListModelsByProviderIDAndType :many SELECT * FROM models -WHERE llm_provider_id = sqlc.arg(llm_provider_id) +WHERE provider_id = sqlc.arg(provider_id) AND type = sqlc.arg(type) ORDER BY created_at DESC; -- name: ListModelsByProviderClientType :many SELECT m.* FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.client_type = sqlc.arg(client_type) ORDER BY m.created_at DESC; @@ -95,7 +94,7 @@ UPDATE models SET model_id = sqlc.arg(model_id), name = sqlc.arg(name), - llm_provider_id = sqlc.arg(llm_provider_id), + provider_id = sqlc.arg(provider_id), type = sqlc.arg(type), config = sqlc.arg(config), updated_at = now() @@ -116,18 +115,23 @@ SELECT COUNT(*) FROM models WHERE type = sqlc.arg(type); -- name: UpsertRegistryProvider :one -INSERT INTO llm_providers (name, base_url, api_key, client_type, icon, enable, metadata) -VALUES (sqlc.arg(name), sqlc.arg(base_url), '', sqlc.arg(client_type), sqlc.arg(icon), false, '{}') +INSERT INTO providers (name, client_type, icon, enable, config, metadata) +VALUES (sqlc.arg(name), sqlc.arg(client_type), sqlc.arg(icon), false, sqlc.arg(config), '{}') ON CONFLICT (name) DO UPDATE SET icon = EXCLUDED.icon, client_type = EXCLUDED.client_type, + config = CASE + WHEN providers.config->>'api_key' IS NOT NULL AND providers.config->>'api_key' != '' + THEN jsonb_set(EXCLUDED.config, '{api_key}', providers.config->'api_key') + ELSE EXCLUDED.config + END, updated_at = now() RETURNING *; -- name: UpsertRegistryModel :one -INSERT INTO models (model_id, name, llm_provider_id, type, config) -VALUES (sqlc.arg(model_id), sqlc.arg(name), sqlc.arg(llm_provider_id), sqlc.arg(type), sqlc.arg(config)) -ON CONFLICT (llm_provider_id, model_id) DO UPDATE SET +INSERT INTO models (model_id, name, provider_id, type, config) +VALUES (sqlc.arg(model_id), sqlc.arg(name), sqlc.arg(provider_id), sqlc.arg(type), sqlc.arg(config)) 
+ON CONFLICT (provider_id, model_id) DO UPDATE SET name = EXCLUDED.name, type = EXCLUDED.type, config = EXCLUDED.config, @@ -137,14 +141,14 @@ RETURNING *; -- name: ListEnabledModels :many SELECT m.* FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true ORDER BY m.created_at DESC; -- name: ListEnabledModelsByType :many SELECT m.* FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true AND m.type = sqlc.arg(type) ORDER BY m.created_at DESC; @@ -152,7 +156,7 @@ ORDER BY m.created_at DESC; -- name: ListEnabledModelsByProviderClientType :many SELECT m.* FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true AND p.client_type = sqlc.arg(client_type) ORDER BY m.created_at DESC; @@ -171,3 +175,37 @@ RETURNING *; SELECT * FROM model_variants WHERE model_uuid = sqlc.arg(model_uuid) ORDER BY weight DESC, created_at DESC; + +-- name: GetSpeechModelWithProvider :one +SELECT + m.*, + p.client_type AS provider_type +FROM models m +JOIN providers p ON p.id = m.provider_id +WHERE m.id = sqlc.arg(id) + AND m.type = 'speech'; + +-- name: ListSpeechProviders :many +SELECT * FROM providers +WHERE client_type IN ('edge-speech') +ORDER BY created_at DESC; + +-- name: ListSpeechModels :many +SELECT m.*, + p.client_type AS provider_type +FROM models m +JOIN providers p ON p.id = m.provider_id +WHERE m.type = 'speech' +ORDER BY m.created_at DESC; + +-- name: ListSpeechModelsByProviderID :many +SELECT * FROM models +WHERE provider_id = sqlc.arg(provider_id) + AND type = 'speech' +ORDER BY created_at DESC; + +-- name: GetModelByProviderAndModelID :one +SELECT * FROM models +WHERE provider_id = sqlc.arg(provider_id) + AND model_id = sqlc.arg(model_id) +LIMIT 1; diff --git a/db/queries/provider_oauth.sql b/db/queries/provider_oauth.sql new file mode 100644 index 
00000000..2bb964ad --- /dev/null +++ b/db/queries/provider_oauth.sql @@ -0,0 +1,52 @@ +-- name: UpsertProviderOAuthToken :one +INSERT INTO provider_oauth_tokens ( + provider_id, + access_token, + refresh_token, + expires_at, + scope, + token_type, + state, + pkce_code_verifier +) +VALUES ( + sqlc.arg(provider_id), + sqlc.arg(access_token), + sqlc.arg(refresh_token), + sqlc.arg(expires_at), + sqlc.arg(scope), + sqlc.arg(token_type), + sqlc.arg(state), + sqlc.arg(pkce_code_verifier) +) +ON CONFLICT (provider_id) DO UPDATE SET + access_token = EXCLUDED.access_token, + refresh_token = EXCLUDED.refresh_token, + expires_at = EXCLUDED.expires_at, + scope = EXCLUDED.scope, + token_type = EXCLUDED.token_type, + state = EXCLUDED.state, + pkce_code_verifier = EXCLUDED.pkce_code_verifier, + updated_at = now() +RETURNING *; + +-- name: GetProviderOAuthTokenByProvider :one +SELECT * FROM provider_oauth_tokens WHERE provider_id = sqlc.arg(provider_id); + +-- name: GetProviderOAuthTokenByState :one +SELECT * FROM provider_oauth_tokens WHERE state = sqlc.arg(state) AND state != ''; + +-- name: UpdateProviderOAuthState :exec +INSERT INTO provider_oauth_tokens (provider_id, state, pkce_code_verifier) +VALUES ( + sqlc.arg(provider_id), + sqlc.arg(state), + sqlc.arg(pkce_code_verifier) +) +ON CONFLICT (provider_id) DO UPDATE SET + state = EXCLUDED.state, + pkce_code_verifier = EXCLUDED.pkce_code_verifier, + updated_at = now(); + +-- name: DeleteProviderOAuthToken :exec +DELETE FROM provider_oauth_tokens WHERE provider_id = sqlc.arg(provider_id); diff --git a/db/queries/settings.sql b/db/queries/settings.sql index 75208f1d..eff58da0 100644 --- a/db/queries/settings.sql +++ b/db/queries/settings.sql @@ -27,7 +27,7 @@ LEFT JOIN models AS title_models ON title_models.id = bots.title_model_id LEFT JOIN models AS image_models ON image_models.id = bots.image_model_id LEFT JOIN search_providers ON search_providers.id = bots.search_provider_id LEFT JOIN memory_providers ON memory_providers.id = 
bots.memory_provider_id -LEFT JOIN tts_models ON tts_models.id = bots.tts_model_id +LEFT JOIN models AS tts_models ON tts_models.id = bots.tts_model_id LEFT JOIN browser_contexts ON browser_contexts.id = bots.browser_context_id WHERE bots.id = $1; @@ -84,7 +84,7 @@ LEFT JOIN models AS title_models ON title_models.id = updated.title_model_id LEFT JOIN models AS image_models ON image_models.id = updated.image_model_id LEFT JOIN search_providers ON search_providers.id = updated.search_provider_id LEFT JOIN memory_providers ON memory_providers.id = updated.memory_provider_id -LEFT JOIN tts_models ON tts_models.id = updated.tts_model_id +LEFT JOIN models AS tts_models ON tts_models.id = updated.tts_model_id LEFT JOIN browser_contexts ON browser_contexts.id = updated.browser_context_id; -- name: DeleteSettingsByBotID :exec diff --git a/db/queries/token_usage.sql b/db/queries/token_usage.sql index 719fdd8a..f787c461 100644 --- a/db/queries/token_usage.sql +++ b/db/queries/token_usage.sql @@ -31,7 +31,7 @@ SELECT COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens FROM bot_history_messages m LEFT JOIN models mo ON mo.id = m.model_id -LEFT JOIN llm_providers lp ON lp.id = mo.llm_provider_id +LEFT JOIN providers lp ON lp.id = mo.provider_id WHERE m.bot_id = sqlc.arg(bot_id) AND m.usage IS NOT NULL AND m.created_at >= sqlc.arg(from_time) diff --git a/db/queries/tts_models.sql b/db/queries/tts_models.sql deleted file mode 100644 index c28db130..00000000 --- a/db/queries/tts_models.sql +++ /dev/null @@ -1,50 +0,0 @@ --- name: CreateTtsModel :one -INSERT INTO tts_models (model_id, name, tts_provider_id, config) -VALUES ( - sqlc.arg(model_id), - sqlc.arg(name), - sqlc.arg(tts_provider_id), - sqlc.arg(config) -) -RETURNING *; - --- name: GetTtsModelByID :one -SELECT * FROM tts_models WHERE id = sqlc.arg(id); - --- name: GetTtsModelWithProvider :one -SELECT - tm.*, - tp.provider AS provider_type -FROM tts_models tm -JOIN tts_providers tp ON tp.id = 
tm.tts_provider_id -WHERE tm.id = sqlc.arg(id); - --- name: ListTtsModels :many -SELECT * FROM tts_models -ORDER BY created_at DESC; - --- name: ListTtsModelsByProviderID :many -SELECT * FROM tts_models -WHERE tts_provider_id = sqlc.arg(tts_provider_id) -ORDER BY created_at DESC; - --- name: UpdateTtsModel :one -UPDATE tts_models -SET - name = sqlc.arg(name), - config = sqlc.arg(config), - updated_at = now() -WHERE id = sqlc.arg(id) -RETURNING *; - --- name: DeleteTtsModel :exec -DELETE FROM tts_models WHERE id = sqlc.arg(id); - --- name: DeleteTtsModelsByProviderID :exec -DELETE FROM tts_models WHERE tts_provider_id = sqlc.arg(tts_provider_id); - --- name: GetTtsModelByProviderAndModelID :one -SELECT * FROM tts_models -WHERE tts_provider_id = sqlc.arg(tts_provider_id) - AND model_id = sqlc.arg(model_id) -LIMIT 1; diff --git a/db/queries/tts_providers.sql b/db/queries/tts_providers.sql deleted file mode 100644 index 37cc9f1c..00000000 --- a/db/queries/tts_providers.sql +++ /dev/null @@ -1,38 +0,0 @@ --- name: CreateTtsProvider :one -INSERT INTO tts_providers (name, provider, config, enable) -VALUES ( - sqlc.arg(name), - sqlc.arg(provider), - sqlc.arg(config), - sqlc.arg(enable) -) -RETURNING *; - --- name: GetTtsProviderByID :one -SELECT * FROM tts_providers WHERE id = sqlc.arg(id); - --- name: GetTtsProviderByName :one -SELECT * FROM tts_providers WHERE name = sqlc.arg(name); - --- name: ListTtsProviders :many -SELECT * FROM tts_providers -ORDER BY created_at DESC; - --- name: ListTtsProvidersByProvider :many -SELECT * FROM tts_providers -WHERE provider = sqlc.arg(provider) -ORDER BY created_at DESC; - --- name: UpdateTtsProvider :one -UPDATE tts_providers -SET - name = sqlc.arg(name), - provider = sqlc.arg(provider), - config = sqlc.arg(config), - enable = sqlc.arg(enable), - updated_at = now() -WHERE id = sqlc.arg(id) -RETURNING *; - --- name: DeleteTtsProvider :exec -DELETE FROM tts_providers WHERE id = sqlc.arg(id); diff --git a/devenv/docker-compose.yml 
b/devenv/docker-compose.yml index 9145b11b..240f8969 100644 --- a/devenv/docker-compose.yml +++ b/devenv/docker-compose.yml @@ -156,20 +156,20 @@ services: condition: service_healthy restart: unless-stopped - sparse: - build: - context: .. - dockerfile: docker/Dockerfile.sparse - container_name: memoh-dev-sparse - ports: - - "${MEMOH_DEV_SPARSE_PORT:-18085}:8085" - healthcheck: - test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://127.0.0.1:8085/health')\" || exit 1"] - interval: 15s - timeout: 10s - start_period: 30s - retries: 3 - restart: unless-stopped + # sparse: + # build: + # context: .. + # dockerfile: docker/Dockerfile.sparse + # container_name: memoh-dev-sparse + # ports: + # - "${MEMOH_DEV_SPARSE_PORT:-18085}:8085" + # healthcheck: + # test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://127.0.0.1:8085/health')\" || exit 1"] + # interval: 15s + # timeout: 10s + # start_period: 30s + # retries: 3 + # restart: unless-stopped volumes: postgres_data: diff --git a/internal/agent/tools/image_gen.go b/internal/agent/tools/image_gen.go index c0263578..9e559399 100644 --- a/internal/agent/tools/image_gen.go +++ b/internal/agent/tools/image_gen.go @@ -116,7 +116,7 @@ func (p *ImageGenProvider) execGenerateImage(ctx context.Context, session Sessio return nil, errors.New("configured model does not support image generation") } - provider, err := models.FetchProviderByID(ctx, p.queries, modelResp.LlmProviderID) + provider, err := models.FetchProviderByID(ctx, p.queries, modelResp.ProviderID) if err != nil { return nil, fmt.Errorf("failed to load model provider: %w", err) } @@ -131,7 +131,7 @@ func (p *ImageGenProvider) execGenerateImage(ctx context.Context, session Sessio ModelID: modelResp.ModelID, ClientType: provider.ClientType, APIKey: creds.APIKey, - BaseURL: provider.BaseUrl, + BaseURL: providers.ProviderConfigString(provider, "base_url"), }) userMsg := fmt.Sprintf("Generate an image with the 
following description. Size: %s\n\n%s", size, prompt) diff --git a/internal/agent/tools/subagent.go b/internal/agent/tools/subagent.go index 7100d7dd..cbb50257 100644 --- a/internal/agent/tools/subagent.go +++ b/internal/agent/tools/subagent.go @@ -318,7 +318,7 @@ func (p *SpawnProvider) resolveModel(ctx context.Context, botID string) (*sdk.Mo if err != nil { return nil, "", err } - provider, err := models.FetchProviderByID(ctx, p.queries, modelInfo.LlmProviderID) + provider, err := models.FetchProviderByID(ctx, p.queries, modelInfo.ProviderID) if err != nil { return nil, "", err } @@ -335,7 +335,7 @@ func (p *SpawnProvider) resolveModel(ctx context.Context, botID string) (*sdk.Mo provider.ClientType, creds.APIKey, creds.CodexAccountID, - provider.BaseUrl, + providers.ProviderConfigString(provider, "base_url"), nil, ) return sdkModel, modelInfo.ID, nil diff --git a/internal/command/model.go b/internal/command/model.go index 6870102a..53bcc644 100644 --- a/internal/command/model.go +++ b/internal/command/model.go @@ -23,7 +23,7 @@ func (h *Handler) buildModelGroup() *CommandGroup { } records := make([][]kv, 0, len(items)) for _, item := range items { - provName := h.resolveProviderName(cc, item.LlmProviderID) + provName := h.resolveProviderName(cc, item.ProviderID) records = append(records, []kv{ {"Model", item.Name}, {"Provider", provName}, diff --git a/internal/command/settings.go b/internal/command/settings.go index 3e70c41f..b88d9be5 100644 --- a/internal/command/settings.go +++ b/internal/command/settings.go @@ -120,7 +120,7 @@ func (h *Handler) resolveModelName(cc CommandContext, modelID string) string { } provName := "" if h.providersService != nil { - p, err := h.providersService.Get(cc.Ctx, m.LlmProviderID) + p, err := h.providersService.Get(cc.Ctx, m.ProviderID) if err == nil { provName = p.Name } diff --git a/internal/conversation/flow/resolver.go b/internal/conversation/flow/resolver.go index 16c4f64b..d593388c 100644 --- 
a/internal/conversation/flow/resolver.go +++ b/internal/conversation/flow/resolver.go @@ -153,7 +153,7 @@ type usageInfo struct { type resolvedContext struct { runConfig agentpkg.RunConfig model models.GetResponse - provider sqlc.LlmProvider + provider sqlc.Provider query string // headerified query injectedRecords *[]conversation.InjectedMessageRecord } @@ -345,10 +345,10 @@ type baseRunConfigParams struct { // buildBaseRunConfig creates a RunConfig with model, credentials, skills, // identity and system prompt — everything except Messages/Query/InlineImages. // Both resolve() and ResolveRunConfig() delegate to this shared builder. -func (r *Resolver) buildBaseRunConfig(ctx context.Context, p baseRunConfigParams) (agentpkg.RunConfig, models.GetResponse, sqlc.LlmProvider, error) { +func (r *Resolver) buildBaseRunConfig(ctx context.Context, p baseRunConfigParams) (agentpkg.RunConfig, models.GetResponse, sqlc.Provider, error) { botSettings, err := r.loadBotSettings(ctx, p.BotID) if err != nil { - return agentpkg.RunConfig{}, models.GetResponse{}, sqlc.LlmProvider{}, err + return agentpkg.RunConfig{}, models.GetResponse{}, sqlc.Provider{}, err } loopDetectionEnabled := r.loadBotLoopDetectionEnabled(ctx, p.BotID) userTimezoneName, userClockLocation := r.resolveTimezone(ctx, p.BotID, p.UserID) @@ -367,7 +367,7 @@ func (r *Resolver) buildBaseRunConfig(ctx context.Context, p baseRunConfigParams chatModel, provider, err := r.selectChatModel(ctx, req, botSettings, conversation.Settings{}) if err != nil { - return agentpkg.RunConfig{}, models.GetResponse{}, sqlc.LlmProvider{}, err + return agentpkg.RunConfig{}, models.GetResponse{}, sqlc.Provider{}, err } reasoningEffort := p.ReasoningEffort @@ -382,7 +382,7 @@ func (r *Resolver) buildBaseRunConfig(ctx context.Context, p baseRunConfigParams authResolver := providers.NewService(nil, r.queries, "") creds, err := authResolver.ResolveModelCredentials(ctx, provider) if err != nil { - return agentpkg.RunConfig{}, 
models.GetResponse{}, sqlc.LlmProvider{}, fmt.Errorf("resolve provider credentials: %w", err) + return agentpkg.RunConfig{}, models.GetResponse{}, sqlc.Provider{}, fmt.Errorf("resolve provider credentials: %w", err) } sdkModel := models.NewSDKChatModel(models.SDKModelConfig{ @@ -390,7 +390,7 @@ func (r *Resolver) buildBaseRunConfig(ctx context.Context, p baseRunConfigParams ClientType: provider.ClientType, APIKey: creds.APIKey, CodexAccountID: creds.CodexAccountID, - BaseURL: provider.BaseUrl, + BaseURL: providers.ProviderConfigString(provider, "base_url"), ReasoningConfig: reasoningConfig, }) diff --git a/internal/conversation/flow/resolver_compaction.go b/internal/conversation/flow/resolver_compaction.go index 9fcbf839..2be6fa83 100644 --- a/internal/conversation/flow/resolver_compaction.go +++ b/internal/conversation/flow/resolver_compaction.go @@ -50,7 +50,7 @@ func (r *Resolver) maybeCompact(ctx context.Context, req conversation.ChatReques } cfg.ModelID = model.ModelID - provider, err := models.FetchProviderByID(ctx, r.queries, model.LlmProviderID) + provider, err := models.FetchProviderByID(ctx, r.queries, model.ProviderID) if err != nil { r.logger.Warn("compaction: failed to fetch provider", slog.Any("error", err)) return @@ -64,7 +64,7 @@ func (r *Resolver) maybeCompact(ctx context.Context, req conversation.ChatReques cfg.ClientType = provider.ClientType cfg.APIKey = creds.APIKey cfg.CodexAccountID = creds.CodexAccountID - cfg.BaseURL = provider.BaseUrl + cfg.BaseURL = providers.ProviderConfigString(provider, "base_url") r.compactionService.TriggerCompaction(ctx, cfg) } diff --git a/internal/conversation/flow/resolver_model_selection.go b/internal/conversation/flow/resolver_model_selection.go index bbd56588..73ef633a 100644 --- a/internal/conversation/flow/resolver_model_selection.go +++ b/internal/conversation/flow/resolver_model_selection.go @@ -15,9 +15,9 @@ import ( "github.com/memohai/memoh/internal/settings" ) -func (r *Resolver) selectChatModel(ctx 
context.Context, req conversation.ChatRequest, botSettings settings.Settings, cs conversation.Settings) (models.GetResponse, sqlc.LlmProvider, error) { +func (r *Resolver) selectChatModel(ctx context.Context, req conversation.ChatRequest, botSettings settings.Settings, cs conversation.Settings) (models.GetResponse, sqlc.Provider, error) { if r.modelsService == nil { - return models.GetResponse{}, sqlc.LlmProvider{}, errors.New("models service not configured") + return models.GetResponse{}, sqlc.Provider{}, errors.New("models service not configured") } modelID := strings.TrimSpace(req.Model) providerFilter := strings.TrimSpace(req.Provider) @@ -32,7 +32,7 @@ func (r *Resolver) selectChatModel(ctx context.Context, req conversation.ChatReq } if modelID == "" { - return models.GetResponse{}, sqlc.LlmProvider{}, errors.New("chat model not configured: specify model in request or bot settings") + return models.GetResponse{}, sqlc.Provider{}, errors.New("chat model not configured: specify model in request or bot settings") } if providerFilter == "" { @@ -41,24 +41,24 @@ func (r *Resolver) selectChatModel(ctx context.Context, req conversation.ChatReq candidates, err := r.listCandidates(ctx, providerFilter) if err != nil { - return models.GetResponse{}, sqlc.LlmProvider{}, err + return models.GetResponse{}, sqlc.Provider{}, err } for _, m := range candidates { if matchesModelReference(m, modelID) { - prov, err := models.FetchProviderByID(ctx, r.queries, m.LlmProviderID) + prov, err := models.FetchProviderByID(ctx, r.queries, m.ProviderID) if err != nil { - return models.GetResponse{}, sqlc.LlmProvider{}, err + return models.GetResponse{}, sqlc.Provider{}, err } return m, prov, nil } } - return models.GetResponse{}, sqlc.LlmProvider{}, fmt.Errorf("chat model %q not found for provider %q", modelID, providerFilter) + return models.GetResponse{}, sqlc.Provider{}, fmt.Errorf("chat model %q not found for provider %q", modelID, providerFilter) } -func (r *Resolver) 
fetchChatModel(ctx context.Context, modelID string) (models.GetResponse, sqlc.LlmProvider, error) { +func (r *Resolver) fetchChatModel(ctx context.Context, modelID string) (models.GetResponse, sqlc.Provider, error) { modelRef := strings.TrimSpace(modelID) if modelRef == "" { - return models.GetResponse{}, sqlc.LlmProvider{}, errors.New("model id is required") + return models.GetResponse{}, sqlc.Provider{}, errors.New("model id is required") } // Support both model UUID and model_id slug. UUID-formatted slugs still @@ -71,21 +71,21 @@ func (r *Resolver) fetchChatModel(ctx context.Context, modelID string) (models.G goto resolved } if !errors.Is(err, pgx.ErrNoRows) { - return models.GetResponse{}, sqlc.LlmProvider{}, err + return models.GetResponse{}, sqlc.Provider{}, err } } model, err = r.modelsService.GetByModelID(ctx, modelRef) if err != nil { - return models.GetResponse{}, sqlc.LlmProvider{}, err + return models.GetResponse{}, sqlc.Provider{}, err } resolved: if model.Type != models.ModelTypeChat { - return models.GetResponse{}, sqlc.LlmProvider{}, errors.New("model is not a chat model") + return models.GetResponse{}, sqlc.Provider{}, errors.New("model is not a chat model") } - prov, err := models.FetchProviderByID(ctx, r.queries, model.LlmProviderID) + prov, err := models.FetchProviderByID(ctx, r.queries, model.ProviderID) if err != nil { - return models.GetResponse{}, sqlc.LlmProvider{}, err + return models.GetResponse{}, sqlc.Provider{}, err } return model, prov, nil } diff --git a/internal/conversation/flow/resolver_title.go b/internal/conversation/flow/resolver_title.go index e73aaf4c..0780cdb5 100644 --- a/internal/conversation/flow/resolver_title.go +++ b/internal/conversation/flow/resolver_title.go @@ -95,7 +95,7 @@ func (r *Resolver) maybeGenerateSessionTitle(ctx context.Context, req conversati } } -func (r *Resolver) generateTitle(ctx context.Context, model models.GetResponse, provider sqlc.LlmProvider, userQuery string) string { +func (r *Resolver) 
generateTitle(ctx context.Context, model models.GetResponse, provider sqlc.Provider, userQuery string) string { userSnippet := truncate(strings.TrimSpace(userQuery), titlePromptMaxInputChars) if userSnippet == "" { return "" @@ -117,7 +117,7 @@ func (r *Resolver) generateTitle(ctx context.Context, model models.GetResponse, ClientType: provider.ClientType, APIKey: creds.APIKey, CodexAccountID: creds.CodexAccountID, - BaseURL: provider.BaseUrl, + BaseURL: providers.ProviderConfigString(provider, "base_url"), } sdkModel := models.NewSDKChatModel(modelCfg) diff --git a/internal/db/sqlc/llm_provider_oauth.sql.go b/internal/db/sqlc/llm_provider_oauth.sql.go deleted file mode 100644 index cd31ac5c..00000000 --- a/internal/db/sqlc/llm_provider_oauth.sql.go +++ /dev/null @@ -1,163 +0,0 @@ -// Code generated by sqlc. DO NOT EDIT. -// versions: -// sqlc v1.30.0 -// source: llm_provider_oauth.sql - -package sqlc - -import ( - "context" - - "github.com/jackc/pgx/v5/pgtype" -) - -const deleteLlmProviderOAuthToken = `-- name: DeleteLlmProviderOAuthToken :exec -DELETE FROM llm_provider_oauth_tokens WHERE llm_provider_id = $1 -` - -func (q *Queries) DeleteLlmProviderOAuthToken(ctx context.Context, llmProviderID pgtype.UUID) error { - _, err := q.db.Exec(ctx, deleteLlmProviderOAuthToken, llmProviderID) - return err -} - -const getLlmProviderOAuthTokenByProvider = `-- name: GetLlmProviderOAuthTokenByProvider :one -SELECT id, llm_provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at FROM llm_provider_oauth_tokens WHERE llm_provider_id = $1 -` - -func (q *Queries) GetLlmProviderOAuthTokenByProvider(ctx context.Context, llmProviderID pgtype.UUID) (LlmProviderOauthToken, error) { - row := q.db.QueryRow(ctx, getLlmProviderOAuthTokenByProvider, llmProviderID) - var i LlmProviderOauthToken - err := row.Scan( - &i.ID, - &i.LlmProviderID, - &i.AccessToken, - &i.RefreshToken, - &i.ExpiresAt, - &i.Scope, - &i.TokenType, - 
&i.State, - &i.PkceCodeVerifier, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const getLlmProviderOAuthTokenByState = `-- name: GetLlmProviderOAuthTokenByState :one -SELECT id, llm_provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at FROM llm_provider_oauth_tokens WHERE state = $1 AND state != '' -` - -func (q *Queries) GetLlmProviderOAuthTokenByState(ctx context.Context, state string) (LlmProviderOauthToken, error) { - row := q.db.QueryRow(ctx, getLlmProviderOAuthTokenByState, state) - var i LlmProviderOauthToken - err := row.Scan( - &i.ID, - &i.LlmProviderID, - &i.AccessToken, - &i.RefreshToken, - &i.ExpiresAt, - &i.Scope, - &i.TokenType, - &i.State, - &i.PkceCodeVerifier, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const updateLlmProviderOAuthState = `-- name: UpdateLlmProviderOAuthState :exec -INSERT INTO llm_provider_oauth_tokens (llm_provider_id, state, pkce_code_verifier) -VALUES ( - $1, - $2, - $3 -) -ON CONFLICT (llm_provider_id) DO UPDATE SET - state = EXCLUDED.state, - pkce_code_verifier = EXCLUDED.pkce_code_verifier, - updated_at = now() -` - -type UpdateLlmProviderOAuthStateParams struct { - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - State string `json:"state"` - PkceCodeVerifier string `json:"pkce_code_verifier"` -} - -func (q *Queries) UpdateLlmProviderOAuthState(ctx context.Context, arg UpdateLlmProviderOAuthStateParams) error { - _, err := q.db.Exec(ctx, updateLlmProviderOAuthState, arg.LlmProviderID, arg.State, arg.PkceCodeVerifier) - return err -} - -const upsertLlmProviderOAuthToken = `-- name: UpsertLlmProviderOAuthToken :one -INSERT INTO llm_provider_oauth_tokens ( - llm_provider_id, - access_token, - refresh_token, - expires_at, - scope, - token_type, - state, - pkce_code_verifier -) -VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8 -) -ON CONFLICT (llm_provider_id) DO UPDATE SET - access_token = EXCLUDED.access_token, - 
refresh_token = EXCLUDED.refresh_token, - expires_at = EXCLUDED.expires_at, - scope = EXCLUDED.scope, - token_type = EXCLUDED.token_type, - state = EXCLUDED.state, - pkce_code_verifier = EXCLUDED.pkce_code_verifier, - updated_at = now() -RETURNING id, llm_provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at -` - -type UpsertLlmProviderOAuthTokenParams struct { - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` - ExpiresAt pgtype.Timestamptz `json:"expires_at"` - Scope string `json:"scope"` - TokenType string `json:"token_type"` - State string `json:"state"` - PkceCodeVerifier string `json:"pkce_code_verifier"` -} - -func (q *Queries) UpsertLlmProviderOAuthToken(ctx context.Context, arg UpsertLlmProviderOAuthTokenParams) (LlmProviderOauthToken, error) { - row := q.db.QueryRow(ctx, upsertLlmProviderOAuthToken, - arg.LlmProviderID, - arg.AccessToken, - arg.RefreshToken, - arg.ExpiresAt, - arg.Scope, - arg.TokenType, - arg.State, - arg.PkceCodeVerifier, - ) - var i LlmProviderOauthToken - err := row.Scan( - &i.ID, - &i.LlmProviderID, - &i.AccessToken, - &i.RefreshToken, - &i.ExpiresAt, - &i.Scope, - &i.TokenType, - &i.State, - &i.PkceCodeVerifier, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} diff --git a/internal/db/sqlc/models.go b/internal/db/sqlc/models.go index 055bb32b..701187fc 100644 --- a/internal/db/sqlc/models.go +++ b/internal/db/sqlc/models.go @@ -298,33 +298,6 @@ type LifecycleEvent struct { CreatedAt pgtype.Timestamptz `json:"created_at"` } -type LlmProvider struct { - ID pgtype.UUID `json:"id"` - Name string `json:"name"` - BaseUrl string `json:"base_url"` - ApiKey string `json:"api_key"` - Icon pgtype.Text `json:"icon"` - Enable bool `json:"enable"` - Metadata []byte `json:"metadata"` - CreatedAt pgtype.Timestamptz `json:"created_at"` - UpdatedAt pgtype.Timestamptz `json:"updated_at"` - 
ClientType string `json:"client_type"` -} - -type LlmProviderOauthToken struct { - ID pgtype.UUID `json:"id"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - AccessToken string `json:"access_token"` - RefreshToken string `json:"refresh_token"` - ExpiresAt pgtype.Timestamptz `json:"expires_at"` - Scope string `json:"scope"` - TokenType string `json:"token_type"` - State string `json:"state"` - PkceCodeVerifier string `json:"pkce_code_verifier"` - CreatedAt pgtype.Timestamptz `json:"created_at"` - UpdatedAt pgtype.Timestamptz `json:"updated_at"` -} - type McpConnection struct { ID pgtype.UUID `json:"id"` BotID pgtype.UUID `json:"bot_id"` @@ -393,14 +366,14 @@ type MemoryProvider struct { } type Model struct { - ID pgtype.UUID `json:"id"` - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Type string `json:"type"` - Config []byte `json:"config"` - CreatedAt pgtype.Timestamptz `json:"created_at"` - UpdatedAt pgtype.Timestamptz `json:"updated_at"` + ID pgtype.UUID `json:"id"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` } type ModelVariant struct { @@ -413,6 +386,32 @@ type ModelVariant struct { UpdatedAt pgtype.Timestamptz `json:"updated_at"` } +type Provider struct { + ID pgtype.UUID `json:"id"` + Name string `json:"name"` + ClientType string `json:"client_type"` + Icon pgtype.Text `json:"icon"` + Enable bool `json:"enable"` + Config []byte `json:"config"` + Metadata []byte `json:"metadata"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + +type ProviderOauthToken struct { + ID pgtype.UUID `json:"id"` + ProviderID pgtype.UUID `json:"provider_id"` + AccessToken string `json:"access_token"` + RefreshToken 
string `json:"refresh_token"` + ExpiresAt pgtype.Timestamptz `json:"expires_at"` + Scope string `json:"scope"` + TokenType string `json:"token_type"` + State string `json:"state"` + PkceCodeVerifier string `json:"pkce_code_verifier"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type Schedule struct { ID pgtype.UUID `json:"id"` Name string `json:"name"` @@ -486,9 +485,9 @@ type TtsProvider struct { Name string `json:"name"` Provider string `json:"provider"` Config []byte `json:"config"` - Enable bool `json:"enable"` CreatedAt pgtype.Timestamptz `json:"created_at"` UpdatedAt pgtype.Timestamptz `json:"updated_at"` + Enable bool `json:"enable"` } type User struct { diff --git a/internal/db/sqlc/models.sql.go b/internal/db/sqlc/models.sql.go index b108371c..a0874769 100644 --- a/internal/db/sqlc/models.sql.go +++ b/internal/db/sqlc/models.sql.go @@ -11,17 +11,6 @@ import ( "github.com/jackc/pgx/v5/pgtype" ) -const countLlmProviders = `-- name: CountLlmProviders :one -SELECT COUNT(*) FROM llm_providers -` - -func (q *Queries) CountLlmProviders(ctx context.Context) (int64, error) { - row := q.db.QueryRow(ctx, countLlmProviders) - var count int64 - err := row.Scan(&count) - return count, err -} - const countModels = `-- name: CountModels :one SELECT COUNT(*) FROM models ` @@ -44,58 +33,19 @@ func (q *Queries) CountModelsByType(ctx context.Context, type_ string) (int64, e return count, err } -const createLlmProvider = `-- name: CreateLlmProvider :one -INSERT INTO llm_providers (name, base_url, api_key, client_type, icon, enable, metadata) -VALUES ( - $1, - $2, - $3, - $4, - $5, - $6, - $7 -) -RETURNING id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type +const countProviders = `-- name: CountProviders :one +SELECT COUNT(*) FROM providers ` -type CreateLlmProviderParams struct { - Name string `json:"name"` - BaseUrl string `json:"base_url"` - ApiKey string `json:"api_key"` - 
ClientType string `json:"client_type"` - Icon pgtype.Text `json:"icon"` - Enable bool `json:"enable"` - Metadata []byte `json:"metadata"` -} - -func (q *Queries) CreateLlmProvider(ctx context.Context, arg CreateLlmProviderParams) (LlmProvider, error) { - row := q.db.QueryRow(ctx, createLlmProvider, - arg.Name, - arg.BaseUrl, - arg.ApiKey, - arg.ClientType, - arg.Icon, - arg.Enable, - arg.Metadata, - ) - var i LlmProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.BaseUrl, - &i.ApiKey, - &i.Icon, - &i.Enable, - &i.Metadata, - &i.CreatedAt, - &i.UpdatedAt, - &i.ClientType, - ) - return i, err +func (q *Queries) CountProviders(ctx context.Context) (int64, error) { + row := q.db.QueryRow(ctx, countProviders) + var count int64 + err := row.Scan(&count) + return count, err } const createModel = `-- name: CreateModel :one -INSERT INTO models (model_id, name, llm_provider_id, type, config) +INSERT INTO models (model_id, name, provider_id, type, config) VALUES ( $1, $2, @@ -103,22 +53,22 @@ VALUES ( $4, $5 ) -RETURNING id, model_id, name, llm_provider_id, type, config, created_at, updated_at +RETURNING id, model_id, name, provider_id, type, config, created_at, updated_at ` type CreateModelParams struct { - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Type string `json:"type"` - Config []byte `json:"config"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` } func (q *Queries) CreateModel(ctx context.Context, arg CreateModelParams) (Model, error) { row := q.db.QueryRow(ctx, createModel, arg.ModelID, arg.Name, - arg.LlmProviderID, + arg.ProviderID, arg.Type, arg.Config, ) @@ -127,7 +77,7 @@ func (q *Queries) CreateModel(ctx context.Context, arg CreateModelParams) (Model &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -174,13 
+124,50 @@ func (q *Queries) CreateModelVariant(ctx context.Context, arg CreateModelVariant return i, err } -const deleteLlmProvider = `-- name: DeleteLlmProvider :exec -DELETE FROM llm_providers WHERE id = $1 +const createProvider = `-- name: CreateProvider :one +INSERT INTO providers (name, client_type, icon, enable, config, metadata) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6 +) +RETURNING id, name, client_type, icon, enable, config, metadata, created_at, updated_at ` -func (q *Queries) DeleteLlmProvider(ctx context.Context, id pgtype.UUID) error { - _, err := q.db.Exec(ctx, deleteLlmProvider, id) - return err +type CreateProviderParams struct { + Name string `json:"name"` + ClientType string `json:"client_type"` + Icon pgtype.Text `json:"icon"` + Enable bool `json:"enable"` + Config []byte `json:"config"` + Metadata []byte `json:"metadata"` +} + +func (q *Queries) CreateProvider(ctx context.Context, arg CreateProviderParams) (Provider, error) { + row := q.db.QueryRow(ctx, createProvider, + arg.Name, + arg.ClientType, + arg.Icon, + arg.Enable, + arg.Config, + arg.Metadata, + ) + var i Provider + err := row.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err } const deleteModel = `-- name: DeleteModel :exec @@ -201,52 +188,17 @@ func (q *Queries) DeleteModelByModelID(ctx context.Context, modelID string) erro return err } -const getLlmProviderByID = `-- name: GetLlmProviderByID :one -SELECT id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type FROM llm_providers WHERE id = $1 +const deleteProvider = `-- name: DeleteProvider :exec +DELETE FROM providers WHERE id = $1 ` -func (q *Queries) GetLlmProviderByID(ctx context.Context, id pgtype.UUID) (LlmProvider, error) { - row := q.db.QueryRow(ctx, getLlmProviderByID, id) - var i LlmProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.BaseUrl, - &i.ApiKey, - &i.Icon, - &i.Enable, - 
&i.Metadata, - &i.CreatedAt, - &i.UpdatedAt, - &i.ClientType, - ) - return i, err -} - -const getLlmProviderByName = `-- name: GetLlmProviderByName :one -SELECT id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type FROM llm_providers WHERE name = $1 -` - -func (q *Queries) GetLlmProviderByName(ctx context.Context, name string) (LlmProvider, error) { - row := q.db.QueryRow(ctx, getLlmProviderByName, name) - var i LlmProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.BaseUrl, - &i.ApiKey, - &i.Icon, - &i.Enable, - &i.Metadata, - &i.CreatedAt, - &i.UpdatedAt, - &i.ClientType, - ) - return i, err +func (q *Queries) DeleteProvider(ctx context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteProvider, id) + return err } const getModelByID = `-- name: GetModelByID :one -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models WHERE id = $1 +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models WHERE id = $1 ` func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, error) { @@ -256,7 +208,7 @@ func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, erro &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -266,7 +218,7 @@ func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, erro } const getModelByModelID = `-- name: GetModelByModelID :one -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models WHERE model_id = $1 +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models WHERE model_id = $1 ` func (q *Queries) GetModelByModelID(ctx context.Context, modelID string) (Model, error) { @@ -276,7 +228,7 @@ func (q *Queries) GetModelByModelID(ctx context.Context, modelID string) (Model, &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, 
&i.CreatedAt, @@ -285,10 +237,119 @@ func (q *Queries) GetModelByModelID(ctx context.Context, modelID string) (Model, return i, err } -const listEnabledModels = `-- name: ListEnabledModels :many -SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.type, m.config, m.created_at, m.updated_at +const getModelByProviderAndModelID = `-- name: GetModelByProviderAndModelID :one +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models +WHERE provider_id = $1 + AND model_id = $2 +LIMIT 1 +` + +type GetModelByProviderAndModelIDParams struct { + ProviderID pgtype.UUID `json:"provider_id"` + ModelID string `json:"model_id"` +} + +func (q *Queries) GetModelByProviderAndModelID(ctx context.Context, arg GetModelByProviderAndModelIDParams) (Model, error) { + row := q.db.QueryRow(ctx, getModelByProviderAndModelID, arg.ProviderID, arg.ModelID) + var i Model + err := row.Scan( + &i.ID, + &i.ModelID, + &i.Name, + &i.ProviderID, + &i.Type, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getProviderByID = `-- name: GetProviderByID :one +SELECT id, name, client_type, icon, enable, config, metadata, created_at, updated_at FROM providers WHERE id = $1 +` + +func (q *Queries) GetProviderByID(ctx context.Context, id pgtype.UUID) (Provider, error) { + row := q.db.QueryRow(ctx, getProviderByID, id) + var i Provider + err := row.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getProviderByName = `-- name: GetProviderByName :one +SELECT id, name, client_type, icon, enable, config, metadata, created_at, updated_at FROM providers WHERE name = $1 +` + +func (q *Queries) GetProviderByName(ctx context.Context, name string) (Provider, error) { + row := q.db.QueryRow(ctx, getProviderByName, name) + var i Provider + err := row.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + 
&i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getSpeechModelWithProvider = `-- name: GetSpeechModelWithProvider :one +SELECT + m.id, m.model_id, m.name, m.provider_id, m.type, m.config, m.created_at, m.updated_at, + p.client_type AS provider_type FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON p.id = m.provider_id +WHERE m.id = $1 + AND m.type = 'speech' +` + +type GetSpeechModelWithProviderRow struct { + ID pgtype.UUID `json:"id"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + ProviderType string `json:"provider_type"` +} + +func (q *Queries) GetSpeechModelWithProvider(ctx context.Context, id pgtype.UUID) (GetSpeechModelWithProviderRow, error) { + row := q.db.QueryRow(ctx, getSpeechModelWithProvider, id) + var i GetSpeechModelWithProviderRow + err := row.Scan( + &i.ID, + &i.ModelID, + &i.Name, + &i.ProviderID, + &i.Type, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + &i.ProviderType, + ) + return i, err +} + +const listEnabledModels = `-- name: ListEnabledModels :many +SELECT m.id, m.model_id, m.name, m.provider_id, m.type, m.config, m.created_at, m.updated_at +FROM models m +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true ORDER BY m.created_at DESC ` @@ -306,7 +367,7 @@ func (q *Queries) ListEnabledModels(ctx context.Context) ([]Model, error) { &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -323,9 +384,9 @@ func (q *Queries) ListEnabledModels(ctx context.Context) ([]Model, error) { } const listEnabledModelsByProviderClientType = `-- name: ListEnabledModelsByProviderClientType :many -SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.type, m.config, m.created_at, m.updated_at +SELECT m.id, m.model_id, m.name, 
m.provider_id, m.type, m.config, m.created_at, m.updated_at FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true AND p.client_type = $1 ORDER BY m.created_at DESC @@ -344,7 +405,7 @@ func (q *Queries) ListEnabledModelsByProviderClientType(ctx context.Context, cli &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -361,9 +422,9 @@ func (q *Queries) ListEnabledModelsByProviderClientType(ctx context.Context, cli } const listEnabledModelsByType = `-- name: ListEnabledModelsByType :many -SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.type, m.config, m.created_at, m.updated_at +SELECT m.id, m.model_id, m.name, m.provider_id, m.type, m.config, m.created_at, m.updated_at FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.enable = true AND m.type = $1 ORDER BY m.created_at DESC @@ -382,7 +443,7 @@ func (q *Queries) ListEnabledModelsByType(ctx context.Context, type_ string) ([] &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -398,42 +459,6 @@ func (q *Queries) ListEnabledModelsByType(ctx context.Context, type_ string) ([] return items, nil } -const listLlmProviders = `-- name: ListLlmProviders :many -SELECT id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type FROM llm_providers -ORDER BY created_at DESC -` - -func (q *Queries) ListLlmProviders(ctx context.Context) ([]LlmProvider, error) { - rows, err := q.db.Query(ctx, listLlmProviders) - if err != nil { - return nil, err - } - defer rows.Close() - var items []LlmProvider - for rows.Next() { - var i LlmProvider - if err := rows.Scan( - &i.ID, - &i.Name, - &i.BaseUrl, - &i.ApiKey, - &i.Icon, - &i.Enable, - &i.Metadata, - &i.CreatedAt, - &i.UpdatedAt, - &i.ClientType, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err 
:= rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const listModelVariantsByModelUUID = `-- name: ListModelVariantsByModelUUID :many SELECT id, model_uuid, variant_id, weight, metadata, created_at, updated_at FROM model_variants WHERE model_uuid = $1 @@ -469,7 +494,7 @@ func (q *Queries) ListModelVariantsByModelUUID(ctx context.Context, modelUuid pg } const listModels = `-- name: ListModels :many -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models ORDER BY created_at DESC ` @@ -486,7 +511,7 @@ func (q *Queries) ListModels(ctx context.Context) ([]Model, error) { &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -503,7 +528,7 @@ func (q *Queries) ListModels(ctx context.Context) ([]Model, error) { } const listModelsByModelID = `-- name: ListModelsByModelID :many -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models WHERE model_id = $1 ORDER BY created_at DESC ` @@ -521,7 +546,7 @@ func (q *Queries) ListModelsByModelID(ctx context.Context, modelID string) ([]Mo &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -538,9 +563,9 @@ func (q *Queries) ListModelsByModelID(ctx context.Context, modelID string) ([]Mo } const listModelsByProviderClientType = `-- name: ListModelsByProviderClientType :many -SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.type, m.config, m.created_at, m.updated_at +SELECT m.id, m.model_id, m.name, m.provider_id, m.type, m.config, m.created_at, m.updated_at FROM models m -JOIN llm_providers p ON m.llm_provider_id = p.id +JOIN providers p ON m.provider_id = p.id WHERE p.client_type = $1 ORDER BY m.created_at DESC ` @@ -558,7 +583,7 @@ func (q *Queries) 
ListModelsByProviderClientType(ctx context.Context, clientType &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -575,13 +600,13 @@ func (q *Queries) ListModelsByProviderClientType(ctx context.Context, clientType } const listModelsByProviderID = `-- name: ListModelsByProviderID :many -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models -WHERE llm_provider_id = $1 +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models +WHERE provider_id = $1 ORDER BY created_at DESC ` -func (q *Queries) ListModelsByProviderID(ctx context.Context, llmProviderID pgtype.UUID) ([]Model, error) { - rows, err := q.db.Query(ctx, listModelsByProviderID, llmProviderID) +func (q *Queries) ListModelsByProviderID(ctx context.Context, providerID pgtype.UUID) ([]Model, error) { + rows, err := q.db.Query(ctx, listModelsByProviderID, providerID) if err != nil { return nil, err } @@ -593,7 +618,7 @@ func (q *Queries) ListModelsByProviderID(ctx context.Context, llmProviderID pgty &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -610,19 +635,19 @@ func (q *Queries) ListModelsByProviderID(ctx context.Context, llmProviderID pgty } const listModelsByProviderIDAndType = `-- name: ListModelsByProviderIDAndType :many -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models -WHERE llm_provider_id = $1 +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models +WHERE provider_id = $1 AND type = $2 ORDER BY created_at DESC ` type ListModelsByProviderIDAndTypeParams struct { - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Type string `json:"type"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` } func (q *Queries) ListModelsByProviderIDAndType(ctx context.Context, arg ListModelsByProviderIDAndTypeParams) ([]Model, error) { - rows, err 
:= q.db.Query(ctx, listModelsByProviderIDAndType, arg.LlmProviderID, arg.Type) + rows, err := q.db.Query(ctx, listModelsByProviderIDAndType, arg.ProviderID, arg.Type) if err != nil { return nil, err } @@ -634,7 +659,7 @@ func (q *Queries) ListModelsByProviderIDAndType(ctx context.Context, arg ListMod &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -651,7 +676,7 @@ func (q *Queries) ListModelsByProviderIDAndType(ctx context.Context, arg ListMod } const listModelsByType = `-- name: ListModelsByType :many -SELECT id, model_id, name, llm_provider_id, type, config, created_at, updated_at FROM models +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models WHERE type = $1 ORDER BY created_at DESC ` @@ -669,7 +694,7 @@ func (q *Queries) ListModelsByType(ctx context.Context, type_ string) ([]Model, &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -685,57 +710,163 @@ func (q *Queries) ListModelsByType(ctx context.Context, type_ string) ([]Model, return items, nil } -const updateLlmProvider = `-- name: UpdateLlmProvider :one -UPDATE llm_providers -SET - name = $1, - base_url = $2, - api_key = $3, - client_type = $4, - icon = $5, - enable = $6, - metadata = $7, - updated_at = now() -WHERE id = $8 -RETURNING id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type +const listProviders = `-- name: ListProviders :many +SELECT id, name, client_type, icon, enable, config, metadata, created_at, updated_at FROM providers +WHERE client_type NOT IN ('edge-speech') +ORDER BY created_at DESC ` -type UpdateLlmProviderParams struct { - Name string `json:"name"` - BaseUrl string `json:"base_url"` - ApiKey string `json:"api_key"` - ClientType string `json:"client_type"` - Icon pgtype.Text `json:"icon"` - Enable bool `json:"enable"` - Metadata []byte `json:"metadata"` - ID pgtype.UUID `json:"id"` +func (q *Queries) 
ListProviders(ctx context.Context) ([]Provider, error) { + rows, err := q.db.Query(ctx, listProviders) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Provider + for rows.Next() { + var i Provider + if err := rows.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } -func (q *Queries) UpdateLlmProvider(ctx context.Context, arg UpdateLlmProviderParams) (LlmProvider, error) { - row := q.db.QueryRow(ctx, updateLlmProvider, - arg.Name, - arg.BaseUrl, - arg.ApiKey, - arg.ClientType, - arg.Icon, - arg.Enable, - arg.Metadata, - arg.ID, - ) - var i LlmProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.BaseUrl, - &i.ApiKey, - &i.Icon, - &i.Enable, - &i.Metadata, - &i.CreatedAt, - &i.UpdatedAt, - &i.ClientType, - ) - return i, err +const listSpeechModels = `-- name: ListSpeechModels :many +SELECT m.id, m.model_id, m.name, m.provider_id, m.type, m.config, m.created_at, m.updated_at, + p.client_type AS provider_type +FROM models m +JOIN providers p ON p.id = m.provider_id +WHERE m.type = 'speech' +ORDER BY m.created_at DESC +` + +type ListSpeechModelsRow struct { + ID pgtype.UUID `json:"id"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` + ProviderType string `json:"provider_type"` +} + +func (q *Queries) ListSpeechModels(ctx context.Context) ([]ListSpeechModelsRow, error) { + rows, err := q.db.Query(ctx, listSpeechModels) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListSpeechModelsRow + for rows.Next() { + var i ListSpeechModelsRow + if err := rows.Scan( + &i.ID, + 
&i.ModelID, + &i.Name, + &i.ProviderID, + &i.Type, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + &i.ProviderType, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listSpeechModelsByProviderID = `-- name: ListSpeechModelsByProviderID :many +SELECT id, model_id, name, provider_id, type, config, created_at, updated_at FROM models +WHERE provider_id = $1 + AND type = 'speech' +ORDER BY created_at DESC +` + +func (q *Queries) ListSpeechModelsByProviderID(ctx context.Context, providerID pgtype.UUID) ([]Model, error) { + rows, err := q.db.Query(ctx, listSpeechModelsByProviderID, providerID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Model + for rows.Next() { + var i Model + if err := rows.Scan( + &i.ID, + &i.ModelID, + &i.Name, + &i.ProviderID, + &i.Type, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listSpeechProviders = `-- name: ListSpeechProviders :many +SELECT id, name, client_type, icon, enable, config, metadata, created_at, updated_at FROM providers +WHERE client_type IN ('edge-speech') +ORDER BY created_at DESC +` + +func (q *Queries) ListSpeechProviders(ctx context.Context) ([]Provider, error) { + rows, err := q.db.Query(ctx, listSpeechProviders) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Provider + for rows.Next() { + var i Provider + if err := rows.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } const updateModel = `-- name: UpdateModel :one @@ -743,28 +874,28 @@ UPDATE models SET model_id = $1, name = 
$2, - llm_provider_id = $3, + provider_id = $3, type = $4, config = $5, updated_at = now() WHERE id = $6 -RETURNING id, model_id, name, llm_provider_id, type, config, created_at, updated_at +RETURNING id, model_id, name, provider_id, type, config, created_at, updated_at ` type UpdateModelParams struct { - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Type string `json:"type"` - Config []byte `json:"config"` - ID pgtype.UUID `json:"id"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` + ID pgtype.UUID `json:"id"` } func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model, error) { row := q.db.QueryRow(ctx, updateModel, arg.ModelID, arg.Name, - arg.LlmProviderID, + arg.ProviderID, arg.Type, arg.Config, arg.ID, @@ -774,7 +905,7 @@ func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -783,30 +914,79 @@ func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model return i, err } +const updateProvider = `-- name: UpdateProvider :one +UPDATE providers +SET + name = $1, + client_type = $2, + icon = $3, + enable = $4, + config = $5, + metadata = $6, + updated_at = now() +WHERE id = $7 +RETURNING id, name, client_type, icon, enable, config, metadata, created_at, updated_at +` + +type UpdateProviderParams struct { + Name string `json:"name"` + ClientType string `json:"client_type"` + Icon pgtype.Text `json:"icon"` + Enable bool `json:"enable"` + Config []byte `json:"config"` + Metadata []byte `json:"metadata"` + ID pgtype.UUID `json:"id"` +} + +func (q *Queries) UpdateProvider(ctx context.Context, arg UpdateProviderParams) (Provider, error) { + row := q.db.QueryRow(ctx, updateProvider, + arg.Name, + 
arg.ClientType, + arg.Icon, + arg.Enable, + arg.Config, + arg.Metadata, + arg.ID, + ) + var i Provider + err := row.Scan( + &i.ID, + &i.Name, + &i.ClientType, + &i.Icon, + &i.Enable, + &i.Config, + &i.Metadata, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + const upsertRegistryModel = `-- name: UpsertRegistryModel :one -INSERT INTO models (model_id, name, llm_provider_id, type, config) +INSERT INTO models (model_id, name, provider_id, type, config) VALUES ($1, $2, $3, $4, $5) -ON CONFLICT (llm_provider_id, model_id) DO UPDATE SET +ON CONFLICT (provider_id, model_id) DO UPDATE SET name = EXCLUDED.name, type = EXCLUDED.type, config = EXCLUDED.config, updated_at = now() -RETURNING id, model_id, name, llm_provider_id, type, config, created_at, updated_at +RETURNING id, model_id, name, provider_id, type, config, created_at, updated_at ` type UpsertRegistryModelParams struct { - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Type string `json:"type"` - Config []byte `json:"config"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + ProviderID pgtype.UUID `json:"provider_id"` + Type string `json:"type"` + Config []byte `json:"config"` } func (q *Queries) UpsertRegistryModel(ctx context.Context, arg UpsertRegistryModelParams) (Model, error) { row := q.db.QueryRow(ctx, upsertRegistryModel, arg.ModelID, arg.Name, - arg.LlmProviderID, + arg.ProviderID, arg.Type, arg.Config, ) @@ -815,7 +995,7 @@ func (q *Queries) UpsertRegistryModel(ctx context.Context, arg UpsertRegistryMod &i.ID, &i.ModelID, &i.Name, - &i.LlmProviderID, + &i.ProviderID, &i.Type, &i.Config, &i.CreatedAt, @@ -825,41 +1005,45 @@ func (q *Queries) UpsertRegistryModel(ctx context.Context, arg UpsertRegistryMod } const upsertRegistryProvider = `-- name: UpsertRegistryProvider :one -INSERT INTO llm_providers (name, base_url, api_key, client_type, icon, enable, metadata) -VALUES ($1, $2, '', $3, $4, false, '{}') 
+INSERT INTO providers (name, client_type, icon, enable, config, metadata) +VALUES ($1, $2, $3, false, $4, '{}') ON CONFLICT (name) DO UPDATE SET icon = EXCLUDED.icon, client_type = EXCLUDED.client_type, + config = CASE + WHEN providers.config->>'api_key' IS NOT NULL AND providers.config->>'api_key' != '' + THEN jsonb_set(EXCLUDED.config, '{api_key}', providers.config->'api_key') + ELSE EXCLUDED.config + END, updated_at = now() -RETURNING id, name, base_url, api_key, icon, enable, metadata, created_at, updated_at, client_type +RETURNING id, name, client_type, icon, enable, config, metadata, created_at, updated_at ` type UpsertRegistryProviderParams struct { Name string `json:"name"` - BaseUrl string `json:"base_url"` ClientType string `json:"client_type"` Icon pgtype.Text `json:"icon"` + Config []byte `json:"config"` } -func (q *Queries) UpsertRegistryProvider(ctx context.Context, arg UpsertRegistryProviderParams) (LlmProvider, error) { +func (q *Queries) UpsertRegistryProvider(ctx context.Context, arg UpsertRegistryProviderParams) (Provider, error) { row := q.db.QueryRow(ctx, upsertRegistryProvider, arg.Name, - arg.BaseUrl, arg.ClientType, arg.Icon, + arg.Config, ) - var i LlmProvider + var i Provider err := row.Scan( &i.ID, &i.Name, - &i.BaseUrl, - &i.ApiKey, + &i.ClientType, &i.Icon, &i.Enable, + &i.Config, &i.Metadata, &i.CreatedAt, &i.UpdatedAt, - &i.ClientType, ) return i, err } diff --git a/internal/db/sqlc/provider_oauth.sql.go b/internal/db/sqlc/provider_oauth.sql.go new file mode 100644 index 00000000..49414e7c --- /dev/null +++ b/internal/db/sqlc/provider_oauth.sql.go @@ -0,0 +1,163 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.30.0 +// source: provider_oauth.sql + +package sqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const deleteProviderOAuthToken = `-- name: DeleteProviderOAuthToken :exec +DELETE FROM provider_oauth_tokens WHERE provider_id = $1 +` + +func (q *Queries) DeleteProviderOAuthToken(ctx context.Context, providerID pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteProviderOAuthToken, providerID) + return err +} + +const getProviderOAuthTokenByProvider = `-- name: GetProviderOAuthTokenByProvider :one +SELECT id, provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at FROM provider_oauth_tokens WHERE provider_id = $1 +` + +func (q *Queries) GetProviderOAuthTokenByProvider(ctx context.Context, providerID pgtype.UUID) (ProviderOauthToken, error) { + row := q.db.QueryRow(ctx, getProviderOAuthTokenByProvider, providerID) + var i ProviderOauthToken + err := row.Scan( + &i.ID, + &i.ProviderID, + &i.AccessToken, + &i.RefreshToken, + &i.ExpiresAt, + &i.Scope, + &i.TokenType, + &i.State, + &i.PkceCodeVerifier, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getProviderOAuthTokenByState = `-- name: GetProviderOAuthTokenByState :one +SELECT id, provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at FROM provider_oauth_tokens WHERE state = $1 AND state != '' +` + +func (q *Queries) GetProviderOAuthTokenByState(ctx context.Context, state string) (ProviderOauthToken, error) { + row := q.db.QueryRow(ctx, getProviderOAuthTokenByState, state) + var i ProviderOauthToken + err := row.Scan( + &i.ID, + &i.ProviderID, + &i.AccessToken, + &i.RefreshToken, + &i.ExpiresAt, + &i.Scope, + &i.TokenType, + &i.State, + &i.PkceCodeVerifier, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const updateProviderOAuthState = `-- name: UpdateProviderOAuthState :exec +INSERT INTO provider_oauth_tokens 
(provider_id, state, pkce_code_verifier) +VALUES ( + $1, + $2, + $3 +) +ON CONFLICT (provider_id) DO UPDATE SET + state = EXCLUDED.state, + pkce_code_verifier = EXCLUDED.pkce_code_verifier, + updated_at = now() +` + +type UpdateProviderOAuthStateParams struct { + ProviderID pgtype.UUID `json:"provider_id"` + State string `json:"state"` + PkceCodeVerifier string `json:"pkce_code_verifier"` +} + +func (q *Queries) UpdateProviderOAuthState(ctx context.Context, arg UpdateProviderOAuthStateParams) error { + _, err := q.db.Exec(ctx, updateProviderOAuthState, arg.ProviderID, arg.State, arg.PkceCodeVerifier) + return err +} + +const upsertProviderOAuthToken = `-- name: UpsertProviderOAuthToken :one +INSERT INTO provider_oauth_tokens ( + provider_id, + access_token, + refresh_token, + expires_at, + scope, + token_type, + state, + pkce_code_verifier +) +VALUES ( + $1, + $2, + $3, + $4, + $5, + $6, + $7, + $8 +) +ON CONFLICT (provider_id) DO UPDATE SET + access_token = EXCLUDED.access_token, + refresh_token = EXCLUDED.refresh_token, + expires_at = EXCLUDED.expires_at, + scope = EXCLUDED.scope, + token_type = EXCLUDED.token_type, + state = EXCLUDED.state, + pkce_code_verifier = EXCLUDED.pkce_code_verifier, + updated_at = now() +RETURNING id, provider_id, access_token, refresh_token, expires_at, scope, token_type, state, pkce_code_verifier, created_at, updated_at +` + +type UpsertProviderOAuthTokenParams struct { + ProviderID pgtype.UUID `json:"provider_id"` + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + ExpiresAt pgtype.Timestamptz `json:"expires_at"` + Scope string `json:"scope"` + TokenType string `json:"token_type"` + State string `json:"state"` + PkceCodeVerifier string `json:"pkce_code_verifier"` +} + +func (q *Queries) UpsertProviderOAuthToken(ctx context.Context, arg UpsertProviderOAuthTokenParams) (ProviderOauthToken, error) { + row := q.db.QueryRow(ctx, upsertProviderOAuthToken, + arg.ProviderID, + arg.AccessToken, + 
arg.RefreshToken, + arg.ExpiresAt, + arg.Scope, + arg.TokenType, + arg.State, + arg.PkceCodeVerifier, + ) + var i ProviderOauthToken + err := row.Scan( + &i.ID, + &i.ProviderID, + &i.AccessToken, + &i.RefreshToken, + &i.ExpiresAt, + &i.Scope, + &i.TokenType, + &i.State, + &i.PkceCodeVerifier, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} diff --git a/internal/db/sqlc/settings.sql.go b/internal/db/sqlc/settings.sql.go index 28873e3a..305b8ae3 100644 --- a/internal/db/sqlc/settings.sql.go +++ b/internal/db/sqlc/settings.sql.go @@ -69,7 +69,7 @@ LEFT JOIN models AS title_models ON title_models.id = bots.title_model_id LEFT JOIN models AS image_models ON image_models.id = bots.image_model_id LEFT JOIN search_providers ON search_providers.id = bots.search_provider_id LEFT JOIN memory_providers ON memory_providers.id = bots.memory_provider_id -LEFT JOIN tts_models ON tts_models.id = bots.tts_model_id +LEFT JOIN models AS tts_models ON tts_models.id = bots.tts_model_id LEFT JOIN browser_contexts ON browser_contexts.id = bots.browser_context_id WHERE bots.id = $1 ` @@ -176,7 +176,7 @@ LEFT JOIN models AS title_models ON title_models.id = updated.title_model_id LEFT JOIN models AS image_models ON image_models.id = updated.image_model_id LEFT JOIN search_providers ON search_providers.id = updated.search_provider_id LEFT JOIN memory_providers ON memory_providers.id = updated.memory_provider_id -LEFT JOIN tts_models ON tts_models.id = updated.tts_model_id +LEFT JOIN models AS tts_models ON tts_models.id = updated.tts_model_id LEFT JOIN browser_contexts ON browser_contexts.id = updated.browser_context_id ` diff --git a/internal/db/sqlc/token_usage.sql.go b/internal/db/sqlc/token_usage.sql.go index b84eb1a2..7a1a7381 100644 --- a/internal/db/sqlc/token_usage.sql.go +++ b/internal/db/sqlc/token_usage.sql.go @@ -95,7 +95,7 @@ SELECT COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens FROM bot_history_messages m LEFT JOIN models mo ON mo.id = 
m.model_id -LEFT JOIN llm_providers lp ON lp.id = mo.llm_provider_id +LEFT JOIN providers lp ON lp.id = mo.provider_id WHERE m.bot_id = $1 AND m.usage IS NOT NULL AND m.created_at >= $2 diff --git a/internal/db/sqlc/tts_models.sql.go b/internal/db/sqlc/tts_models.sql.go deleted file mode 100644 index e29821e9..00000000 --- a/internal/db/sqlc/tts_models.sql.go +++ /dev/null @@ -1,248 +0,0 @@ -// Code generated by sqlc. DO NOT EDIT. -// versions: -// sqlc v1.30.0 -// source: tts_models.sql - -package sqlc - -import ( - "context" - - "github.com/jackc/pgx/v5/pgtype" -) - -const createTtsModel = `-- name: CreateTtsModel :one -INSERT INTO tts_models (model_id, name, tts_provider_id, config) -VALUES ( - $1, - $2, - $3, - $4 -) -RETURNING id, model_id, name, tts_provider_id, config, created_at, updated_at -` - -type CreateTtsModelParams struct { - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - TtsProviderID pgtype.UUID `json:"tts_provider_id"` - Config []byte `json:"config"` -} - -func (q *Queries) CreateTtsModel(ctx context.Context, arg CreateTtsModelParams) (TtsModel, error) { - row := q.db.QueryRow(ctx, createTtsModel, - arg.ModelID, - arg.Name, - arg.TtsProviderID, - arg.Config, - ) - var i TtsModel - err := row.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const deleteTtsModel = `-- name: DeleteTtsModel :exec -DELETE FROM tts_models WHERE id = $1 -` - -func (q *Queries) DeleteTtsModel(ctx context.Context, id pgtype.UUID) error { - _, err := q.db.Exec(ctx, deleteTtsModel, id) - return err -} - -const deleteTtsModelsByProviderID = `-- name: DeleteTtsModelsByProviderID :exec -DELETE FROM tts_models WHERE tts_provider_id = $1 -` - -func (q *Queries) DeleteTtsModelsByProviderID(ctx context.Context, ttsProviderID pgtype.UUID) error { - _, err := q.db.Exec(ctx, deleteTtsModelsByProviderID, ttsProviderID) - return err -} - -const getTtsModelByID = `-- name: 
GetTtsModelByID :one -SELECT id, model_id, name, tts_provider_id, config, created_at, updated_at FROM tts_models WHERE id = $1 -` - -func (q *Queries) GetTtsModelByID(ctx context.Context, id pgtype.UUID) (TtsModel, error) { - row := q.db.QueryRow(ctx, getTtsModelByID, id) - var i TtsModel - err := row.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const getTtsModelByProviderAndModelID = `-- name: GetTtsModelByProviderAndModelID :one -SELECT id, model_id, name, tts_provider_id, config, created_at, updated_at FROM tts_models -WHERE tts_provider_id = $1 - AND model_id = $2 -LIMIT 1 -` - -type GetTtsModelByProviderAndModelIDParams struct { - TtsProviderID pgtype.UUID `json:"tts_provider_id"` - ModelID string `json:"model_id"` -} - -func (q *Queries) GetTtsModelByProviderAndModelID(ctx context.Context, arg GetTtsModelByProviderAndModelIDParams) (TtsModel, error) { - row := q.db.QueryRow(ctx, getTtsModelByProviderAndModelID, arg.TtsProviderID, arg.ModelID) - var i TtsModel - err := row.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const getTtsModelWithProvider = `-- name: GetTtsModelWithProvider :one -SELECT - tm.id, tm.model_id, tm.name, tm.tts_provider_id, tm.config, tm.created_at, tm.updated_at, - tp.provider AS provider_type -FROM tts_models tm -JOIN tts_providers tp ON tp.id = tm.tts_provider_id -WHERE tm.id = $1 -` - -type GetTtsModelWithProviderRow struct { - ID pgtype.UUID `json:"id"` - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - TtsProviderID pgtype.UUID `json:"tts_provider_id"` - Config []byte `json:"config"` - CreatedAt pgtype.Timestamptz `json:"created_at"` - UpdatedAt pgtype.Timestamptz `json:"updated_at"` - ProviderType string `json:"provider_type"` -} - -func (q *Queries) GetTtsModelWithProvider(ctx context.Context, id pgtype.UUID) (GetTtsModelWithProviderRow, error) 
{ - row := q.db.QueryRow(ctx, getTtsModelWithProvider, id) - var i GetTtsModelWithProviderRow - err := row.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - &i.ProviderType, - ) - return i, err -} - -const listTtsModels = `-- name: ListTtsModels :many -SELECT id, model_id, name, tts_provider_id, config, created_at, updated_at FROM tts_models -ORDER BY created_at DESC -` - -func (q *Queries) ListTtsModels(ctx context.Context) ([]TtsModel, error) { - rows, err := q.db.Query(ctx, listTtsModels) - if err != nil { - return nil, err - } - defer rows.Close() - var items []TtsModel - for rows.Next() { - var i TtsModel - if err := rows.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listTtsModelsByProviderID = `-- name: ListTtsModelsByProviderID :many -SELECT id, model_id, name, tts_provider_id, config, created_at, updated_at FROM tts_models -WHERE tts_provider_id = $1 -ORDER BY created_at DESC -` - -func (q *Queries) ListTtsModelsByProviderID(ctx context.Context, ttsProviderID pgtype.UUID) ([]TtsModel, error) { - rows, err := q.db.Query(ctx, listTtsModelsByProviderID, ttsProviderID) - if err != nil { - return nil, err - } - defer rows.Close() - var items []TtsModel - for rows.Next() { - var i TtsModel - if err := rows.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const updateTtsModel = `-- name: UpdateTtsModel :one -UPDATE tts_models -SET - name = $1, - config = $2, - updated_at = now() -WHERE id = $3 -RETURNING id, model_id, name, tts_provider_id, config, created_at, updated_at -` - -type 
UpdateTtsModelParams struct { - Name pgtype.Text `json:"name"` - Config []byte `json:"config"` - ID pgtype.UUID `json:"id"` -} - -func (q *Queries) UpdateTtsModel(ctx context.Context, arg UpdateTtsModelParams) (TtsModel, error) { - row := q.db.QueryRow(ctx, updateTtsModel, arg.Name, arg.Config, arg.ID) - var i TtsModel - err := row.Scan( - &i.ID, - &i.ModelID, - &i.Name, - &i.TtsProviderID, - &i.Config, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} diff --git a/internal/db/sqlc/tts_providers.sql.go b/internal/db/sqlc/tts_providers.sql.go deleted file mode 100644 index 5ffcb43c..00000000 --- a/internal/db/sqlc/tts_providers.sql.go +++ /dev/null @@ -1,205 +0,0 @@ -// Code generated by sqlc. DO NOT EDIT. -// versions: -// sqlc v1.30.0 -// source: tts_providers.sql - -package sqlc - -import ( - "context" - - "github.com/jackc/pgx/v5/pgtype" -) - -const createTtsProvider = `-- name: CreateTtsProvider :one -INSERT INTO tts_providers (name, provider, config, enable) -VALUES ( - $1, - $2, - $3, - $4 -) -RETURNING id, name, provider, config, enable, created_at, updated_at -` - -type CreateTtsProviderParams struct { - Name string `json:"name"` - Provider string `json:"provider"` - Config []byte `json:"config"` - Enable bool `json:"enable"` -} - -func (q *Queries) CreateTtsProvider(ctx context.Context, arg CreateTtsProviderParams) (TtsProvider, error) { - row := q.db.QueryRow(ctx, createTtsProvider, - arg.Name, - arg.Provider, - arg.Config, - arg.Enable, - ) - var i TtsProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const deleteTtsProvider = `-- name: DeleteTtsProvider :exec -DELETE FROM tts_providers WHERE id = $1 -` - -func (q *Queries) DeleteTtsProvider(ctx context.Context, id pgtype.UUID) error { - _, err := q.db.Exec(ctx, deleteTtsProvider, id) - return err -} - -const getTtsProviderByID = `-- name: GetTtsProviderByID :one -SELECT id, name, provider, config, 
enable, created_at, updated_at FROM tts_providers WHERE id = $1 -` - -func (q *Queries) GetTtsProviderByID(ctx context.Context, id pgtype.UUID) (TtsProvider, error) { - row := q.db.QueryRow(ctx, getTtsProviderByID, id) - var i TtsProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const getTtsProviderByName = `-- name: GetTtsProviderByName :one -SELECT id, name, provider, config, enable, created_at, updated_at FROM tts_providers WHERE name = $1 -` - -func (q *Queries) GetTtsProviderByName(ctx context.Context, name string) (TtsProvider, error) { - row := q.db.QueryRow(ctx, getTtsProviderByName, name) - var i TtsProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} - -const listTtsProviders = `-- name: ListTtsProviders :many -SELECT id, name, provider, config, enable, created_at, updated_at FROM tts_providers -ORDER BY created_at DESC -` - -func (q *Queries) ListTtsProviders(ctx context.Context) ([]TtsProvider, error) { - rows, err := q.db.Query(ctx, listTtsProviders) - if err != nil { - return nil, err - } - defer rows.Close() - var items []TtsProvider - for rows.Next() { - var i TtsProvider - if err := rows.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const listTtsProvidersByProvider = `-- name: ListTtsProvidersByProvider :many -SELECT id, name, provider, config, enable, created_at, updated_at FROM tts_providers -WHERE provider = $1 -ORDER BY created_at DESC -` - -func (q *Queries) ListTtsProvidersByProvider(ctx context.Context, provider string) ([]TtsProvider, error) { - rows, err := q.db.Query(ctx, listTtsProvidersByProvider, provider) - if err != nil { - return nil, err - } 
- defer rows.Close() - var items []TtsProvider - for rows.Next() { - var i TtsProvider - if err := rows.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - -const updateTtsProvider = `-- name: UpdateTtsProvider :one -UPDATE tts_providers -SET - name = $1, - provider = $2, - config = $3, - enable = $4, - updated_at = now() -WHERE id = $5 -RETURNING id, name, provider, config, enable, created_at, updated_at -` - -type UpdateTtsProviderParams struct { - Name string `json:"name"` - Provider string `json:"provider"` - Config []byte `json:"config"` - Enable bool `json:"enable"` - ID pgtype.UUID `json:"id"` -} - -func (q *Queries) UpdateTtsProvider(ctx context.Context, arg UpdateTtsProviderParams) (TtsProvider, error) { - row := q.db.QueryRow(ctx, updateTtsProvider, - arg.Name, - arg.Provider, - arg.Config, - arg.Enable, - arg.ID, - ) - var i TtsProvider - err := row.Scan( - &i.ID, - &i.Name, - &i.Provider, - &i.Config, - &i.Enable, - &i.CreatedAt, - &i.UpdatedAt, - ) - return i, err -} diff --git a/internal/handlers/providers.go b/internal/handlers/providers.go index 37eaecf4..55dba261 100644 --- a/internal/handlers/providers.go +++ b/internal/handlers/providers.go @@ -62,9 +62,6 @@ func (h *ProvidersHandler) Create(c echo.Context) error { if req.Name == "" { return echo.NewHTTPError(http.StatusBadRequest, "name is required") } - if req.BaseURL == "" { - return echo.NewHTTPError(http.StatusBadRequest, "base_url is required") - } resp, err := h.service.Create(c.Request().Context(), req) if err != nil { @@ -327,10 +324,10 @@ func (h *ProvidersHandler) ImportModels(c echo.Context) error { name = m.ID } _, err := h.modelsService.Create(c.Request().Context(), models.AddRequest{ - ModelID: m.ID, - Name: name, - LlmProviderID: id, - Type: modelType, + ModelID: m.ID, + Name: name, 
+ ProviderID: id, + Type: modelType, Config: models.ModelConfig{ Compatibilities: compatibilities, ReasoningEfforts: m.ReasoningEfforts, diff --git a/internal/handlers/tts_providers.go b/internal/handlers/tts_providers.go index 69c1584f..f52afe43 100644 --- a/internal/handlers/tts_providers.go +++ b/internal/handlers/tts_providers.go @@ -10,239 +10,66 @@ import ( "github.com/memohai/memoh/internal/tts" ) -type TtsProvidersHandler struct { +type SpeechHandler struct { service *tts.Service logger *slog.Logger } -func NewTtsProvidersHandler(log *slog.Logger, service *tts.Service) *TtsProvidersHandler { - return &TtsProvidersHandler{ +func NewSpeechHandler(log *slog.Logger, service *tts.Service) *SpeechHandler { + return &SpeechHandler{ service: service, - logger: log.With(slog.String("handler", "tts_providers")), + logger: log.With(slog.String("handler", "speech")), } } -func (h *TtsProvidersHandler) Register(e *echo.Echo) { - g := e.Group("/tts-providers") - g.GET("/meta", h.ListMeta) - g.POST("", h.Create) - g.GET("", h.List) - g.GET("/:id", h.Get) - g.PUT("/:id", h.Update) - g.DELETE("/:id", h.Delete) +func (h *SpeechHandler) Register(e *echo.Echo) { + pg := e.Group("/speech-providers") + pg.GET("", h.ListProviders) + pg.GET("/meta", h.ListMeta) - g.GET("/:id/models", h.ListModels) - g.POST("/:id/import-models", h.ImportModels) - - mg := e.Group("/tts-models") - mg.POST("", h.CreateModel) - mg.GET("", h.ListAllModels) + mg := e.Group("/speech-models") + mg.GET("", h.ListModels) mg.GET("/:id", h.GetModel) - mg.PUT("/:id", h.UpdateModel) - mg.DELETE("/:id", h.DeleteModel) mg.GET("/:id/capabilities", h.GetModelCapabilities) mg.POST("/:id/test", h.TestModel) } // ListMeta godoc -// @Summary List TTS provider metadata -// @Description List available TTS provider types with their models and capabilities -// @Tags tts-providers +// @Summary List speech provider metadata +// @Description List available speech provider types with their models and capabilities +// @Tags 
speech-providers // @Success 200 {array} tts.ProviderMetaResponse -// @Router /tts-providers/meta [get]. -func (h *TtsProvidersHandler) ListMeta(c echo.Context) error { +// @Router /speech-providers/meta [get]. +func (h *SpeechHandler) ListMeta(c echo.Context) error { return c.JSON(http.StatusOK, h.service.ListMeta(c.Request().Context())) } -// Create godoc -// @Summary Create a TTS provider -// @Description Create a TTS provider and auto-import its available models -// @Tags tts-providers -// @Accept json +// ListProviders godoc +// @Summary List speech providers +// @Description List providers that support speech (filtered view of unified providers table) +// @Tags speech-providers // @Produce json -// @Param request body tts.CreateProviderRequest true "TTS provider configuration" -// @Success 201 {object} tts.ProviderResponse -// @Failure 400 {object} ErrorResponse +// @Success 200 {array} tts.SpeechProviderResponse // @Failure 500 {object} ErrorResponse -// @Router /tts-providers [post]. -func (h *TtsProvidersHandler) Create(c echo.Context) error { - var req tts.CreateProviderRequest - if err := c.Bind(&req); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - if strings.TrimSpace(req.Name) == "" { - return echo.NewHTTPError(http.StatusBadRequest, "name is required") - } - if strings.TrimSpace(string(req.Provider)) == "" { - return echo.NewHTTPError(http.StatusBadRequest, "provider is required") - } - resp, err := h.service.CreateProvider(c.Request().Context(), req) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusCreated, resp) -} - -// List godoc -// @Summary List TTS providers -// @Tags tts-providers -// @Produce json -// @Param provider query string false "Provider type filter" -// @Success 200 {array} tts.ProviderResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-providers [get]. 
-func (h *TtsProvidersHandler) List(c echo.Context) error { - items, err := h.service.ListProviders(c.Request().Context(), c.QueryParam("provider")) +// @Router /speech-providers [get]. +func (h *SpeechHandler) ListProviders(c echo.Context) error { + items, err := h.service.ListSpeechProviders(c.Request().Context()) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } return c.JSON(http.StatusOK, items) } -// Get godoc -// @Summary Get a TTS provider -// @Tags tts-providers -// @Produce json -// @Param id path string true "Provider ID" -// @Success 200 {object} tts.ProviderResponse -// @Failure 404 {object} ErrorResponse -// @Router /tts-providers/{id} [get]. -func (h *TtsProvidersHandler) Get(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - resp, err := h.service.GetProvider(c.Request().Context(), id) - if err != nil { - return echo.NewHTTPError(http.StatusNotFound, err.Error()) - } - return c.JSON(http.StatusOK, resp) -} - -// Update godoc -// @Summary Update a TTS provider -// @Tags tts-providers -// @Accept json -// @Produce json -// @Param id path string true "Provider ID" -// @Param request body tts.UpdateProviderRequest true "Updated configuration" -// @Success 200 {object} tts.ProviderResponse -// @Failure 400 {object} ErrorResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-providers/{id} [put]. 
-func (h *TtsProvidersHandler) Update(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - var req tts.UpdateProviderRequest - if err := c.Bind(&req); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - resp, err := h.service.UpdateProvider(c.Request().Context(), id, req) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, resp) -} - -// Delete godoc -// @Summary Delete a TTS provider -// @Tags tts-providers -// @Param id path string true "Provider ID" -// @Success 204 "No Content" -// @Failure 500 {object} ErrorResponse -// @Router /tts-providers/{id} [delete]. -func (h *TtsProvidersHandler) Delete(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - if err := h.service.DeleteProvider(c.Request().Context(), id); err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.NoContent(http.StatusNoContent) -} - // ListModels godoc -// @Summary List models for a TTS provider -// @Tags tts-providers +// @Summary List all speech models +// @Description List all models of type 'speech' (filtered view of unified models table) +// @Tags speech-models // @Produce json -// @Param id path string true "Provider ID" -// @Success 200 {array} tts.ModelResponse +// @Success 200 {array} tts.SpeechModelResponse // @Failure 500 {object} ErrorResponse -// @Router /tts-providers/{id}/models [get]. 
-func (h *TtsProvidersHandler) ListModels(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - items, err := h.service.ListModelsByProvider(c.Request().Context(), id) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, items) -} - -// ImportModels godoc -// @Summary Import models for a TTS provider -// @Description Discover and import available models from the TTS adapter -// @Tags tts-providers -// @Produce json -// @Param id path string true "Provider ID" -// @Success 200 {array} tts.ModelResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-providers/{id}/import-models [post]. -func (h *TtsProvidersHandler) ImportModels(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - items, err := h.service.ImportModels(c.Request().Context(), id) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, items) -} - -// CreateModel godoc -// @Summary Create a TTS model -// @Description Manually create a TTS model under a specific provider -// @Tags tts-models -// @Accept json -// @Produce json -// @Param request body tts.CreateModelRequest true "TTS model configuration" -// @Success 201 {object} tts.ModelResponse -// @Failure 400 {object} ErrorResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-models [post]. 
-func (h *TtsProvidersHandler) CreateModel(c echo.Context) error { - var req tts.CreateModelRequest - if err := c.Bind(&req); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - if strings.TrimSpace(req.ModelID) == "" { - return echo.NewHTTPError(http.StatusBadRequest, "model_id is required") - } - if strings.TrimSpace(req.TtsProviderID) == "" { - return echo.NewHTTPError(http.StatusBadRequest, "tts_provider_id is required") - } - resp, err := h.service.CreateModel(c.Request().Context(), req) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusCreated, resp) -} - -// ListAllModels godoc -// @Summary List all TTS models -// @Tags tts-models -// @Produce json -// @Success 200 {array} tts.ModelResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-models [get]. -func (h *TtsProvidersHandler) ListAllModels(c echo.Context) error { - items, err := h.service.ListAllModels(c.Request().Context()) +// @Router /speech-models [get]. +func (h *SpeechHandler) ListModels(c echo.Context) error { + items, err := h.service.ListSpeechModels(c.Request().Context()) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } @@ -250,79 +77,34 @@ func (h *TtsProvidersHandler) ListAllModels(c echo.Context) error { } // GetModel godoc -// @Summary Get a TTS model -// @Tags tts-models +// @Summary Get a speech model +// @Tags speech-models // @Produce json // @Param id path string true "Model ID" -// @Success 200 {object} tts.ModelResponse +// @Success 200 {object} tts.SpeechModelResponse // @Failure 404 {object} ErrorResponse -// @Router /tts-models/{id} [get]. -func (h *TtsProvidersHandler) GetModel(c echo.Context) error { +// @Router /speech-models/{id} [get]. 
+func (h *SpeechHandler) GetModel(c echo.Context) error { id := strings.TrimSpace(c.Param("id")) if id == "" { return echo.NewHTTPError(http.StatusBadRequest, "id is required") } - resp, err := h.service.GetModel(c.Request().Context(), id) + resp, err := h.service.GetSpeechModel(c.Request().Context(), id) if err != nil { return echo.NewHTTPError(http.StatusNotFound, err.Error()) } return c.JSON(http.StatusOK, resp) } -// UpdateModel godoc -// @Summary Update a TTS model -// @Tags tts-models -// @Accept json -// @Produce json -// @Param id path string true "Model ID" -// @Param request body tts.UpdateModelRequest true "Updated configuration" -// @Success 200 {object} tts.ModelResponse -// @Failure 400 {object} ErrorResponse -// @Failure 500 {object} ErrorResponse -// @Router /tts-models/{id} [put]. -func (h *TtsProvidersHandler) UpdateModel(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - var req tts.UpdateModelRequest - if err := c.Bind(&req); err != nil { - return echo.NewHTTPError(http.StatusBadRequest, err.Error()) - } - resp, err := h.service.UpdateModel(c.Request().Context(), id, req) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, resp) -} - -// DeleteModel godoc -// @Summary Delete a TTS model -// @Tags tts-models -// @Param id path string true "Model ID" -// @Success 204 "No Content" -// @Failure 500 {object} ErrorResponse -// @Router /tts-models/{id} [delete]. 
-func (h *TtsProvidersHandler) DeleteModel(c echo.Context) error { - id := strings.TrimSpace(c.Param("id")) - if id == "" { - return echo.NewHTTPError(http.StatusBadRequest, "id is required") - } - if err := h.service.DeleteModel(c.Request().Context(), id); err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.NoContent(http.StatusNoContent) -} - // GetModelCapabilities godoc -// @Summary Get TTS model capabilities -// @Tags tts-models +// @Summary Get speech model capabilities +// @Tags speech-models // @Produce json // @Param id path string true "Model ID" // @Success 200 {object} tts.ModelCapabilities // @Failure 404 {object} ErrorResponse -// @Router /tts-models/{id}/capabilities [get]. -func (h *TtsProvidersHandler) GetModelCapabilities(c echo.Context) error { +// @Router /speech-models/{id}/capabilities [get]. +func (h *SpeechHandler) GetModelCapabilities(c echo.Context) error { id := strings.TrimSpace(c.Param("id")) if id == "" { return echo.NewHTTPError(http.StatusBadRequest, "id is required") @@ -335,9 +117,9 @@ func (h *TtsProvidersHandler) GetModelCapabilities(c echo.Context) error { } // TestModel godoc -// @Summary Test TTS model synthesis +// @Summary Test speech model synthesis // @Description Synthesize text using a specific model's config and return audio -// @Tags tts-models +// @Tags speech-models // @Accept json // @Produce application/octet-stream // @Param id path string true "Model ID" @@ -345,8 +127,8 @@ func (h *TtsProvidersHandler) GetModelCapabilities(c echo.Context) error { // @Success 200 {file} binary "Audio data" // @Failure 400 {object} ErrorResponse // @Failure 500 {object} ErrorResponse -// @Router /tts-models/{id}/test [post]. -func (h *TtsProvidersHandler) TestModel(c echo.Context) error { +// @Router /speech-models/{id}/test [post]. 
+func (h *SpeechHandler) TestModel(c echo.Context) error { id := strings.TrimSpace(c.Param("id")) if id == "" { return echo.NewHTTPError(http.StatusBadRequest, "id is required") diff --git a/internal/memory/adapters/builtin/dense_runtime.go b/internal/memory/adapters/builtin/dense_runtime.go index 92d47c11..11940087 100644 --- a/internal/memory/adapters/builtin/dense_runtime.go +++ b/internal/memory/adapters/builtin/dense_runtime.go @@ -548,10 +548,10 @@ func resolveDenseEmbeddingModel(ctx context.Context, queries *dbsqlc.Queries, mo if row.Type != "embedding" { return denseModelSpec{}, fmt.Errorf("dense runtime: model %s is not an embedding model", modelRef) } - if !row.LlmProviderID.Valid { + if !row.ProviderID.Valid { return denseModelSpec{}, fmt.Errorf("dense runtime: model %s has no provider", modelRef) } - provider, err := queries.GetLlmProviderByID(ctx, row.LlmProviderID) + provider, err := queries.GetProviderByID(ctx, row.ProviderID) if err != nil { return denseModelSpec{}, fmt.Errorf("dense runtime: get embedding provider: %w", err) } @@ -564,11 +564,17 @@ func resolveDenseEmbeddingModel(ctx context.Context, queries *dbsqlc.Queries, mo if cfg.Dimensions == nil || *cfg.Dimensions <= 0 { return denseModelSpec{}, fmt.Errorf("dense runtime: embedding model %s missing dimensions", modelRef) } + var providerCfg map[string]any + if len(provider.Config) > 0 { + _ = json.Unmarshal(provider.Config, &providerCfg) + } + baseURL, _ := providerCfg["base_url"].(string) + apiKey, _ := providerCfg["api_key"].(string) return denseModelSpec{ modelID: strings.TrimSpace(row.ModelID), clientType: strings.TrimSpace(provider.ClientType), - baseURL: strings.TrimSpace(provider.BaseUrl), - apiKey: strings.TrimSpace(provider.ApiKey), + baseURL: strings.TrimSpace(baseURL), + apiKey: strings.TrimSpace(apiKey), dimensions: *cfg.Dimensions, }, nil } diff --git a/internal/models/config.go b/internal/models/config.go new file mode 100644 index 00000000..c1ab1a4a --- /dev/null +++ 
b/internal/models/config.go @@ -0,0 +1,16 @@ +package models + +import "encoding/json" + +// providerConfigString extracts a string value from a provider's JSONB config bytes. +func providerConfigString(raw []byte, key string) string { + if len(raw) == 0 { + return "" + } + var cfg map[string]any + if err := json.Unmarshal(raw, &cfg); err != nil { + return "" + } + v, _ := cfg[key].(string) + return v +} diff --git a/internal/models/models.go b/internal/models/models.go index de6b7756..f4cfddff 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -43,9 +43,9 @@ func (s *Service) Create(ctx context.Context, req AddRequest) (AddResponse, erro return AddResponse{}, fmt.Errorf("validation failed: %w", err) } - llmProviderID, err := db.ParseUUID(model.LlmProviderID) + providerID, err := db.ParseUUID(model.ProviderID) if err != nil { - return AddResponse{}, fmt.Errorf("invalid llm provider ID: %w", err) + return AddResponse{}, fmt.Errorf("invalid provider ID: %w", err) } configJSON, err := json.Marshal(model.Config) @@ -54,10 +54,10 @@ func (s *Service) Create(ctx context.Context, req AddRequest) (AddResponse, erro } params := sqlc.CreateModelParams{ - ModelID: model.ModelID, - LlmProviderID: llmProviderID, - Type: string(model.Type), - Config: configJSON, + ModelID: model.ModelID, + ProviderID: providerID, + Type: string(model.Type), + Config: configJSON, } if model.Name != "" { @@ -126,9 +126,9 @@ func (s *Service) List(ctx context.Context) ([]GetResponse, error) { return s.convertToGetResponseList(dbModels), nil } -// ListByType returns models filtered by type (chat or embedding). +// ListByType returns models filtered by type (chat, embedding, or speech). 
func (s *Service) ListByType(ctx context.Context, modelType ModelType) ([]GetResponse, error) { - if modelType != ModelTypeChat && modelType != ModelTypeEmbedding { + if modelType != ModelTypeChat && modelType != ModelTypeEmbedding && modelType != ModelTypeSpeech { return nil, fmt.Errorf("invalid model type: %s", modelType) } @@ -165,7 +165,7 @@ func (s *Service) ListEnabled(ctx context.Context) ([]GetResponse, error) { // ListEnabledByType returns models from enabled providers filtered by type. func (s *Service) ListEnabledByType(ctx context.Context, modelType ModelType) ([]GetResponse, error) { - if modelType != ModelTypeChat && modelType != ModelTypeEmbedding { + if modelType != ModelTypeChat && modelType != ModelTypeEmbedding && modelType != ModelTypeSpeech { return nil, fmt.Errorf("invalid model type: %s", modelType) } dbModels, err := s.queries.ListEnabledModelsByType(ctx, string(modelType)) @@ -206,7 +206,7 @@ func (s *Service) ListByProviderID(ctx context.Context, providerID string) ([]Ge // ListByProviderIDAndType returns models filtered by provider ID and type. 
func (s *Service) ListByProviderIDAndType(ctx context.Context, providerID string, modelType ModelType) ([]GetResponse, error) { - if modelType != ModelTypeChat && modelType != ModelTypeEmbedding { + if modelType != ModelTypeChat && modelType != ModelTypeEmbedding && modelType != ModelTypeSpeech { return nil, fmt.Errorf("invalid model type: %s", modelType) } if strings.TrimSpace(providerID) == "" { @@ -217,8 +217,8 @@ func (s *Service) ListByProviderIDAndType(ctx context.Context, providerID string return nil, fmt.Errorf("invalid provider id: %w", err) } dbModels, err := s.queries.ListModelsByProviderIDAndType(ctx, sqlc.ListModelsByProviderIDAndTypeParams{ - LlmProviderID: uuid, - Type: string(modelType), + ProviderID: uuid, + Type: string(modelType), }) if err != nil { return nil, fmt.Errorf("failed to list models by provider and type: %w", err) @@ -238,9 +238,9 @@ func (s *Service) UpdateByID(ctx context.Context, id string, req UpdateRequest) return GetResponse{}, fmt.Errorf("validation failed: %w", err) } - llmProviderID, err := db.ParseUUID(model.LlmProviderID) + providerID, err := db.ParseUUID(model.ProviderID) if err != nil { - return GetResponse{}, fmt.Errorf("invalid llm provider ID: %w", err) + return GetResponse{}, fmt.Errorf("invalid provider ID: %w", err) } configJSON, err := json.Marshal(model.Config) @@ -249,11 +249,11 @@ func (s *Service) UpdateByID(ctx context.Context, id string, req UpdateRequest) } params := sqlc.UpdateModelParams{ - ID: uuid, - ModelID: model.ModelID, - LlmProviderID: llmProviderID, - Type: string(model.Type), - Config: configJSON, + ID: uuid, + ModelID: model.ModelID, + ProviderID: providerID, + Type: string(model.Type), + Config: configJSON, } if model.Name != "" { @@ -286,9 +286,9 @@ func (s *Service) UpdateByModelID(ctx context.Context, modelID string, req Updat return GetResponse{}, fmt.Errorf("validation failed: %w", err) } - llmProviderID, err := db.ParseUUID(model.LlmProviderID) + providerID, err := 
db.ParseUUID(model.ProviderID) if err != nil { - return GetResponse{}, fmt.Errorf("invalid llm provider ID: %w", err) + return GetResponse{}, fmt.Errorf("invalid provider ID: %w", err) } configJSON, err := json.Marshal(model.Config) @@ -297,11 +297,11 @@ func (s *Service) UpdateByModelID(ctx context.Context, modelID string, req Updat } params := sqlc.UpdateModelParams{ - ID: current.ID, - ModelID: model.ModelID, - LlmProviderID: llmProviderID, - Type: string(model.Type), - Config: configJSON, + ID: current.ID, + ModelID: model.ModelID, + ProviderID: providerID, + Type: string(model.Type), + Config: configJSON, } if model.Name != "" { @@ -361,7 +361,7 @@ func (s *Service) Count(ctx context.Context) (int64, error) { // CountByType returns the number of models of a specific type. func (s *Service) CountByType(ctx context.Context, modelType ModelType) (int64, error) { - if modelType != ModelTypeChat && modelType != ModelTypeEmbedding { + if modelType != ModelTypeChat && modelType != ModelTypeEmbedding && modelType != ModelTypeSpeech { return 0, fmt.Errorf("invalid model type: %s", modelType) } @@ -382,8 +382,8 @@ func (s *Service) convertToGetResponse(dbModel sqlc.Model) GetResponse { }, } - if dbModel.LlmProviderID.Valid { - resp.LlmProviderID = dbModel.LlmProviderID.String() + if dbModel.ProviderID.Valid { + resp.ProviderID = dbModel.ProviderID.String() } if dbModel.Name.Valid { @@ -427,7 +427,9 @@ func IsValidClientType(clientType ClientType) bool { case ClientTypeOpenAIResponses, ClientTypeOpenAICompletions, ClientTypeAnthropicMessages, - ClientTypeGoogleGenerativeAI: + ClientTypeGoogleGenerativeAI, + ClientTypeOpenAICodex, + ClientTypeEdgeSpeech: return true default: return false @@ -435,45 +437,46 @@ func IsValidClientType(clientType ClientType) bool { } // SelectMemoryModel selects a chat model for memory operations. 
-func SelectMemoryModel(ctx context.Context, modelsService *Service, queries *sqlc.Queries) (GetResponse, sqlc.LlmProvider, error) { +func SelectMemoryModel(ctx context.Context, modelsService *Service, queries *sqlc.Queries) (GetResponse, sqlc.Provider, error) { if modelsService == nil { - return GetResponse{}, sqlc.LlmProvider{}, errors.New("models service not configured") + return GetResponse{}, sqlc.Provider{}, errors.New("models service not configured") } if queries == nil { - return GetResponse{}, sqlc.LlmProvider{}, errors.New("queries not configured") + return GetResponse{}, sqlc.Provider{}, errors.New("queries not configured") } candidates, err := modelsService.ListByType(ctx, ModelTypeChat) if err != nil || len(candidates) == 0 { - return GetResponse{}, sqlc.LlmProvider{}, errors.New("no chat models available for memory operations") + return GetResponse{}, sqlc.Provider{}, errors.New("no chat models available for memory operations") } selected := candidates[0] - provider, err := FetchProviderByID(ctx, queries, selected.LlmProviderID) + provider, err := FetchProviderByID(ctx, queries, selected.ProviderID) if err != nil { - return GetResponse{}, sqlc.LlmProvider{}, err + return GetResponse{}, sqlc.Provider{}, err } return selected, provider, nil } // SelectMemoryModelForBot delegates to SelectMemoryModel. -func SelectMemoryModelForBot(ctx context.Context, modelsService *Service, queries *sqlc.Queries, _ string) (GetResponse, sqlc.LlmProvider, error) { +func SelectMemoryModelForBot(ctx context.Context, modelsService *Service, queries *sqlc.Queries, _ string) (GetResponse, sqlc.Provider, error) { return SelectMemoryModel(ctx, modelsService, queries) } // FetchProviderByID fetches a provider by ID. 
-func FetchProviderByID(ctx context.Context, queries *sqlc.Queries, providerID string) (sqlc.LlmProvider, error) { +func FetchProviderByID(ctx context.Context, queries *sqlc.Queries, providerID string) (sqlc.Provider, error) { if strings.TrimSpace(providerID) == "" { - return sqlc.LlmProvider{}, errors.New("provider id missing") + return sqlc.Provider{}, errors.New("provider id missing") } parsed, err := db.ParseUUID(providerID) if err != nil { - return sqlc.LlmProvider{}, err + return sqlc.Provider{}, err } - provider, err := queries.GetLlmProviderByID(ctx, parsed) + provider, err := queries.GetProviderByID(ctx, parsed) if err != nil { - return sqlc.LlmProvider{}, err + return sqlc.Provider{}, err } - if strings.TrimSpace(provider.ApiKey) != "" { - channel.SetIMErrorSecrets("llm-provider:"+providerID, provider.ApiKey) + apiKey := providerConfigString(provider.Config, "api_key") + if strings.TrimSpace(apiKey) != "" { + channel.SetIMErrorSecrets("provider:"+providerID, apiKey) } return provider, nil } diff --git a/internal/models/models_test.go b/internal/models/models_test.go index e9147fe0..aa0d4cad 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -19,20 +19,20 @@ func TestModel_Validate(t *testing.T) { { name: "valid chat model", model: models.Model{ - ModelID: "gpt-4", - Name: "GPT-4", - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeChat, + ModelID: "gpt-4", + Name: "GPT-4", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeChat, }, wantErr: false, }, { name: "valid chat model with compatibilities", model: models.Model{ - ModelID: "gpt-4o", - Name: "GPT-4o", - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeChat, + ModelID: "gpt-4o", + Name: "GPT-4o", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeChat, Config: models.ModelConfig{ Compatibilities: []string{"vision", "tool-call", "reasoning"}, }, @@ -42,24 
+42,24 @@ func TestModel_Validate(t *testing.T) { { name: "valid embedding model", model: models.Model{ - ModelID: "text-embedding-ada-002", - Name: "Ada Embeddings", - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeEmbedding, - Config: models.ModelConfig{Dimensions: intPtr(1536)}, + ModelID: "text-embedding-ada-002", + Name: "Ada Embeddings", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeEmbedding, + Config: models.ModelConfig{Dimensions: intPtr(1536)}, }, wantErr: false, }, { name: "missing model_id", model: models.Model{ - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeChat, + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeChat, }, wantErr: true, }, { - name: "missing llm_provider_id", + name: "missing provider_id", model: models.Model{ ModelID: "gpt-4", Type: models.ModelTypeChat, @@ -67,38 +67,38 @@ func TestModel_Validate(t *testing.T) { wantErr: true, }, { - name: "invalid llm_provider_id", + name: "invalid provider_id", model: models.Model{ - ModelID: "gpt-4", - LlmProviderID: "not-a-uuid", - Type: models.ModelTypeChat, + ModelID: "gpt-4", + ProviderID: "not-a-uuid", + Type: models.ModelTypeChat, }, wantErr: true, }, { name: "invalid model type", model: models.Model{ - ModelID: "gpt-4", - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: "invalid", + ModelID: "gpt-4", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: "invalid", }, wantErr: true, }, { name: "embedding model missing dimensions", model: models.Model{ - ModelID: "text-embedding-ada-002", - LlmProviderID: "11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeEmbedding, + ModelID: "text-embedding-ada-002", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeEmbedding, }, wantErr: true, }, { name: "invalid compatibility", model: models.Model{ - ModelID: "gpt-4", - LlmProviderID: 
"11111111-1111-1111-1111-111111111111", - Type: models.ModelTypeChat, + ModelID: "gpt-4", + ProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeChat, Config: models.ModelConfig{ Compatibilities: []string{"vision", "smell"}, }, diff --git a/internal/models/probe.go b/internal/models/probe.go index 412ecf35..44299fb2 100644 --- a/internal/models/probe.go +++ b/internal/models/probe.go @@ -36,12 +36,12 @@ func (s *Service) Test(ctx context.Context, id string) (TestResponse, error) { return TestResponse{}, fmt.Errorf("get model: %w", err) } - provider, err := s.queries.GetLlmProviderByID(ctx, model.LlmProviderID) + provider, err := s.queries.GetProviderByID(ctx, model.ProviderID) if err != nil { return TestResponse{}, fmt.Errorf("get provider: %w", err) } - baseURL := strings.TrimRight(provider.BaseUrl, "/") + baseURL := strings.TrimRight(providerConfigString(provider.Config, "base_url"), "/") clientType := ClientType(provider.ClientType) creds, err := s.resolveModelCredentials(ctx, provider) if err != nil { @@ -199,12 +199,14 @@ type modelCredentials struct { CodexAccountID string } -func (s *Service) resolveModelCredentials(ctx context.Context, provider sqlc.LlmProvider) (modelCredentials, error) { +func (s *Service) resolveModelCredentials(ctx context.Context, provider sqlc.Provider) (modelCredentials, error) { + apiKey := providerConfigString(provider.Config, "api_key") + if ClientType(provider.ClientType) != ClientTypeOpenAICodex { - return modelCredentials{APIKey: provider.ApiKey}, nil + return modelCredentials{APIKey: apiKey}, nil } - tokenRow, err := s.queries.GetLlmProviderOAuthTokenByProvider(ctx, provider.ID) + tokenRow, err := s.queries.GetProviderOAuthTokenByProvider(ctx, provider.ID) if err != nil { return modelCredentials{}, err } diff --git a/internal/models/types.go b/internal/models/types.go index ff8b3c6e..203ebd1a 100644 --- a/internal/models/types.go +++ b/internal/models/types.go @@ -11,6 +11,7 @@ type ModelType string 
const ( ModelTypeChat ModelType = "chat" ModelTypeEmbedding ModelType = "embedding" + ModelTypeSpeech ModelType = "speech" ) type ClientType string @@ -21,6 +22,7 @@ const ( ClientTypeAnthropicMessages ClientType = "anthropic-messages" ClientTypeGoogleGenerativeAI ClientType = "google-generative-ai" ClientTypeOpenAICodex ClientType = "openai-codex" + ClientTypeEdgeSpeech ClientType = "edge-speech" ) const ( @@ -60,24 +62,24 @@ type ModelConfig struct { } type Model struct { - ModelID string `json:"model_id"` - Name string `json:"name"` - LlmProviderID string `json:"llm_provider_id"` - Type ModelType `json:"type"` - Config ModelConfig `json:"config"` + ModelID string `json:"model_id"` + Name string `json:"name"` + ProviderID string `json:"provider_id"` + Type ModelType `json:"type"` + Config ModelConfig `json:"config"` } func (m *Model) Validate() error { if m.ModelID == "" { return errors.New("model ID is required") } - if m.LlmProviderID == "" { - return errors.New("llm provider ID is required") + if m.ProviderID == "" { + return errors.New("provider ID is required") } - if _, err := uuid.Parse(m.LlmProviderID); err != nil { - return errors.New("llm provider ID must be a valid UUID") + if _, err := uuid.Parse(m.ProviderID); err != nil { + return errors.New("provider ID must be a valid UUID") } - if m.Type != ModelTypeChat && m.Type != ModelTypeEmbedding { + if m.Type != ModelTypeChat && m.Type != ModelTypeEmbedding && m.Type != ModelTypeSpeech { return errors.New("invalid model type") } if m.Type == ModelTypeEmbedding { diff --git a/internal/providers/credentials.go b/internal/providers/credentials.go index dc5f5e8a..8535e34a 100644 --- a/internal/providers/credentials.go +++ b/internal/providers/credentials.go @@ -19,14 +19,15 @@ type ModelCredentials struct { CodexAccountID string } -func SupportsOpenAICodexOAuth(provider sqlc.LlmProvider) bool { +func SupportsOpenAICodexOAuth(provider sqlc.Provider) bool { return supportsOAuth(provider) } -func (s *Service) 
ResolveModelCredentials(ctx context.Context, provider sqlc.LlmProvider) (ModelCredentials, error) { +func (s *Service) ResolveModelCredentials(ctx context.Context, provider sqlc.Provider) (ModelCredentials, error) { if models.ClientType(provider.ClientType) != models.ClientTypeOpenAICodex { + apiKey := ProviderConfigString(provider, "api_key") return ModelCredentials{ - APIKey: provider.ApiKey, + APIKey: apiKey, }, nil } diff --git a/internal/providers/oauth.go b/internal/providers/oauth.go index a03c16b2..55d94e7e 100644 --- a/internal/providers/oauth.go +++ b/internal/providers/oauth.go @@ -104,7 +104,7 @@ func (s *Service) oauthConfig(metadata map[string]any) openAIOAuthConfig { return cfg } -func supportsOAuth(provider sqlc.LlmProvider) bool { +func supportsOAuth(provider sqlc.Provider) bool { return models.ClientType(provider.ClientType) == models.ClientTypeOpenAICodex } @@ -113,7 +113,7 @@ func (s *Service) StartOAuthAuthorization(ctx context.Context, providerID string if err != nil { return "", err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerUUID) + provider, err := s.queries.GetProviderByID(ctx, providerUUID) if err != nil { return "", fmt.Errorf("get provider: %w", err) } @@ -160,7 +160,7 @@ func (s *Service) HandleOAuthCallback(ctx context.Context, state, code string) ( if err != nil { return "", err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerUUID) + provider, err := s.queries.GetProviderByID(ctx, providerUUID) if err != nil { return "", fmt.Errorf("get provider: %w", err) } @@ -193,7 +193,7 @@ func (s *Service) GetOAuthStatus(ctx context.Context, providerID string) (*OAuth if err != nil { return nil, err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerUUID) + provider, err := s.queries.GetProviderByID(ctx, providerUUID) if err != nil { return nil, fmt.Errorf("get provider: %w", err) } @@ -226,14 +226,14 @@ func (s *Service) RevokeOAuthToken(ctx context.Context, providerID string) error if err != nil { 
return err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerUUID) + provider, err := s.queries.GetProviderByID(ctx, providerUUID) if err != nil { return fmt.Errorf("get provider: %w", err) } if !supportsOAuth(provider) { return errors.New("provider does not support oauth") } - return s.queries.DeleteLlmProviderOAuthToken(ctx, providerUUID) + return s.queries.DeleteProviderOAuthToken(ctx, providerUUID) } func (s *Service) GetValidAccessToken(ctx context.Context, providerID string) (string, error) { @@ -255,7 +255,7 @@ func (s *Service) GetValidAccessToken(ctx context.Context, providerID string) (s if err != nil { return "", err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerUUID) + provider, err := s.queries.GetProviderByID(ctx, providerUUID) if err != nil { return "", fmt.Errorf("get provider: %w", err) } @@ -285,7 +285,7 @@ func (s *Service) getOAuthToken(ctx context.Context, providerID string) (*provid if err != nil { return nil, err } - row, err := s.queries.GetLlmProviderOAuthTokenByProvider(ctx, providerUUID) + row, err := s.queries.GetProviderOAuthTokenByProvider(ctx, providerUUID) if err != nil { return nil, err } @@ -293,7 +293,7 @@ func (s *Service) getOAuthToken(ctx context.Context, providerID string) (*provid } func (s *Service) getOAuthTokenByState(ctx context.Context, state string) (*providerOAuthToken, error) { - row, err := s.queries.GetLlmProviderOAuthTokenByState(ctx, state) + row, err := s.queries.GetProviderOAuthTokenByState(ctx, state) if err != nil { return nil, err } @@ -305,8 +305,8 @@ func (s *Service) updateOAuthState(ctx context.Context, providerID, state, codeV if err != nil { return err } - return s.queries.UpdateLlmProviderOAuthState(ctx, sqlc.UpdateLlmProviderOAuthStateParams{ - LlmProviderID: providerUUID, + return s.queries.UpdateProviderOAuthState(ctx, sqlc.UpdateProviderOAuthStateParams{ + ProviderID: providerUUID, State: state, PkceCodeVerifier: codeVerifier, }) @@ -321,8 +321,8 @@ func (s *Service) 
saveOAuthToken(ctx context.Context, providerID string, token p if !token.ExpiresAt.IsZero() { expiresAt = pgtype.Timestamptz{Time: token.ExpiresAt, Valid: true} } - _, err = s.queries.UpsertLlmProviderOAuthToken(ctx, sqlc.UpsertLlmProviderOAuthTokenParams{ - LlmProviderID: providerUUID, + _, err = s.queries.UpsertProviderOAuthToken(ctx, sqlc.UpsertProviderOAuthTokenParams{ + ProviderID: providerUUID, AccessToken: token.AccessToken, RefreshToken: token.RefreshToken, ExpiresAt: expiresAt, @@ -334,9 +334,9 @@ func (s *Service) saveOAuthToken(ctx context.Context, providerID string, token p return err } -func toProviderOAuthToken(row sqlc.LlmProviderOauthToken) *providerOAuthToken { +func toProviderOAuthToken(row sqlc.ProviderOauthToken) *providerOAuthToken { token := &providerOAuthToken{ - ProviderID: row.LlmProviderID.String(), + ProviderID: row.ProviderID.String(), AccessToken: row.AccessToken, RefreshToken: row.RefreshToken, Scope: row.Scope, diff --git a/internal/providers/service.go b/internal/providers/service.go index b9940e46..9b69a281 100644 --- a/internal/providers/service.go +++ b/internal/providers/service.go @@ -40,13 +40,18 @@ func NewService(log *slog.Logger, queries *sqlc.Queries, callbackURL string) *Se } } -// Create creates a new LLM provider. +// Create creates a new provider. 
func (s *Service) Create(ctx context.Context, req CreateRequest) (GetResponse, error) { metadataJSON, err := json.Marshal(req.Metadata) if err != nil { return GetResponse{}, fmt.Errorf("marshal metadata: %w", err) } + configJSON, err := json.Marshal(req.Config) + if err != nil { + return GetResponse{}, fmt.Errorf("marshal config: %w", err) + } + clientType := req.ClientType if clientType == "" { clientType = string(models.ClientTypeOpenAICompletions) @@ -57,13 +62,12 @@ func (s *Service) Create(ctx context.Context, req CreateRequest) (GetResponse, e icon = pgtype.Text{String: req.Icon, Valid: true} } - provider, err := s.queries.CreateLlmProvider(ctx, sqlc.CreateLlmProviderParams{ + provider, err := s.queries.CreateProvider(ctx, sqlc.CreateProviderParams{ Name: req.Name, - BaseUrl: req.BaseURL, - ApiKey: req.APIKey, ClientType: clientType, Icon: icon, Enable: true, + Config: configJSON, Metadata: metadataJSON, }) if err != nil { @@ -80,7 +84,7 @@ func (s *Service) Get(ctx context.Context, id string) (GetResponse, error) { return GetResponse{}, err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerID) + provider, err := s.queries.GetProviderByID(ctx, providerID) if err != nil { return GetResponse{}, fmt.Errorf("get provider: %w", err) } @@ -90,7 +94,7 @@ func (s *Service) Get(ctx context.Context, id string) (GetResponse, error) { // GetByName retrieves a provider by name. func (s *Service) GetByName(ctx context.Context, name string) (GetResponse, error) { - provider, err := s.queries.GetLlmProviderByName(ctx, name) + provider, err := s.queries.GetProviderByName(ctx, name) if err != nil { return GetResponse{}, fmt.Errorf("get provider by name: %w", err) } @@ -100,7 +104,7 @@ func (s *Service) GetByName(ctx context.Context, name string) (GetResponse, erro // List retrieves all providers. 
func (s *Service) List(ctx context.Context) ([]GetResponse, error) { - providers, err := s.queries.ListLlmProviders(ctx) + providers, err := s.queries.ListProviders(ctx) if err != nil { return nil, fmt.Errorf("list providers: %w", err) } @@ -119,7 +123,7 @@ func (s *Service) Update(ctx context.Context, id string, req UpdateRequest) (Get return GetResponse{}, err } - existing, err := s.queries.GetLlmProviderByID(ctx, providerID) + existing, err := s.queries.GetProviderByID(ctx, providerID) if err != nil { return GetResponse{}, fmt.Errorf("get provider: %w", err) } @@ -129,13 +133,6 @@ func (s *Service) Update(ctx context.Context, id string, req UpdateRequest) (Get name = *req.Name } - baseURL := existing.BaseUrl - if req.BaseURL != nil { - baseURL = *req.BaseURL - } - - apiKey := resolveUpdatedAPIKey(existing.ApiKey, req.APIKey) - clientType := existing.ClientType if req.ClientType != nil { clientType = *req.ClientType @@ -151,6 +148,20 @@ func (s *Service) Update(ctx context.Context, id string, req UpdateRequest) (Get enable = *req.Enable } + existingConfig := providerConfig(existing.Config) + if req.Config != nil { + existingAPIKey := configString(existingConfig, "api_key") + newAPIKey := configString(req.Config, "api_key") + if newAPIKey != "" && newAPIKey == maskAPIKey(existingAPIKey) { + req.Config["api_key"] = existingAPIKey + } + existingConfig = req.Config + } + configJSON, err := json.Marshal(existingConfig) + if err != nil { + return GetResponse{}, fmt.Errorf("marshal config: %w", err) + } + metadataMap := providerMetadata(existing.Metadata) if req.Metadata != nil { metadataMap = req.Metadata @@ -160,14 +171,13 @@ func (s *Service) Update(ctx context.Context, id string, req UpdateRequest) (Get return GetResponse{}, fmt.Errorf("marshal metadata: %w", err) } - updated, err := s.queries.UpdateLlmProvider(ctx, sqlc.UpdateLlmProviderParams{ + updated, err := s.queries.UpdateProvider(ctx, sqlc.UpdateProviderParams{ ID: providerID, Name: name, - BaseUrl: baseURL, 
- ApiKey: apiKey, ClientType: clientType, Icon: icon, Enable: enable, + Config: configJSON, Metadata: metadataJSON, }) if err != nil { @@ -184,7 +194,7 @@ func (s *Service) Delete(ctx context.Context, id string) error { return err } - if err := s.queries.DeleteLlmProvider(ctx, providerID); err != nil { + if err := s.queries.DeleteProvider(ctx, providerID); err != nil { return fmt.Errorf("delete provider: %w", err) } return nil @@ -192,7 +202,7 @@ func (s *Service) Delete(ctx context.Context, id string) error { // Count returns the total count of providers. func (s *Service) Count(ctx context.Context) (int64, error) { - count, err := s.queries.CountLlmProviders(ctx) + count, err := s.queries.CountProviders(ctx) if err != nil { return 0, fmt.Errorf("count providers: %w", err) } @@ -209,12 +219,13 @@ func (s *Service) Test(ctx context.Context, id string) (TestResponse, error) { return TestResponse{}, err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerID) + provider, err := s.queries.GetProviderByID(ctx, providerID) if err != nil { return TestResponse{}, fmt.Errorf("get provider: %w", err) } - baseURL := strings.TrimRight(provider.BaseUrl, "/") + cfg := providerConfig(provider.Config) + baseURL := strings.TrimRight(configString(cfg, "base_url"), "/") clientType := models.ClientType(provider.ClientType) creds, err := s.ResolveModelCredentials(ctx, provider) @@ -242,7 +253,7 @@ func (s *Service) FetchRemoteModels(ctx context.Context, id string) ([]RemoteMod return nil, err } - provider, err := s.queries.GetLlmProviderByID(ctx, providerID) + provider, err := s.queries.GetProviderByID(ctx, providerID) if err != nil { return nil, fmt.Errorf("get provider: %w", err) } @@ -270,7 +281,9 @@ func (s *Service) FetchRemoteModels(ctx context.Context, id string) ([]RemoteMod return remoteModels, nil } - baseURL := strings.TrimRight(provider.BaseUrl, "/") + cfg := providerConfig(provider.Config) + baseURL := strings.TrimRight(configString(cfg, "base_url"), "/") + 
apiKey := configString(cfg, "api_key") modelsURL := fmt.Sprintf("%s/models", baseURL) ctx, cancel := context.WithTimeout(ctx, probeTimeout) @@ -281,11 +294,11 @@ func (s *Service) FetchRemoteModels(ctx context.Context, id string) ([]RemoteMod return nil, fmt.Errorf("create request: %w", err) } - if provider.ApiKey != "" && !supportsOAuth(provider) { - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", provider.ApiKey)) + if apiKey != "" && !supportsOAuth(provider) { + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey)) } - resp, err := http.DefaultClient.Do(req) //nolint:gosec // G704: URL is from operator-configured LLM provider base URL + resp, err := http.DefaultClient.Do(req) //nolint:gosec // G704: URL is from operator-configured provider base URL if err != nil { return nil, fmt.Errorf("execute request: %w", err) } @@ -305,7 +318,7 @@ func (s *Service) FetchRemoteModels(ctx context.Context, id string) ([]RemoteMod } // toGetResponse converts a database provider to a response. 
-func (s *Service) toGetResponse(provider sqlc.LlmProvider) GetResponse { +func (s *Service) toGetResponse(provider sqlc.Provider) GetResponse { var metadata map[string]any if len(provider.Metadata) > 0 { if err := json.Unmarshal(provider.Metadata, &metadata); err != nil { @@ -315,7 +328,8 @@ func (s *Service) toGetResponse(provider sqlc.LlmProvider) GetResponse { } } - maskedAPIKey := maskAPIKey(provider.ApiKey) + cfg := providerConfig(provider.Config) + maskedCfg := maskConfigAPIKey(cfg) var icon string if provider.Icon.Valid { @@ -325,17 +339,57 @@ func (s *Service) toGetResponse(provider sqlc.LlmProvider) GetResponse { return GetResponse{ ID: provider.ID.String(), Name: provider.Name, - BaseURL: provider.BaseUrl, - APIKey: maskedAPIKey, ClientType: provider.ClientType, Icon: icon, Enable: provider.Enable, + Config: maskedCfg, Metadata: metadata, CreatedAt: provider.CreatedAt.Time, UpdatedAt: provider.UpdatedAt.Time, } } +// providerConfig parses the provider config JSONB. +func providerConfig(raw []byte) map[string]any { + if len(raw) == 0 { + return map[string]any{} + } + var cfg map[string]any + if err := json.Unmarshal(raw, &cfg); err != nil { + return map[string]any{} + } + if cfg == nil { + return map[string]any{} + } + return cfg +} + +// configString extracts a string from the config map. +func configString(cfg map[string]any, key string) string { + if cfg == nil { + return "" + } + v, _ := cfg[key].(string) + return v +} + +// ProviderConfigString is a public helper for extracting a string from the config JSONB. +func ProviderConfigString(provider sqlc.Provider, key string) string { + return configString(providerConfig(provider.Config), key) +} + +// maskConfigAPIKey returns a copy of config with api_key masked. 
+func maskConfigAPIKey(cfg map[string]any) map[string]any { + result := make(map[string]any, len(cfg)) + for k, v := range cfg { + result[k] = v + } + if apiKey, _ := result["api_key"].(string); apiKey != "" { + result["api_key"] = maskAPIKey(apiKey) + } + return result +} + // maskAPIKey masks an API key for security. func maskAPIKey(apiKey string) string { if apiKey == "" { @@ -346,14 +400,3 @@ func maskAPIKey(apiKey string) string { } return apiKey[:8] + strings.Repeat("*", len(apiKey)-8) } - -// resolveUpdatedAPIKey keeps the original key when the request value matches the masked version. -func resolveUpdatedAPIKey(existing string, updated *string) string { - if updated == nil { - return existing - } - if *updated == maskAPIKey(existing) { - return existing - } - return *updated -} diff --git a/internal/providers/service_test.go b/internal/providers/service_test.go index a4426d2f..6a5ee44f 100644 --- a/internal/providers/service_test.go +++ b/internal/providers/service_test.go @@ -2,39 +2,35 @@ package providers import "testing" -func TestResolveUpdatedAPIKey(t *testing.T) { +func TestMaskAPIKey(t *testing.T) { t.Parallel() - existing := "sk-1234567890abcdef" - masked := maskAPIKey(existing) - - t.Run("nil update keeps existing", func(t *testing.T) { + t.Run("short key is fully masked", func(t *testing.T) { t.Parallel() - if got := resolveUpdatedAPIKey(existing, nil); got != existing { - t.Fatalf("expected existing key, got %q", got) + if got := maskAPIKey("sk-12"); got != "*****" { + t.Fatalf("expected fully masked, got %q", got) } }) - t.Run("masked update keeps existing", func(t *testing.T) { + t.Run("long key preserves prefix", func(t *testing.T) { t.Parallel() - if got := resolveUpdatedAPIKey(existing, &masked); got != existing { - t.Fatalf("expected existing key, got %q", got) + key := "sk-1234567890abcdef" + masked := maskAPIKey(key) + if masked == key { + t.Fatal("masked key should differ from original") + } + if len(masked) != len(key) { + 
t.Fatalf("masked length %d != original length %d", len(masked), len(key)) + } + if masked[:8] != key[:8] { + t.Fatalf("prefix mismatch: %q vs %q", masked[:8], key[:8]) } }) - t.Run("new key replaces existing", func(t *testing.T) { + t.Run("empty key returns empty", func(t *testing.T) { t.Parallel() - next := "sk-new-secret" - if got := resolveUpdatedAPIKey(existing, &next); got != next { - t.Fatalf("expected new key, got %q", got) - } - }) - - t.Run("empty update clears key", func(t *testing.T) { - t.Parallel() - empty := "" - if got := resolveUpdatedAPIKey(existing, &empty); got != empty { - t.Fatalf("expected empty key, got %q", got) + if got := maskAPIKey(""); got != "" { + t.Fatalf("expected empty, got %q", got) } }) } diff --git a/internal/providers/types.go b/internal/providers/types.go index 81583407..9b023f27 100644 --- a/internal/providers/types.go +++ b/internal/providers/types.go @@ -2,24 +2,22 @@ package providers import "time" -// CreateRequest represents a request to create a new LLM provider. +// CreateRequest represents a request to create a new provider. type CreateRequest struct { Name string `json:"name" validate:"required"` - BaseURL string `json:"base_url" validate:"required,url"` - APIKey string `json:"api_key"` //nolint:gosec // intentional: LLM provider API key supplied by operator ClientType string `json:"client_type" validate:"required"` Icon string `json:"icon,omitempty"` + Config map[string]any `json:"config,omitempty"` Metadata map[string]any `json:"metadata,omitempty"` } -// UpdateRequest represents a request to update an existing LLM provider. +// UpdateRequest represents a request to update an existing provider. 
type UpdateRequest struct { Name *string `json:"name,omitempty"` - BaseURL *string `json:"base_url,omitempty"` - APIKey *string `json:"api_key,omitempty"` //nolint:gosec // intentional: LLM provider API key update field ClientType *string `json:"client_type,omitempty"` Icon *string `json:"icon,omitempty"` Enable *bool `json:"enable,omitempty"` + Config map[string]any `json:"config,omitempty"` Metadata map[string]any `json:"metadata,omitempty"` } @@ -27,11 +25,10 @@ type UpdateRequest struct { type GetResponse struct { ID string `json:"id"` Name string `json:"name"` - BaseURL string `json:"base_url"` - APIKey string `json:"api_key,omitempty"` //nolint:gosec // intentional: partially masked API key for display ClientType string `json:"client_type"` Icon string `json:"icon,omitempty"` Enable bool `json:"enable"` + Config map[string]any `json:"config,omitempty"` Metadata map[string]any `json:"metadata,omitempty"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` diff --git a/internal/registry/registry.go b/internal/registry/registry.go index 52c529f8..4e82403a 100644 --- a/internal/registry/registry.go +++ b/internal/registry/registry.go @@ -17,7 +17,8 @@ import ( // Load reads all .yaml / .yml files from dir and returns parsed provider // definitions. It returns nil (no error) when the directory does not exist. -func Load(dir string) ([]ProviderDefinition, error) { +// Malformed files are skipped with a warning logged via log. 
+func Load(log *slog.Logger, dir string) ([]ProviderDefinition, error) { entries, err := os.ReadDir(dir) if err != nil { if os.IsNotExist(err) { @@ -42,7 +43,9 @@ func Load(dir string) ([]ProviderDefinition, error) { } var def ProviderDefinition if err := yaml.Unmarshal(data, &def); err != nil { - return nil, fmt.Errorf("parse %s: %w", path, err) + log.Warn("registry: skipping malformed provider file", + slog.String("path", path), slog.Any("error", err)) + continue } if def.Name == "" { continue @@ -63,11 +66,25 @@ func Sync(ctx context.Context, logger *slog.Logger, queries *sqlc.Queries, defs icon = pgtype.Text{String: def.Icon, Valid: true} } + providerCfg := make(map[string]any) + for k, v := range def.Config { + providerCfg[k] = v + } + if def.BaseURL != "" { + providerCfg["base_url"] = def.BaseURL + } + providerConfigJSON, err := json.Marshal(providerCfg) + if err != nil { + logger.Warn("registry: failed to marshal provider config", + slog.String("name", def.Name), slog.Any("error", err)) + continue + } + provider, err := queries.UpsertRegistryProvider(ctx, sqlc.UpsertRegistryProviderParams{ Name: def.Name, - BaseUrl: def.BaseURL, ClientType: def.ClientType, Icon: icon, + Config: providerConfigJSON, }) if err != nil { logger.Warn("registry: failed to upsert provider", slog.String("name", def.Name), slog.Any("error", err)) @@ -93,11 +110,11 @@ func Sync(ctx context.Context, logger *slog.Logger, queries *sqlc.Queries, defs } _, err = queries.UpsertRegistryModel(ctx, sqlc.UpsertRegistryModelParams{ - ModelID: m.ModelID, - Name: name, - LlmProviderID: provider.ID, - Type: typ, - Config: configJSON, + ModelID: m.ModelID, + Name: name, + ProviderID: provider.ID, + Type: typ, + Config: configJSON, }) if err != nil { logger.Warn("registry: failed to upsert model", diff --git a/internal/registry/types.go b/internal/registry/types.go index 1ed7eb66..bd62ede4 100644 --- a/internal/registry/types.go +++ b/internal/registry/types.go @@ -5,21 +5,15 @@ type 
ProviderDefinition struct { Name string `yaml:"name"` ClientType string `yaml:"client_type"` Icon string `yaml:"icon,omitempty"` - BaseURL string `yaml:"base_url"` + BaseURL string `yaml:"base_url,omitempty"` + Config map[string]any `yaml:"config,omitempty"` Models []ModelDefinition `yaml:"models"` } // ModelDefinition describes a model within a provider definition. type ModelDefinition struct { - ModelID string `yaml:"model_id"` - Name string `yaml:"name"` - Type string `yaml:"type"` - Config ModelConfig `yaml:"config"` -} - -// ModelConfig mirrors the JSONB config stored per model. -type ModelConfig struct { - Dimensions *int `yaml:"dimensions,omitempty" json:"dimensions,omitempty"` - Compatibilities []string `yaml:"compatibilities,omitempty" json:"compatibilities,omitempty"` - ContextWindow *int `yaml:"context_window,omitempty" json:"context_window,omitempty"` + ModelID string `yaml:"model_id"` + Name string `yaml:"name"` + Type string `yaml:"type"` + Config map[string]any `yaml:"config"` } diff --git a/internal/tts/service.go b/internal/tts/service.go index 284b4903..afda9370 100644 --- a/internal/tts/service.go +++ b/internal/tts/service.go @@ -9,8 +9,6 @@ import ( "log/slog" "strings" - "github.com/jackc/pgx/v5/pgtype" - "github.com/memohai/memoh/internal/db" "github.com/memohai/memoh/internal/db/sqlc" ) @@ -36,332 +34,63 @@ func (s *Service) ListMeta(_ context.Context) []ProviderMetaResponse { } // --------------------------------------------------------------------------- -// Provider CRUD +// Read helpers (speech-filtered views of unified tables) // --------------------------------------------------------------------------- -func (s *Service) CreateProvider(ctx context.Context, req CreateProviderRequest) (ProviderResponse, error) { - adapter, err := s.registry.Get(req.Provider) +// ListSpeechProviders returns providers with speech client types. 
+func (s *Service) ListSpeechProviders(ctx context.Context) ([]SpeechProviderResponse, error) { + rows, err := s.queries.ListSpeechProviders(ctx) if err != nil { - return ProviderResponse{}, fmt.Errorf("unsupported provider: %s", req.Provider) + return nil, fmt.Errorf("list speech providers: %w", err) } - row, err := s.queries.CreateTtsProvider(ctx, sqlc.CreateTtsProviderParams{ - Name: strings.TrimSpace(req.Name), - Provider: string(req.Provider), - Config: []byte("{}"), - Enable: false, - }) - if err != nil { - return ProviderResponse{}, fmt.Errorf("create tts provider: %w", err) - } - - if importErr := s.importModelsForProvider(ctx, row.ID, adapter); importErr != nil { - s.logger.Warn("auto-import models failed", slog.String("provider_id", row.ID.String()), slog.Any("error", importErr)) - } - - return s.toProviderResponse(row), nil -} - -func (s *Service) GetProvider(ctx context.Context, id string) (ProviderResponse, error) { - pgID, err := db.ParseUUID(id) - if err != nil { - return ProviderResponse{}, err - } - row, err := s.queries.GetTtsProviderByID(ctx, pgID) - if err != nil { - return ProviderResponse{}, fmt.Errorf("get tts provider: %w", err) - } - return s.toProviderResponse(row), nil -} - -func (s *Service) ListProviders(ctx context.Context, provider string) ([]ProviderResponse, error) { - provider = strings.TrimSpace(provider) - var ( - rows []sqlc.TtsProvider - err error - ) - if provider == "" { - rows, err = s.queries.ListTtsProviders(ctx) - } else { - rows, err = s.queries.ListTtsProvidersByProvider(ctx, provider) - } - if err != nil { - return nil, fmt.Errorf("list tts providers: %w", err) - } - items := make([]ProviderResponse, 0, len(rows)) + items := make([]SpeechProviderResponse, 0, len(rows)) for _, row := range rows { - items = append(items, s.toProviderResponse(row)) + items = append(items, toSpeechProviderResponse(row)) } return items, nil } -func (s *Service) UpdateProvider(ctx context.Context, id string, req UpdateProviderRequest) 
(ProviderResponse, error) { - pgID, err := db.ParseUUID(id) +// ListSpeechModels returns all speech-type models. +func (s *Service) ListSpeechModels(ctx context.Context) ([]SpeechModelResponse, error) { + rows, err := s.queries.ListSpeechModels(ctx) if err != nil { - return ProviderResponse{}, err + return nil, fmt.Errorf("list speech models: %w", err) } - current, err := s.queries.GetTtsProviderByID(ctx, pgID) - if err != nil { - return ProviderResponse{}, fmt.Errorf("get tts provider: %w", err) - } - name := current.Name - if req.Name != nil { - name = strings.TrimSpace(*req.Name) - } - enable := current.Enable - if req.Enable != nil { - enable = *req.Enable - } - updated, err := s.queries.UpdateTtsProvider(ctx, sqlc.UpdateTtsProviderParams{ - ID: pgID, - Name: name, - Provider: current.Provider, - Config: current.Config, - Enable: enable, - }) - if err != nil { - return ProviderResponse{}, fmt.Errorf("update tts provider: %w", err) - } - return s.toProviderResponse(updated), nil -} - -func (s *Service) DeleteProvider(ctx context.Context, id string) error { - pgID, err := db.ParseUUID(id) - if err != nil { - return err - } - return s.queries.DeleteTtsProvider(ctx, pgID) -} - -// EnsureDefaults creates a default TTS provider for each registered adapter -// type that does not yet exist in the database. 
-func (s *Service) EnsureDefaults(ctx context.Context) error { - rows, err := s.queries.ListTtsProviders(ctx) - if err != nil { - return fmt.Errorf("list tts providers: %w", err) - } - existing := make(map[string]struct{}, len(rows)) + items := make([]SpeechModelResponse, 0, len(rows)) for _, row := range rows { - existing[row.Provider] = struct{}{} + items = append(items, toSpeechModelFromListRow(row)) } - - for _, meta := range s.registry.ListMeta() { - if _, ok := existing[meta.Provider]; ok { - continue - } - adapter, adapterErr := s.registry.Get(TtsType(meta.Provider)) - if adapterErr != nil { - continue - } - row, createErr := s.queries.CreateTtsProvider(ctx, sqlc.CreateTtsProviderParams{ - Name: meta.DisplayName, - Provider: meta.Provider, - Config: []byte("{}"), - Enable: false, - }) - if createErr != nil { - s.logger.Warn("failed to create default tts provider", - slog.String("provider", meta.Provider), - slog.Any("error", createErr), - ) - continue - } - if importErr := s.importModelsForProvider(ctx, row.ID, adapter); importErr != nil { - s.logger.Warn("auto-import models failed for default tts provider", - slog.String("provider", meta.Provider), - slog.Any("error", importErr), - ) - } - s.logger.Info("created default tts provider", slog.String("provider", meta.Provider)) - } - return nil + return items, nil } -// --------------------------------------------------------------------------- -// Model CRUD -// --------------------------------------------------------------------------- - -func (s *Service) CreateModel(ctx context.Context, req CreateModelRequest) (ModelResponse, error) { - modelID := strings.TrimSpace(req.ModelID) - if modelID == "" { - return ModelResponse{}, errors.New("model_id is required") - } - providerPgID, err := db.ParseUUID(req.TtsProviderID) - if err != nil { - return ModelResponse{}, fmt.Errorf("invalid tts_provider_id: %w", err) - } - provider, err := s.queries.GetTtsProviderByID(ctx, providerPgID) - if err != nil { - return 
ModelResponse{}, fmt.Errorf("get tts provider: %w", err) - } - cfgJSON := []byte("{}") - if req.Config != nil { - cfgJSON, err = json.Marshal(req.Config) - if err != nil { - return ModelResponse{}, fmt.Errorf("marshal config: %w", err) - } - } - name := pgtype.Text{} - if n := strings.TrimSpace(req.Name); n != "" { - name = pgtype.Text{String: n, Valid: true} - } - row, err := s.queries.CreateTtsModel(ctx, sqlc.CreateTtsModelParams{ - ModelID: modelID, - Name: name, - TtsProviderID: providerPgID, - Config: cfgJSON, - }) - if err != nil { - return ModelResponse{}, fmt.Errorf("create tts model: %w", err) - } - return s.toModelResponse(row, provider.Provider), nil -} - -func (s *Service) ListModelsByProvider(ctx context.Context, providerID string) ([]ModelResponse, error) { +// ListSpeechModelsByProvider returns speech models for a given provider. +func (s *Service) ListSpeechModelsByProvider(ctx context.Context, providerID string) ([]SpeechModelResponse, error) { pgID, err := db.ParseUUID(providerID) if err != nil { return nil, err } - provider, err := s.queries.GetTtsProviderByID(ctx, pgID) + rows, err := s.queries.ListSpeechModelsByProviderID(ctx, pgID) if err != nil { - return nil, fmt.Errorf("get tts provider: %w", err) + return nil, fmt.Errorf("list speech models by provider: %w", err) } - rows, err := s.queries.ListTtsModelsByProviderID(ctx, pgID) - if err != nil { - return nil, fmt.Errorf("list tts models: %w", err) - } - items := make([]ModelResponse, 0, len(rows)) + items := make([]SpeechModelResponse, 0, len(rows)) for _, row := range rows { - items = append(items, s.toModelResponse(row, provider.Provider)) + items = append(items, toSpeechModelFromModel(row, "")) } return items, nil } -func (s *Service) ListAllModels(ctx context.Context) ([]ModelResponse, error) { - rows, err := s.queries.ListTtsModels(ctx) - if err != nil { - return nil, fmt.Errorf("list tts models: %w", err) - } - providerCache := make(map[string]string) - items := make([]ModelResponse, 
0, len(rows)) - for _, row := range rows { - providerType, ok := providerCache[row.TtsProviderID.String()] - if !ok { - p, pErr := s.queries.GetTtsProviderByID(ctx, row.TtsProviderID) - if pErr != nil { - providerType = "" - } else { - providerType = p.Provider - } - providerCache[row.TtsProviderID.String()] = providerType - } - items = append(items, s.toModelResponse(row, providerType)) - } - return items, nil -} - -func (s *Service) GetModel(ctx context.Context, id string) (ModelResponse, error) { +// GetSpeechModel returns a speech model by ID. +func (s *Service) GetSpeechModel(ctx context.Context, id string) (SpeechModelResponse, error) { pgID, err := db.ParseUUID(id) if err != nil { - return ModelResponse{}, err + return SpeechModelResponse{}, err } - row, err := s.queries.GetTtsModelWithProvider(ctx, pgID) + row, err := s.queries.GetSpeechModelWithProvider(ctx, pgID) if err != nil { - return ModelResponse{}, fmt.Errorf("get tts model: %w", err) + return SpeechModelResponse{}, fmt.Errorf("get speech model: %w", err) } - return s.toModelWithProviderResponse(row), nil -} - -func (s *Service) UpdateModel(ctx context.Context, id string, req UpdateModelRequest) (ModelResponse, error) { - pgID, err := db.ParseUUID(id) - if err != nil { - return ModelResponse{}, err - } - current, err := s.queries.GetTtsModelByID(ctx, pgID) - if err != nil { - return ModelResponse{}, fmt.Errorf("get tts model: %w", err) - } - name := current.Name - if req.Name != nil { - name = pgtype.Text{String: strings.TrimSpace(*req.Name), Valid: true} - } - config := current.Config - if req.Config != nil { - configJSON, marshalErr := json.Marshal(req.Config) - if marshalErr != nil { - return ModelResponse{}, fmt.Errorf("marshal config: %w", marshalErr) - } - config = configJSON - } - updated, err := s.queries.UpdateTtsModel(ctx, sqlc.UpdateTtsModelParams{ - ID: pgID, - Name: name, - Config: config, - }) - if err != nil { - return ModelResponse{}, fmt.Errorf("update tts model: %w", err) - } - 
provider, _ := s.queries.GetTtsProviderByID(ctx, updated.TtsProviderID) - return s.toModelResponse(updated, provider.Provider), nil -} - -func (s *Service) DeleteModel(ctx context.Context, id string) error { - pgID, err := db.ParseUUID(id) - if err != nil { - return err - } - return s.queries.DeleteTtsModel(ctx, pgID) -} - -// ImportModels discovers models from the adapter and upserts them into the database. -func (s *Service) ImportModels(ctx context.Context, providerID string) ([]ModelResponse, error) { - pgID, err := db.ParseUUID(providerID) - if err != nil { - return nil, err - } - provider, err := s.queries.GetTtsProviderByID(ctx, pgID) - if err != nil { - return nil, fmt.Errorf("get tts provider: %w", err) - } - adapter, err := s.registry.Get(TtsType(provider.Provider)) - if err != nil { - return nil, fmt.Errorf("unsupported provider: %s", provider.Provider) - } - if importErr := s.importModelsForProvider(ctx, pgID, adapter); importErr != nil { - return nil, importErr - } - return s.ListModelsByProvider(ctx, providerID) -} - -func (s *Service) importModelsForProvider(ctx context.Context, providerID pgtype.UUID, adapter TtsAdapter) error { - models := adapter.Models() - for _, m := range models { - existing, err := s.queries.GetTtsModelByProviderAndModelID(ctx, sqlc.GetTtsModelByProviderAndModelIDParams{ - TtsProviderID: providerID, - ModelID: m.ID, - }) - name := pgtype.Text{String: m.Name, Valid: m.Name != ""} - if err == nil { - _, updateErr := s.queries.UpdateTtsModel(ctx, sqlc.UpdateTtsModelParams{ - ID: existing.ID, - Name: name, - Config: existing.Config, - }) - if updateErr != nil { - return fmt.Errorf("update tts model %s: %w", m.ID, updateErr) - } - } else { - _, createErr := s.queries.CreateTtsModel(ctx, sqlc.CreateTtsModelParams{ - ModelID: m.ID, - Name: name, - TtsProviderID: providerID, - Config: []byte("{}"), - }) - if createErr != nil { - return fmt.Errorf("create tts model %s: %w", m.ID, createErr) - } - } - } - return nil + return 
toSpeechModelWithProviderResponse(row), nil } // --------------------------------------------------------------------------- @@ -375,22 +104,17 @@ func (s *Service) Synthesize(ctx context.Context, modelID string, text string, o if err != nil { return nil, "", err } - modelRow, err := s.queries.GetTtsModelWithProvider(ctx, pgID) + modelRow, err := s.queries.GetSpeechModelWithProvider(ctx, pgID) if err != nil { - return nil, "", fmt.Errorf("get tts model: %w", err) + return nil, "", fmt.Errorf("get speech model: %w", err) } - adapter, err := s.registry.Get(TtsType(modelRow.ProviderType)) + adapterType := clientTypeToTtsType(modelRow.ProviderType) + adapter, err := s.registry.Get(adapterType) if err != nil { return nil, "", fmt.Errorf("unsupported provider: %s", modelRow.ProviderType) } - var savedCfg map[string]any - if len(modelRow.Config) > 0 { - _ = json.Unmarshal(modelRow.Config, &savedCfg) - } - if savedCfg == nil { - savedCfg = make(map[string]any) - } + savedCfg := parseModelConfig(modelRow.Config) for k, v := range overrideCfg { savedCfg[k] = v } @@ -417,23 +141,17 @@ func (s *Service) StreamToFile(ctx context.Context, modelID string, text string, if err != nil { return "", err } - modelRow, err := s.queries.GetTtsModelWithProvider(ctx, pgID) + modelRow, err := s.queries.GetSpeechModelWithProvider(ctx, pgID) if err != nil { - return "", fmt.Errorf("get tts model: %w", err) + return "", fmt.Errorf("get speech model: %w", err) } - adapter, err := s.registry.Get(TtsType(modelRow.ProviderType)) + adapterType := clientTypeToTtsType(modelRow.ProviderType) + adapter, err := s.registry.Get(adapterType) if err != nil { return "", fmt.Errorf("unsupported provider: %s", modelRow.ProviderType) } - var savedCfg map[string]any - if len(modelRow.Config) > 0 { - _ = json.Unmarshal(modelRow.Config, &savedCfg) - } - if savedCfg == nil { - savedCfg = make(map[string]any) - } - + savedCfg := parseModelConfig(modelRow.Config) audioCfg := buildAudioConfig(savedCfg) if err := 
audioCfg.Validate(); err != nil { return "", fmt.Errorf("invalid audio config: %w", err) @@ -463,7 +181,7 @@ func (s *Service) StreamToFile(ctx context.Context, modelID string, text string, } // --------------------------------------------------------------------------- -// Helpers +// Capabilities // --------------------------------------------------------------------------- // GetModelCapabilities returns the adapter-level capabilities for a stored model. @@ -472,11 +190,12 @@ func (s *Service) GetModelCapabilities(ctx context.Context, modelID string) (*Mo if err != nil { return nil, err } - modelRow, err := s.queries.GetTtsModelWithProvider(ctx, pgID) + modelRow, err := s.queries.GetSpeechModelWithProvider(ctx, pgID) if err != nil { - return nil, fmt.Errorf("get tts model: %w", err) + return nil, fmt.Errorf("get speech model: %w", err) } - adapter, err := s.registry.Get(TtsType(modelRow.ProviderType)) + adapterType := clientTypeToTtsType(modelRow.ProviderType) + adapter, err := s.registry.Get(adapterType) if err != nil { return nil, fmt.Errorf("unsupported provider: %s", modelRow.ProviderType) } @@ -488,6 +207,34 @@ func (s *Service) GetModelCapabilities(ctx context.Context, modelID string) (*Mo return nil, fmt.Errorf("model %s not found in adapter", modelRow.ModelID) } +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +// clientTypeToTtsType maps the unified client_type to the TTS adapter type. 
+func clientTypeToTtsType(clientType string) TtsType { + switch clientType { + case "edge-speech": + return "edge" + default: + return TtsType(clientType) + } +} + +func parseModelConfig(raw []byte) map[string]any { + if len(raw) == 0 { + return make(map[string]any) + } + var cfg map[string]any + if err := json.Unmarshal(raw, &cfg); err != nil { + return make(map[string]any) + } + if cfg == nil { + return make(map[string]any) + } + return cfg +} + func buildAudioConfig(cfg map[string]any) AudioConfig { ac := AudioConfig{} if voice, ok := cfg["voice"].(map[string]any); ok { @@ -545,59 +292,76 @@ func resolveContentType(format string) string { } } -func (*Service) toProviderResponse(row sqlc.TtsProvider) ProviderResponse { - return ProviderResponse{ - ID: row.ID.String(), - Name: row.Name, - Provider: row.Provider, - Enable: row.Enable, - CreatedAt: row.CreatedAt.Time, - UpdatedAt: row.UpdatedAt.Time, +func toSpeechProviderResponse(row sqlc.Provider) SpeechProviderResponse { + return SpeechProviderResponse{ + ID: row.ID.String(), + Name: row.Name, + ClientType: row.ClientType, + Enable: row.Enable, + CreatedAt: row.CreatedAt.Time, + UpdatedAt: row.UpdatedAt.Time, } } -func (s *Service) toModelResponse(row sqlc.TtsModel, providerType string) ModelResponse { +func toSpeechModelFromListRow(row sqlc.ListSpeechModelsRow) SpeechModelResponse { var cfg map[string]any if len(row.Config) > 0 { - if err := json.Unmarshal(row.Config, &cfg); err != nil { - s.logger.Warn("tts model config unmarshal failed", slog.String("id", row.ID.String()), slog.Any("error", err)) - } + _ = json.Unmarshal(row.Config, &cfg) } name := "" if row.Name.Valid { name = row.Name.String } - return ModelResponse{ - ID: row.ID.String(), - ModelID: row.ModelID, - Name: name, - TtsProviderID: row.TtsProviderID.String(), - ProviderType: providerType, - Config: cfg, - CreatedAt: row.CreatedAt.Time, - UpdatedAt: row.UpdatedAt.Time, + return SpeechModelResponse{ + ID: row.ID.String(), + ModelID: row.ModelID, + 
Name: name, + ProviderID: row.ProviderID.String(), + ProviderType: row.ProviderType, + Config: cfg, + CreatedAt: row.CreatedAt.Time, + UpdatedAt: row.UpdatedAt.Time, } } -func (s *Service) toModelWithProviderResponse(row sqlc.GetTtsModelWithProviderRow) ModelResponse { +func toSpeechModelFromModel(row sqlc.Model, providerType string) SpeechModelResponse { var cfg map[string]any if len(row.Config) > 0 { - if err := json.Unmarshal(row.Config, &cfg); err != nil { - s.logger.Warn("tts model config unmarshal failed", slog.String("id", row.ID.String()), slog.Any("error", err)) - } + _ = json.Unmarshal(row.Config, &cfg) } name := "" if row.Name.Valid { name = row.Name.String } - return ModelResponse{ - ID: row.ID.String(), - ModelID: row.ModelID, - Name: name, - TtsProviderID: row.TtsProviderID.String(), - ProviderType: row.ProviderType, - Config: cfg, - CreatedAt: row.CreatedAt.Time, - UpdatedAt: row.UpdatedAt.Time, + return SpeechModelResponse{ + ID: row.ID.String(), + ModelID: row.ModelID, + Name: name, + ProviderID: row.ProviderID.String(), + ProviderType: providerType, + Config: cfg, + CreatedAt: row.CreatedAt.Time, + UpdatedAt: row.UpdatedAt.Time, + } +} + +func toSpeechModelWithProviderResponse(row sqlc.GetSpeechModelWithProviderRow) SpeechModelResponse { + var cfg map[string]any + if len(row.Config) > 0 { + _ = json.Unmarshal(row.Config, &cfg) + } + name := "" + if row.Name.Valid { + name = row.Name.String + } + return SpeechModelResponse{ + ID: row.ID.String(), + ModelID: row.ModelID, + Name: name, + ProviderID: row.ProviderID.String(), + ProviderType: row.ProviderType, + Config: cfg, + CreatedAt: row.CreatedAt.Time, + UpdatedAt: row.UpdatedAt.Time, } } diff --git a/internal/tts/types.go b/internal/tts/types.go index c0239269..461b1bd8 100644 --- a/internal/tts/types.go +++ b/internal/tts/types.go @@ -2,27 +2,7 @@ package tts import "time" -// --- Provider types --- - -type CreateProviderRequest struct { - Name string `json:"name"` - Provider TtsType 
`json:"provider"` -} - -type UpdateProviderRequest struct { - Name *string `json:"name,omitempty"` - Enable *bool `json:"enable,omitempty"` -} - -type ProviderResponse struct { - ID string `json:"id"` - Name string `json:"name"` - Provider string `json:"provider"` - Enable bool `json:"enable"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - +// ProviderMetaResponse exposes adapter metadata (from the registry, not DB). type ProviderMetaResponse struct { Provider string `json:"provider"` DisplayName string `json:"display_name"` @@ -31,33 +11,41 @@ type ProviderMetaResponse struct { Models []ModelInfo `json:"models"` } -// --- Model types --- - -type ModelResponse struct { - ID string `json:"id"` - ModelID string `json:"model_id"` - Name string `json:"name"` - TtsProviderID string `json:"tts_provider_id"` - ProviderType string `json:"provider_type,omitempty"` - Config map[string]any `json:"config,omitempty"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` +// SpeechProviderResponse represents a speech-capable provider from the unified providers table. +type SpeechProviderResponse struct { + ID string `json:"id"` + Name string `json:"name"` + ClientType string `json:"client_type"` + Enable bool `json:"enable"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` } -type CreateModelRequest struct { - ModelID string `json:"model_id"` - Name string `json:"name"` - TtsProviderID string `json:"tts_provider_id"` - Config map[string]any `json:"config,omitempty"` +// SpeechModelResponse represents a speech model from the unified models table. 
+type SpeechModelResponse struct { + ID string `json:"id"` + ModelID string `json:"model_id"` + Name string `json:"name"` + ProviderID string `json:"provider_id"` + ProviderType string `json:"provider_type,omitempty"` + Config map[string]any `json:"config,omitempty"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` } -type UpdateModelRequest struct { +// UpdateSpeechProviderRequest is used for updating a speech provider. +type UpdateSpeechProviderRequest struct { + Name *string `json:"name,omitempty"` + Enable *bool `json:"enable,omitempty"` +} + +// UpdateSpeechModelRequest is used for updating a speech model. +type UpdateSpeechModelRequest struct { Name *string `json:"name,omitempty"` Config map[string]any `json:"config,omitempty"` } -// --- Synthesis types --- - +// TestSynthesizeRequest represents a text-to-speech test request. type TestSynthesizeRequest struct { Text string `json:"text"` Config map[string]any `json:"config,omitempty"` diff --git a/packages/icons/src/icons/Brave.vue b/packages/icons/src/icons/Brave.vue index f47bb4cf..2a5ef408 100644 --- a/packages/icons/src/icons/Brave.vue +++ b/packages/icons/src/icons/Brave.vue @@ -5,7 +5,23 @@ :height="size" viewBox="0 0 1024 1024" v-bind="$attrs" - > + >