diff --git a/agent/src/agent.ts b/agent/src/agent.ts index 10c0ff2a..d1b12287 100644 --- a/agent/src/agent.ts +++ b/agent/src/agent.ts @@ -16,6 +16,7 @@ import { MCPConnection, Schedule, } from './types' +import { ModelInput, hasInputModality } from './types/model' import { system, schedule, user, subagentSystem } from './prompts' import { AuthFetcher } from './index' import { createModel } from './model' @@ -26,7 +27,8 @@ import { dedupeAttachments, AttachmentsStreamExtractor, } from './utils/attachments' -import type { ContainerFileAttachment } from './types/attachment' +import type { ContainerFileAttachment, ImageAttachment } from './types/attachment' +import { readFileSync } from 'fs' import { getMCPTools } from './tools/mcp' import { getTools } from './tools' import { buildIdentityHeaders } from './utils/headers' @@ -167,27 +169,59 @@ export const createAgent = ( } const generateUserPrompt = (input: AgentInput) => { - const images = input.attachments.filter( - (attachment) => attachment.type === 'image', - ) - const files = input.attachments.filter( + const supportsImage = hasInputModality(modelConfig, ModelInput.Image) + + // Separate attachments by model capability: native images vs fallback file paths. + const nativeImages = supportsImage + ? input.attachments.filter((a) => a.type === 'image') + : [] + const fallbackFiles = input.attachments.filter( (a): a is ContainerFileAttachment => a.type === 'file', ) + // Images the model cannot handle natively are mentioned as path references. + const unsupportedImages: ContainerFileAttachment[] = supportsImage + ? 
[] + : input.attachments + .filter((a) => a.type === 'image') + .map((a) => ({ + type: 'file' as const, + path: String((a as ImageAttachment).path || a.metadata?.path || '[image]'), + metadata: a.metadata, + })) + const allFiles: ContainerFileAttachment[] = [...fallbackFiles, ...unsupportedImages] + const text = user(input.query, { channelIdentityId: identity.channelIdentityId || identity.contactId || '', displayName: identity.displayName || identity.contactName || 'User', channel: currentChannel, conversationType: identity.conversationType || 'direct', date: new Date(), - attachments: files, + attachments: allFiles, }) + const imageParts: ImagePart[] = nativeImages.map((image) => { + const img = image as ImageAttachment + if (img.base64) { + return { type: 'image', image: img.base64 } as ImagePart + } + if (img.path) { + try { + const data = readFileSync(img.path) + const mime = img.mime || 'image/png' + return { type: 'image', image: `data:${mime};base64,${data.toString('base64')}` } as ImagePart + } catch { + return { type: 'image', image: '' } as ImagePart + } + } + if (img.url) { + return { type: 'image', image: img.url } as ImagePart + } + return { type: 'image', image: '' } as ImagePart + }).filter((p) => p.image !== '') const userMessage: UserModelMessage = { role: 'user', content: [ { type: 'text', text }, - ...images.map( - (image) => ({ type: 'image', image: image.base64 }) as ImagePart, - ), + ...imageParts, ], } return userMessage @@ -461,9 +495,12 @@ export const createAgent = ( break case 'file': yield { - type: 'image_delta', - image: chunk.file.base64, - metadata: chunk, + type: 'attachment_delta', + attachments: [{ + type: 'image', + url: `data:${chunk.file.mediaType ?? 'image/png'};base64,${chunk.file.base64}`, + mime: chunk.file.mediaType ?? 
'image/png', + }], } } } diff --git a/agent/src/models.ts b/agent/src/models.ts index 999762a6..a8c03b5d 100644 --- a/agent/src/models.ts +++ b/agent/src/models.ts @@ -16,7 +16,7 @@ export const ClientTypeModel = z.enum([ export const ModelConfigModel = z.object({ modelId: z.string().min(1, 'Model ID is required'), clientType: ClientTypeModel, - input: z.array(z.enum(['text', 'image'])), + input: z.array(z.enum(['text', 'image', 'audio', 'video', 'file'])), apiKey: z.string().min(1, 'API key is required'), baseUrl: z.string(), }) @@ -49,7 +49,10 @@ export const ScheduleModel = z.object({ export const ImageAttachmentModel = z.object({ type: z.literal('image'), - base64: z.string().min(1, 'Image base64 is required'), + base64: z.string().optional(), + path: z.string().optional(), + mime: z.string().optional(), + name: z.string().optional(), metadata: z.record(z.string(), z.any()).optional(), }) diff --git a/agent/src/types/action.ts b/agent/src/types/action.ts index 8cb53b49..2c123307 100644 --- a/agent/src/types/action.ts +++ b/agent/src/types/action.ts @@ -39,11 +39,6 @@ export interface AttachmentDeltaAction extends BaseAction { attachments: AgentAttachment[] } -export interface ImageDeltaAction extends BaseAction { - type: 'image_delta' - image: string -} - export interface TextEndAction extends BaseAction { type: 'text_end' } @@ -79,7 +74,6 @@ export type AgentAction = | TextStartAction | TextDeltaAction | AttachmentDeltaAction - | ImageDeltaAction | TextEndAction | ToolCallStartAction | ToolCallEndAction diff --git a/agent/src/types/attachment.ts b/agent/src/types/attachment.ts index ea6df047..961bdd0b 100644 --- a/agent/src/types/attachment.ts +++ b/agent/src/types/attachment.ts @@ -1,11 +1,17 @@ export interface BaseAgentAttachment { type: string + url?: string + name?: string + mime?: string + asset_id?: string metadata?: Record } export interface ImageAttachment extends BaseAgentAttachment { type: 'image' - base64: string + base64?: string + url?: string + 
path?: string } export interface ContainerFileAttachment extends BaseAgentAttachment { diff --git a/agent/src/types/model.ts b/agent/src/types/model.ts index 7de45ac6..937070ef 100644 --- a/agent/src/types/model.ts +++ b/agent/src/types/model.ts @@ -14,6 +14,9 @@ export enum ClientType { export enum ModelInput { Text = 'text', Image = 'image', + Audio = 'audio', + Video = 'video', + File = 'file', } export interface ModelConfig { @@ -22,4 +25,7 @@ export interface ModelConfig { modelId: string clientType: ClientType input: ModelInput[] -} \ No newline at end of file +} + +export const hasInputModality = (config: ModelConfig, modality: ModelInput): boolean => + config.input.includes(modality) \ No newline at end of file diff --git a/agent/src/utils/attachments.ts b/agent/src/utils/attachments.ts index faadff5e..69876f4a 100644 --- a/agent/src/utils/attachments.ts +++ b/agent/src/utils/attachments.ts @@ -10,7 +10,7 @@ const ATTACHMENTS_END = '' const getAttachmentKey = (a: AgentAttachment): string => { switch (a.type) { case 'file': return `file:${a.path}` - case 'image': return `image:${a.base64.slice(0, 64)}` + case 'image': return `image:${(a.base64 ?? a.url ?? 
'').slice(0, 64)}` } } diff --git a/cmd/agent/main.go b/cmd/agent/main.go index b0656bf4..18ed4b77 100644 --- a/cmd/agent/main.go +++ b/cmd/agent/main.go @@ -36,6 +36,9 @@ import ( dbsqlc "github.com/memohai/memoh/internal/db/sqlc" "github.com/memohai/memoh/internal/embeddings" "github.com/memohai/memoh/internal/handlers" + "github.com/memohai/memoh/internal/healthcheck" + channelchecker "github.com/memohai/memoh/internal/healthcheck/checkers/channel" + mcpchecker "github.com/memohai/memoh/internal/healthcheck/checkers/mcp" "github.com/memohai/memoh/internal/logger" "github.com/memohai/memoh/internal/mcp" mcpcontainer "github.com/memohai/memoh/internal/mcp/providers/container" @@ -45,6 +48,8 @@ import ( mcpschedule "github.com/memohai/memoh/internal/mcp/providers/schedule" mcpweb "github.com/memohai/memoh/internal/mcp/providers/web" mcpfederation "github.com/memohai/memoh/internal/mcp/sources/federation" + "github.com/memohai/memoh/internal/media" + "github.com/memohai/memoh/internal/media/providers/containerfs" "github.com/memohai/memoh/internal/memory" "github.com/memohai/memoh/internal/message" "github.com/memohai/memoh/internal/message/event" @@ -102,13 +107,15 @@ func main() { // services requiring provide functions provideRouteService, provideMessageService, + provideMediaService, // channel infrastructure local.NewRouteHub, provideChannelRegistry, - channel.NewService, + channel.NewStore, provideChannelRouter, provideChannelManager, + provideChannelLifecycleService, // conversation flow provideChatResolver, @@ -328,18 +335,37 @@ func provideChannelRegistry(log *slog.Logger, hub *local.RouteHub) *channel.Regi return registry } -func provideChannelRouter(log *slog.Logger, registry *channel.Registry, routeService *route.DBService, msgService *message.DBService, resolver *flow.Resolver, identityService *identities.Service, botService *bots.Service, policyService *policy.Service, preauthService *preauth.Service, bindService *bind.Service, rc *boot.RuntimeConfig) 
*inbound.ChannelInboundProcessor { - return inbound.NewChannelInboundProcessor(log, registry, routeService, msgService, resolver, identityService, botService, policyService, preauthService, bindService, rc.JwtSecret, 5*time.Minute) +func provideChannelRouter( + log *slog.Logger, + registry *channel.Registry, + routeService *route.DBService, + msgService *message.DBService, + resolver *flow.Resolver, + identityService *identities.Service, + botService *bots.Service, + policyService *policy.Service, + preauthService *preauth.Service, + bindService *bind.Service, + mediaService *media.Service, + rc *boot.RuntimeConfig, +) *inbound.ChannelInboundProcessor { + processor := inbound.NewChannelInboundProcessor(log, registry, routeService, msgService, resolver, identityService, botService, policyService, preauthService, bindService, rc.JwtSecret, 5*time.Minute) + processor.SetMediaService(mediaService) + return processor } -func provideChannelManager(log *slog.Logger, registry *channel.Registry, channelService *channel.Service, channelRouter *inbound.ChannelInboundProcessor) *channel.Manager { - mgr := channel.NewManager(log, registry, channelService, channelRouter) +func provideChannelManager(log *slog.Logger, registry *channel.Registry, channelStore *channel.Store, channelRouter *inbound.ChannelInboundProcessor) *channel.Manager { + mgr := channel.NewManager(log, registry, channelStore, channelRouter) if mw := channelRouter.IdentityMiddleware(); mw != nil { mgr.Use(mw) } return mgr } +func provideChannelLifecycleService(channelStore *channel.Store, channelManager *channel.Manager) *channel.Lifecycle { + return channel.NewLifecycle(channelStore, channelManager) +} + // --------------------------------------------------------------------------- // containerd handler & tool gateway // --------------------------------------------------------------------------- @@ -348,9 +374,9 @@ func provideContainerdHandler(log *slog.Logger, service ctr.Service, cfg config. 
return handlers.NewContainerdHandler(log, service, cfg.MCP, cfg.Containerd.Namespace, botService, accountService, policyService, queries) } -func provideToolGatewayService(log *slog.Logger, cfg config.Config, channelManager *channel.Manager, registry *channel.Registry, channelService *channel.Service, scheduleService *schedule.Service, memoryService *memory.Service, chatService *conversation.Service, accountService *accounts.Service, settingsService *settings.Service, searchProviderService *searchproviders.Service, manager *mcp.Manager, containerdHandler *handlers.ContainerdHandler, mcpConnService *mcp.ConnectionService) *mcp.ToolGatewayService { +func provideToolGatewayService(log *slog.Logger, cfg config.Config, channelManager *channel.Manager, registry *channel.Registry, channelStore *channel.Store, scheduleService *schedule.Service, memoryService *memory.Service, chatService *conversation.Service, accountService *accounts.Service, settingsService *settings.Service, searchProviderService *searchproviders.Service, manager *mcp.Manager, containerdHandler *handlers.ContainerdHandler, mcpConnService *mcp.ConnectionService) *mcp.ToolGatewayService { messageExec := mcpmessage.NewExecutor(log, channelManager, channelManager, registry) - directoryExec := mcpdirectory.NewExecutor(log, registry, channelService, registry) + directoryExec := mcpdirectory.NewExecutor(log, registry, channelStore, registry) scheduleExec := mcpschedule.NewExecutor(log, scheduleService) memoryExec := mcpmemory.NewExecutor(log, memoryService, chatService, accountService) webExec := mcpweb.NewExecutor(log, settingsService, searchProviderService) @@ -392,20 +418,34 @@ func provideAuthHandler(log *slog.Logger, accountService *accounts.Service, rc * return handlers.NewAuthHandler(log, accountService, rc.JwtSecret, rc.JwtExpiresIn) } -func provideMessageHandler(log *slog.Logger, resolver *flow.Resolver, chatService *conversation.Service, msgService *message.DBService, botService *bots.Service, 
accountService *accounts.Service, identityService *identities.Service, hub *event.Hub) *handlers.MessageHandler { - return handlers.NewMessageHandler(log, resolver, chatService, msgService, botService, accountService, identityService, hub) +func provideMessageHandler(log *slog.Logger, resolver *flow.Resolver, chatService *conversation.Service, msgService *message.DBService, mediaService *media.Service, botService *bots.Service, accountService *accounts.Service, identityService *identities.Service, hub *event.Hub) *handlers.MessageHandler { + h := handlers.NewMessageHandler(log, resolver, chatService, msgService, botService, accountService, identityService, hub) + h.SetMediaService(mediaService) + return h } -func provideUsersHandler(log *slog.Logger, accountService *accounts.Service, identityService *identities.Service, botService *bots.Service, routeService *route.DBService, channelService *channel.Service, channelManager *channel.Manager, registry *channel.Registry) *handlers.UsersHandler { - return handlers.NewUsersHandler(log, accountService, identityService, botService, routeService, channelService, channelManager, registry) +func provideMediaService(log *slog.Logger, queries *dbsqlc.Queries, cfg config.Config) (*media.Service, error) { + dataRoot := strings.TrimSpace(cfg.MCP.DataRoot) + if dataRoot == "" { + dataRoot = config.DefaultDataRoot + } + provider, err := containerfs.New(dataRoot) + if err != nil { + return nil, fmt.Errorf("init media provider: %w", err) + } + return media.NewService(log, queries, provider), nil } -func provideCLIHandler(channelManager *channel.Manager, channelService *channel.Service, chatService *conversation.Service, hub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *handlers.LocalChannelHandler { - return handlers.NewLocalChannelHandler(local.CLIType, channelManager, channelService, chatService, hub, botService, accountService) +func provideUsersHandler(log *slog.Logger, accountService 
*accounts.Service, identityService *identities.Service, botService *bots.Service, routeService *route.DBService, channelStore *channel.Store, channelLifecycle *channel.Lifecycle, channelManager *channel.Manager, registry *channel.Registry) *handlers.UsersHandler { + return handlers.NewUsersHandler(log, accountService, identityService, botService, routeService, channelStore, channelLifecycle, channelManager, registry) } -func provideWebHandler(channelManager *channel.Manager, channelService *channel.Service, chatService *conversation.Service, hub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *handlers.LocalChannelHandler { - return handlers.NewLocalChannelHandler(local.WebType, channelManager, channelService, chatService, hub, botService, accountService) +func provideCLIHandler(channelManager *channel.Manager, channelStore *channel.Store, chatService *conversation.Service, hub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *handlers.LocalChannelHandler { + return handlers.NewLocalChannelHandler(local.CLIType, channelManager, channelStore, chatService, hub, botService, accountService) +} + +func provideWebHandler(channelManager *channel.Manager, channelStore *channel.Store, chatService *conversation.Service, hub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *handlers.LocalChannelHandler { + return handlers.NewLocalChannelHandler(local.WebType, channelManager, channelStore, chatService, hub, botService, accountService) } // --------------------------------------------------------------------------- @@ -477,7 +517,7 @@ func startContainerReconciliation(lc fx.Lifecycle, containerdHandler *handlers.C }) } -func startServer(lc fx.Lifecycle, logger *slog.Logger, srv *server.Server, shutdowner fx.Shutdowner, cfg config.Config, queries *dbsqlc.Queries, botService *bots.Service, containerdHandler *handlers.ContainerdHandler, mcpConnService *mcp.ConnectionService, toolGateway 
*mcp.ToolGatewayService) { +func startServer(lc fx.Lifecycle, logger *slog.Logger, srv *server.Server, shutdowner fx.Shutdowner, cfg config.Config, queries *dbsqlc.Queries, botService *bots.Service, containerdHandler *handlers.ContainerdHandler, mcpConnService *mcp.ConnectionService, toolGateway *mcp.ToolGatewayService, channelManager *channel.Manager) { fmt.Printf("Starting Memoh Agent %s\n", version.GetInfo()) lc.Append(fx.Hook{ @@ -486,7 +526,12 @@ func startServer(lc fx.Lifecycle, logger *slog.Logger, srv *server.Server, shutd return err } botService.SetContainerLifecycle(containerdHandler) - botService.AddRuntimeChecker(mcp.NewConnectionChecker(logger, mcpConnService, toolGateway)) + botService.AddRuntimeChecker(healthcheck.NewRuntimeCheckerAdapter( + mcpchecker.NewChecker(logger, mcpConnService, toolGateway), + )) + botService.AddRuntimeChecker(healthcheck.NewRuntimeCheckerAdapter( + channelchecker.NewChecker(logger, channelManager), + )) go func() { if err := srv.Start(); err != nil && !errors.Is(err, http.ErrServerClosed) { diff --git a/db/migrations/0001_init.down.sql b/db/migrations/0001_init.down.sql index e0861ca1..556ae72e 100644 --- a/db/migrations/0001_init.down.sql +++ b/db/migrations/0001_init.down.sql @@ -1,3 +1,7 @@ +DROP TABLE IF EXISTS bot_history_message_assets; +DROP TABLE IF EXISTS media_assets; +DROP TABLE IF EXISTS bot_storage_bindings; +DROP TABLE IF EXISTS storage_providers; DROP TABLE IF EXISTS subagents; DROP TABLE IF EXISTS schedule; DROP TABLE IF EXISTS lifecycle_events; diff --git a/db/migrations/0001_init.up.sql b/db/migrations/0001_init.up.sql index 6b8b7cb6..7ccfb786 100644 --- a/db/migrations/0001_init.up.sql +++ b/db/migrations/0001_init.up.sql @@ -86,7 +86,7 @@ CREATE TABLE IF NOT EXISTS models ( name TEXT, llm_provider_id UUID NOT NULL REFERENCES llm_providers(id) ON DELETE CASCADE, dimensions INTEGER, - is_multimodal BOOLEAN NOT NULL DEFAULT false, + input_modalities TEXT[] NOT NULL DEFAULT ARRAY['text']::TEXT[], type TEXT 
NOT NULL DEFAULT 'chat', created_at TIMESTAMPTZ NOT NULL DEFAULT now(), updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), @@ -169,11 +169,10 @@ CREATE TABLE IF NOT EXISTS bot_channel_configs ( self_identity JSONB NOT NULL DEFAULT '{}'::jsonb, routing JSONB NOT NULL DEFAULT '{}'::jsonb, capabilities JSONB NOT NULL DEFAULT '{}'::jsonb, - status TEXT NOT NULL DEFAULT 'pending', + disabled BOOLEAN NOT NULL DEFAULT false, verified_at TIMESTAMPTZ, created_at TIMESTAMPTZ NOT NULL DEFAULT now(), updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), - CONSTRAINT bot_channel_status_check CHECK (status IN ('pending', 'verified', 'disabled')), CONSTRAINT bot_channel_unique UNIQUE (bot_id, channel_type) ); @@ -343,3 +342,64 @@ CREATE TABLE IF NOT EXISTS subagents ( CREATE INDEX IF NOT EXISTS idx_subagents_bot_id ON subagents(bot_id); CREATE INDEX IF NOT EXISTS idx_subagents_deleted ON subagents(deleted); +-- storage_providers: pluggable object storage backends +CREATE TABLE IF NOT EXISTS storage_providers ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name TEXT NOT NULL, + provider TEXT NOT NULL, + config JSONB NOT NULL DEFAULT '{}'::jsonb, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT storage_providers_name_unique UNIQUE (name), + CONSTRAINT storage_providers_provider_check CHECK (provider IN ('localfs', 's3', 'gcs')) +); + +-- bot_storage_bindings: per-bot storage backend selection +CREATE TABLE IF NOT EXISTS bot_storage_bindings ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE, + storage_provider_id UUID NOT NULL REFERENCES storage_providers(id) ON DELETE CASCADE, + base_path TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT bot_storage_bindings_unique UNIQUE (bot_id) +); + +CREATE INDEX IF NOT EXISTS idx_bot_storage_bindings_bot_id ON bot_storage_bindings(bot_id); + 
+-- media_assets: immutable media objects with dedup by content hash +CREATE TABLE IF NOT EXISTS media_assets ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE, + storage_provider_id UUID REFERENCES storage_providers(id) ON DELETE SET NULL, + content_hash TEXT NOT NULL, + media_type TEXT NOT NULL, + mime TEXT NOT NULL DEFAULT 'application/octet-stream', + size_bytes BIGINT NOT NULL DEFAULT 0, + storage_key TEXT NOT NULL, + original_name TEXT, + width INTEGER, + height INTEGER, + duration_ms BIGINT, + metadata JSONB NOT NULL DEFAULT '{}'::jsonb, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT media_assets_bot_hash_unique UNIQUE (bot_id, content_hash) +); + +CREATE INDEX IF NOT EXISTS idx_media_assets_bot_id ON media_assets(bot_id); +CREATE INDEX IF NOT EXISTS idx_media_assets_content_hash ON media_assets(content_hash); + +-- bot_history_message_assets: join table linking messages to media assets +CREATE TABLE IF NOT EXISTS bot_history_message_assets ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + message_id UUID NOT NULL REFERENCES bot_history_messages(id) ON DELETE CASCADE, + asset_id UUID NOT NULL REFERENCES media_assets(id) ON DELETE CASCADE, + role TEXT NOT NULL DEFAULT 'attachment', + ordinal INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT message_asset_unique UNIQUE (message_id, asset_id) +); + +CREATE INDEX IF NOT EXISTS idx_message_assets_message_id ON bot_history_message_assets(message_id); +CREATE INDEX IF NOT EXISTS idx_message_assets_asset_id ON bot_history_message_assets(asset_id); + diff --git a/db/migrations/0005_channel_config_disabled.down.sql b/db/migrations/0005_channel_config_disabled.down.sql new file mode 100644 index 00000000..26410708 --- /dev/null +++ b/db/migrations/0005_channel_config_disabled.down.sql @@ -0,0 +1,13 @@ +DO $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 
'bot_channel_configs' AND column_name = 'disabled' + ) THEN + ALTER TABLE bot_channel_configs ADD COLUMN IF NOT EXISTS status TEXT NOT NULL DEFAULT 'verified'; + UPDATE bot_channel_configs SET status = CASE WHEN disabled THEN 'disabled' ELSE 'verified' END; + ALTER TABLE bot_channel_configs DROP COLUMN disabled; + ALTER TABLE bot_channel_configs DROP CONSTRAINT IF EXISTS bot_channel_status_check; + ALTER TABLE bot_channel_configs ADD CONSTRAINT bot_channel_status_check CHECK (status IN ('pending', 'verified', 'disabled')); + END IF; +END $$; diff --git a/db/migrations/0005_channel_config_disabled.up.sql b/db/migrations/0005_channel_config_disabled.up.sql new file mode 100644 index 00000000..5829dfb3 --- /dev/null +++ b/db/migrations/0005_channel_config_disabled.up.sql @@ -0,0 +1,13 @@ +-- Replace status (TEXT) with disabled (BOOLEAN). Idempotent: no-op when already migrated. +DO $$ +BEGIN + IF EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'bot_channel_configs' AND column_name = 'status' + ) THEN + ALTER TABLE bot_channel_configs ADD COLUMN IF NOT EXISTS disabled BOOLEAN NOT NULL DEFAULT false; + UPDATE bot_channel_configs SET disabled = (status = 'disabled'); + ALTER TABLE bot_channel_configs DROP CONSTRAINT IF EXISTS bot_channel_status_check; + ALTER TABLE bot_channel_configs DROP COLUMN status; + END IF; +END $$; diff --git a/db/migrations/0006_model_modalities.down.sql b/db/migrations/0006_model_modalities.down.sql new file mode 100644 index 00000000..edbbfd86 --- /dev/null +++ b/db/migrations/0006_model_modalities.down.sql @@ -0,0 +1,7 @@ +ALTER TABLE models ADD COLUMN IF NOT EXISTS is_multimodal BOOLEAN NOT NULL DEFAULT false; + +UPDATE models SET is_multimodal = true WHERE 'image' = ANY(input_modalities); +UPDATE models SET is_multimodal = false WHERE NOT ('image' = ANY(input_modalities)); + +ALTER TABLE models DROP COLUMN IF EXISTS input_modalities; +ALTER TABLE models DROP COLUMN IF EXISTS output_modalities; diff --git 
a/db/migrations/0006_model_modalities.up.sql b/db/migrations/0006_model_modalities.up.sql new file mode 100644 index 00000000..ae9b0648 --- /dev/null +++ b/db/migrations/0006_model_modalities.up.sql @@ -0,0 +1,8 @@ +-- Replace is_multimodal boolean with input modality array. +ALTER TABLE models ADD COLUMN IF NOT EXISTS input_modalities TEXT[] NOT NULL DEFAULT ARRAY['text']::TEXT[]; + +-- Migrate existing data: true -> ['text','image'], false -> ['text'] +UPDATE models SET input_modalities = ARRAY['text','image']::TEXT[] WHERE is_multimodal = true; +UPDATE models SET input_modalities = ARRAY['text']::TEXT[] WHERE is_multimodal = false; + +ALTER TABLE models DROP COLUMN IF EXISTS is_multimodal; diff --git a/db/migrations/0007_media_assets.down.sql b/db/migrations/0007_media_assets.down.sql new file mode 100644 index 00000000..4a8bf0f0 --- /dev/null +++ b/db/migrations/0007_media_assets.down.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS bot_history_message_assets; +DROP TABLE IF EXISTS media_assets; +DROP TABLE IF EXISTS bot_storage_bindings; +DROP TABLE IF EXISTS storage_providers; diff --git a/db/migrations/0007_media_assets.up.sql b/db/migrations/0007_media_assets.up.sql new file mode 100644 index 00000000..a4014352 --- /dev/null +++ b/db/migrations/0007_media_assets.up.sql @@ -0,0 +1,60 @@ +-- storage_providers: pluggable object storage backends +CREATE TABLE IF NOT EXISTS storage_providers ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name TEXT NOT NULL, + provider TEXT NOT NULL, + config JSONB NOT NULL DEFAULT '{}'::jsonb, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT storage_providers_name_unique UNIQUE (name), + CONSTRAINT storage_providers_provider_check CHECK (provider IN ('localfs', 's3', 'gcs')) +); + +-- bot_storage_bindings: per-bot storage backend selection +CREATE TABLE IF NOT EXISTS bot_storage_bindings ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + bot_id UUID NOT NULL 
REFERENCES bots(id) ON DELETE CASCADE, + storage_provider_id UUID NOT NULL REFERENCES storage_providers(id) ON DELETE CASCADE, + base_path TEXT NOT NULL DEFAULT '', + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT bot_storage_bindings_unique UNIQUE (bot_id) +); + +CREATE INDEX IF NOT EXISTS idx_bot_storage_bindings_bot_id ON bot_storage_bindings(bot_id); + +-- media_assets: immutable media objects with dedup by content hash +CREATE TABLE IF NOT EXISTS media_assets ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + bot_id UUID NOT NULL REFERENCES bots(id) ON DELETE CASCADE, + storage_provider_id UUID REFERENCES storage_providers(id) ON DELETE SET NULL, + content_hash TEXT NOT NULL, + media_type TEXT NOT NULL, + mime TEXT NOT NULL DEFAULT 'application/octet-stream', + size_bytes BIGINT NOT NULL DEFAULT 0, + storage_key TEXT NOT NULL, + original_name TEXT, + width INTEGER, + height INTEGER, + duration_ms BIGINT, + metadata JSONB NOT NULL DEFAULT '{}'::jsonb, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT media_assets_bot_hash_unique UNIQUE (bot_id, content_hash) +); + +CREATE INDEX IF NOT EXISTS idx_media_assets_bot_id ON media_assets(bot_id); +CREATE INDEX IF NOT EXISTS idx_media_assets_content_hash ON media_assets(content_hash); + +-- bot_history_message_assets: join table linking messages to media assets +CREATE TABLE IF NOT EXISTS bot_history_message_assets ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + message_id UUID NOT NULL REFERENCES bot_history_messages(id) ON DELETE CASCADE, + asset_id UUID NOT NULL REFERENCES media_assets(id) ON DELETE CASCADE, + role TEXT NOT NULL DEFAULT 'attachment', + ordinal INTEGER NOT NULL DEFAULT 0, + created_at TIMESTAMPTZ NOT NULL DEFAULT now(), + CONSTRAINT message_asset_unique UNIQUE (message_id, asset_id) +); + +CREATE INDEX IF NOT EXISTS idx_message_assets_message_id ON bot_history_message_assets(message_id); +CREATE INDEX IF NOT EXISTS 
idx_message_assets_asset_id ON bot_history_message_assets(asset_id); diff --git a/db/queries/channels.sql b/db/queries/channels.sql index da22f1c2..d0634aa5 100644 --- a/db/queries/channels.sql +++ b/db/queries/channels.sql @@ -1,18 +1,22 @@ +-- name: DeleteBotChannelConfig :exec +DELETE FROM bot_channel_configs +WHERE bot_id = $1 AND channel_type = $2; + -- name: GetBotChannelConfig :one -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE bot_id = $1 AND channel_type = $2 LIMIT 1; -- name: GetBotChannelConfigByExternalIdentity :one -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE channel_type = $1 AND external_identity = $2 LIMIT 1; -- name: UpsertBotChannelConfig :one INSERT INTO bot_channel_configs ( - bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at + bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) ON CONFLICT (bot_id, channel_type) @@ -22,13 +26,21 @@ DO UPDATE SET self_identity = EXCLUDED.self_identity, routing = EXCLUDED.routing, capabilities = EXCLUDED.capabilities, - status = EXCLUDED.status, + disabled = EXCLUDED.disabled, verified_at = EXCLUDED.verified_at, updated_at = now() -RETURNING id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at; +RETURNING 
id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at; + +-- name: UpdateBotChannelConfigDisabled :one +UPDATE bot_channel_configs +SET + disabled = $3, + updated_at = now() +WHERE bot_id = $1 AND channel_type = $2 +RETURNING id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at; -- name: ListBotChannelConfigsByType :many -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE channel_type = $1 ORDER BY created_at DESC; diff --git a/db/queries/media.sql b/db/queries/media.sql new file mode 100644 index 00000000..08fee2ae --- /dev/null +++ b/db/queries/media.sql @@ -0,0 +1,116 @@ +-- name: CreateStorageProvider :one +INSERT INTO storage_providers (name, provider, config) +VALUES (sqlc.arg(name), sqlc.arg(provider), sqlc.arg(config)) +RETURNING *; + +-- name: GetStorageProviderByID :one +SELECT * FROM storage_providers WHERE id = sqlc.arg(id); + +-- name: GetStorageProviderByName :one +SELECT * FROM storage_providers WHERE name = sqlc.arg(name); + +-- name: ListStorageProviders :many +SELECT * FROM storage_providers ORDER BY created_at DESC; + +-- name: UpsertBotStorageBinding :one +INSERT INTO bot_storage_bindings (bot_id, storage_provider_id, base_path) +VALUES (sqlc.arg(bot_id), sqlc.arg(storage_provider_id), sqlc.arg(base_path)) +ON CONFLICT (bot_id) DO UPDATE SET + storage_provider_id = EXCLUDED.storage_provider_id, + base_path = EXCLUDED.base_path, + updated_at = now() +RETURNING *; + +-- name: GetBotStorageBinding :one +SELECT * FROM bot_storage_bindings WHERE bot_id = sqlc.arg(bot_id); + +-- name: CreateMediaAsset :one 
+INSERT INTO media_assets ( + bot_id, storage_provider_id, content_hash, media_type, mime, + size_bytes, storage_key, original_name, width, height, duration_ms, metadata +) +VALUES ( + sqlc.arg(bot_id), + sqlc.narg(storage_provider_id)::uuid, + sqlc.arg(content_hash), + sqlc.arg(media_type), + sqlc.arg(mime), + sqlc.arg(size_bytes), + sqlc.arg(storage_key), + sqlc.narg(original_name)::text, + sqlc.narg(width)::integer, + sqlc.narg(height)::integer, + sqlc.narg(duration_ms)::bigint, + sqlc.arg(metadata) +) +ON CONFLICT (bot_id, content_hash) DO UPDATE SET + bot_id = media_assets.bot_id +RETURNING *; + +-- name: GetMediaAssetByID :one +SELECT * FROM media_assets WHERE id = sqlc.arg(id); + +-- name: GetMediaAssetByHash :one +SELECT * FROM media_assets +WHERE bot_id = sqlc.arg(bot_id) AND content_hash = sqlc.arg(content_hash); + +-- name: ListMediaAssetsByBotID :many +SELECT * FROM media_assets +WHERE bot_id = sqlc.arg(bot_id) +ORDER BY created_at DESC; + +-- name: DeleteMediaAsset :exec +DELETE FROM media_assets WHERE id = sqlc.arg(id); + +-- name: CreateMessageAsset :one +INSERT INTO bot_history_message_assets (message_id, asset_id, role, ordinal) +VALUES (sqlc.arg(message_id), sqlc.arg(asset_id), sqlc.arg(role), sqlc.arg(ordinal)) +ON CONFLICT (message_id, asset_id) DO UPDATE SET + role = EXCLUDED.role, + ordinal = EXCLUDED.ordinal +RETURNING *; + +-- name: ListMessageAssets :many +SELECT + ma.id AS rel_id, + ma.message_id, + ma.asset_id, + ma.role, + ma.ordinal, + a.media_type, + a.mime, + a.size_bytes, + a.storage_key, + a.original_name, + a.width, + a.height, + a.duration_ms, + a.metadata AS asset_metadata +FROM bot_history_message_assets ma +JOIN media_assets a ON a.id = ma.asset_id +WHERE ma.message_id = sqlc.arg(message_id) +ORDER BY ma.ordinal ASC; + +-- name: ListMessageAssetsBatch :many +SELECT + ma.id AS rel_id, + ma.message_id, + ma.asset_id, + ma.role, + ma.ordinal, + a.media_type, + a.mime, + a.size_bytes, + a.storage_key, + a.original_name, + a.width, 
+ a.height, + a.duration_ms, + a.metadata AS asset_metadata +FROM bot_history_message_assets ma +JOIN media_assets a ON a.id = ma.asset_id +WHERE ma.message_id = ANY(sqlc.arg(message_ids)::uuid[]) +ORDER BY ma.message_id, ma.ordinal ASC; + +-- name: DeleteMessageAssets :exec +DELETE FROM bot_history_message_assets WHERE message_id = sqlc.arg(message_id); diff --git a/db/queries/models.sql b/db/queries/models.sql index 3a472652..f1adc4ff 100644 --- a/db/queries/models.sql +++ b/db/queries/models.sql @@ -46,13 +46,13 @@ SELECT COUNT(*) FROM llm_providers; SELECT COUNT(*) FROM llm_providers WHERE client_type = sqlc.arg(client_type); -- name: CreateModel :one -INSERT INTO models (model_id, name, llm_provider_id, dimensions, is_multimodal, type) +INSERT INTO models (model_id, name, llm_provider_id, dimensions, input_modalities, type) VALUES ( sqlc.arg(model_id), sqlc.arg(name), sqlc.arg(llm_provider_id), sqlc.arg(dimensions), - sqlc.arg(is_multimodal), + sqlc.arg(input_modalities), sqlc.arg(type) ) RETURNING *; @@ -95,7 +95,7 @@ SET name = sqlc.arg(name), llm_provider_id = sqlc.arg(llm_provider_id), dimensions = sqlc.arg(dimensions), - is_multimodal = sqlc.arg(is_multimodal), + input_modalities = sqlc.arg(input_modalities), type = sqlc.arg(type), updated_at = now() WHERE id = sqlc.arg(id) @@ -108,7 +108,7 @@ SET name = sqlc.arg(name), llm_provider_id = sqlc.arg(llm_provider_id), dimensions = sqlc.arg(dimensions), - is_multimodal = sqlc.arg(is_multimodal), + input_modalities = sqlc.arg(input_modalities), type = sqlc.arg(type), updated_at = now() WHERE model_id = sqlc.arg(model_id) diff --git a/docker/config/nginx.conf b/docker/config/nginx.conf index 77807793..c0fd12f9 100644 --- a/docker/config/nginx.conf +++ b/docker/config/nginx.conf @@ -5,6 +5,8 @@ server { root /usr/share/nginx/html; index index.html; + client_max_body_size 50m; + # Gzip 压缩 gzip on; gzip_vary on; diff --git a/internal/auth/jwt.go b/internal/auth/jwt.go index 8c837b43..86854928 100644 --- 
a/internal/auth/jwt.go +++ b/internal/auth/jwt.go @@ -28,7 +28,7 @@ func JWTMiddleware(secret string, skipper middleware.Skipper) echo.MiddlewareFun return echojwt.WithConfig(echojwt.Config{ SigningKey: []byte(secret), SigningMethod: "HS256", - TokenLookup: "header:Authorization:Bearer ", + TokenLookup: "header:Authorization:Bearer ,query:token", Skipper: skipper, NewClaimsFunc: func(c echo.Context) jwt.Claims { return jwt.MapClaims{} diff --git a/internal/bots/service.go b/internal/bots/service.go index 96abe0af..257fbd31 100644 --- a/internal/bots/service.go +++ b/internal/bots/service.go @@ -387,7 +387,7 @@ func (s *Service) ListChecks(ctx context.Context, botID string) ([]BotCheck, err if err != nil { return nil, err } - return s.buildRuntimeChecks(ctx, row) + return s.buildRuntimeChecks(ctx, row, true) } func (s *Service) enqueueCreateLifecycle(botID string) { @@ -683,7 +683,7 @@ func decodeMetadata(payload []byte) (map[string]any, error) { } func (s *Service) attachCheckSummary(ctx context.Context, bot *Bot, row sqlc.Bot) error { - checks, err := s.buildRuntimeChecks(ctx, row) + checks, err := s.buildRuntimeChecks(ctx, row, false) if err != nil { return err } @@ -693,67 +693,93 @@ func (s *Service) attachCheckSummary(ctx context.Context, bot *Bot, row sqlc.Bot return nil } -func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCheck, error) { +// buildRuntimeChecks composes builtin checks and optional dynamic checker results. +// includeDynamic is disabled when computing list summary to avoid expensive runtime probes. 
+func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot, includeDynamic bool) ([]BotCheck, error) { status := strings.TrimSpace(row.Status) checks := make([]BotCheck, 0, 4) if status == BotStatusCreating { checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerInit, + ID: BotCheckTypeContainerInit, + Type: BotCheckTypeContainerInit, + TitleKey: "bots.checks.titles.containerInit", Status: BotCheckStatusUnknown, Summary: "Initialization is in progress.", Detail: "Bot resources are still being provisioned.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerRecord, + ID: BotCheckTypeContainerRecord, + Type: BotCheckTypeContainerRecord, + TitleKey: "bots.checks.titles.containerRecord", Status: BotCheckStatusUnknown, Summary: "Container record is pending.", Detail: "Container record will be checked after initialization.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerTask, + ID: BotCheckTypeContainerTask, + Type: BotCheckTypeContainerTask, + TitleKey: "bots.checks.titles.containerTask", Status: BotCheckStatusUnknown, Summary: "Container task state is pending.", Detail: "Task state will be checked after initialization.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerData, + ID: BotCheckTypeContainerData, + Type: BotCheckTypeContainerData, + TitleKey: "bots.checks.titles.containerDataPath", Status: BotCheckStatusUnknown, Summary: "Container host path check is pending.", Detail: "Data path will be checked after initialization.", }) + if includeDynamic { + checks = s.appendDynamicChecks(ctx, row.ID.String(), checks) + } return checks, nil } if status == BotStatusDeleting { checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyDelete, + ID: BotCheckTypeDelete, + Type: BotCheckTypeDelete, + TitleKey: "bots.checks.titles.botDelete", Status: BotCheckStatusUnknown, Summary: "Deletion is in progress.", Detail: "Bot resources are being cleaned up.", }) checks = append(checks, BotCheck{ - 
CheckKey: BotCheckKeyContainerRecord, + ID: BotCheckTypeContainerRecord, + Type: BotCheckTypeContainerRecord, + TitleKey: "bots.checks.titles.containerRecord", Status: BotCheckStatusUnknown, Summary: "Container record check is skipped.", Detail: "Bot is deleting and container checks are paused.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerTask, + ID: BotCheckTypeContainerTask, + Type: BotCheckTypeContainerTask, + TitleKey: "bots.checks.titles.containerTask", Status: BotCheckStatusUnknown, Summary: "Container task check is skipped.", Detail: "Bot is deleting and task checks are paused.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerData, + ID: BotCheckTypeContainerData, + Type: BotCheckTypeContainerData, + TitleKey: "bots.checks.titles.containerDataPath", Status: BotCheckStatusUnknown, Summary: "Container host path check is skipped.", Detail: "Bot is deleting and data path checks are paused.", }) + if includeDynamic { + checks = s.appendDynamicChecks(ctx, row.ID.String(), checks) + } return checks, nil } checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerInit, + ID: BotCheckTypeContainerInit, + Type: BotCheckTypeContainerInit, + TitleKey: "bots.checks.titles.containerInit", Status: BotCheckStatusOK, Summary: "Initialization finished.", }) @@ -762,30 +788,41 @@ func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCh if err != nil { if errors.Is(err, pgx.ErrNoRows) { checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerRecord, + ID: BotCheckTypeContainerRecord, + Type: BotCheckTypeContainerRecord, + TitleKey: "bots.checks.titles.containerRecord", Status: BotCheckStatusError, Summary: "Container record is missing.", Detail: "No container is attached to this bot.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerTask, + ID: BotCheckTypeContainerTask, + Type: BotCheckTypeContainerTask, + TitleKey: "bots.checks.titles.containerTask", Status: 
BotCheckStatusUnknown, Summary: "Container task state is unknown.", Detail: "Task state cannot be determined without a container record.", }) checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerData, + ID: BotCheckTypeContainerData, + Type: BotCheckTypeContainerData, + TitleKey: "bots.checks.titles.containerDataPath", Status: BotCheckStatusUnknown, Summary: "Container data path is unknown.", Detail: "Data path cannot be determined without a container record.", }) + if includeDynamic { + checks = s.appendDynamicChecks(ctx, row.ID.String(), checks) + } return checks, nil } return nil, err } checks = append(checks, BotCheck{ - CheckKey: BotCheckKeyContainerRecord, + ID: BotCheckTypeContainerRecord, + Type: BotCheckTypeContainerRecord, + TitleKey: "bots.checks.titles.containerRecord", Status: BotCheckStatusOK, Summary: "Container record exists.", Detail: fmt.Sprintf("container_id=%s", strings.TrimSpace(containerRow.ContainerID)), @@ -798,7 +835,9 @@ func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCh taskStatus := strings.TrimSpace(strings.ToLower(containerRow.Status)) taskCheck := BotCheck{ - CheckKey: BotCheckKeyContainerTask, + ID: BotCheckTypeContainerTask, + Type: BotCheckTypeContainerTask, + TitleKey: "bots.checks.titles.containerTask", Status: BotCheckStatusWarn, Summary: "Container task state needs attention.", } @@ -820,7 +859,9 @@ func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCh hostPath = strings.TrimSpace(containerRow.HostPath.String) } dataCheck := BotCheck{ - CheckKey: BotCheckKeyContainerData, + ID: BotCheckTypeContainerData, + Type: BotCheckTypeContainerData, + TitleKey: "bots.checks.titles.containerDataPath", Status: BotCheckStatusWarn, Summary: "Container host path needs attention.", Metadata: map[string]any{"host_path": hostPath}, @@ -828,6 +869,9 @@ func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCh if hostPath == "" { dataCheck.Detail = "host 
path is empty" checks = append(checks, dataCheck) + if includeDynamic { + checks = s.appendDynamicChecks(ctx, row.ID.String(), checks) + } return checks, nil } info, statErr := os.Stat(hostPath) @@ -850,53 +894,42 @@ func (s *Service) buildRuntimeChecks(ctx context.Context, row sqlc.Bot) ([]BotCh dataCheck.Detail = statErr.Error() } checks = append(checks, dataCheck) + if includeDynamic { + checks = s.appendDynamicChecks(ctx, row.ID.String(), checks) + } return checks, nil } -// builtinCheckKeys returns keys produced by buildRuntimeChecks. -var builtinCheckKeys = []string{ - BotCheckKeyContainerInit, - BotCheckKeyContainerRecord, - BotCheckKeyContainerTask, - BotCheckKeyContainerData, -} - -// ListCheckKeys returns all available check keys for a bot (builtin + registered checkers). -func (s *Service) ListCheckKeys(ctx context.Context, botID string) ([]string, error) { - keys := make([]string, 0, len(builtinCheckKeys)+8) - keys = append(keys, builtinCheckKeys...) +// appendDynamicChecks appends checks from registered runtime checkers. +func (s *Service) appendDynamicChecks(ctx context.Context, botID string, checks []BotCheck) []BotCheck { for _, checker := range s.checkers { - keys = append(keys, checker.CheckKeys(ctx, botID)...) - } - return keys, nil -} - -// RunCheck evaluates a single check key for a bot. -func (s *Service) RunCheck(ctx context.Context, botID, key string) (BotCheck, error) { - // Try registered checkers first (they own dynamic keys like mcp.*). 
- for _, checker := range s.checkers { - for _, k := range checker.CheckKeys(ctx, botID) { - if k == key { - return checker.RunCheck(ctx, botID, key), nil + items := checker.ListChecks(ctx, botID) + for _, item := range items { + item.ID = strings.TrimSpace(item.ID) + item.Type = strings.TrimSpace(item.Type) + item.Status = strings.TrimSpace(item.Status) + if item.ID == "" { + if item.Type != "" { + item.ID = item.Type + } else { + item.ID = "runtime.unknown" + if s.logger != nil { + s.logger.Warn("runtime checker returned check without id and type", + slog.String("bot_id", botID)) + } + } } + if item.Type == "" { + item.Type = item.ID + } + if item.Status == "" { + item.Status = BotCheckStatusUnknown + } + checks = append(checks, item) } } - // Fall back to builtin checks. - checks, err := s.ListChecks(ctx, botID) - if err != nil { - return BotCheck{}, err - } - for _, c := range checks { - if c.CheckKey == key { - return c, nil - } - } - return BotCheck{ - CheckKey: key, - Status: BotCheckStatusUnknown, - Summary: "Check key not found.", - }, nil + return checks } func summarizeChecks(checks []BotCheck) (string, int32) { diff --git a/internal/bots/types.go b/internal/bots/types.go index ad37359b..37af3f51 100644 --- a/internal/bots/types.go +++ b/internal/bots/types.go @@ -32,7 +32,10 @@ type BotMember struct { // BotCheck represents one resource check row for a bot. type BotCheck struct { - CheckKey string `json:"check_key"` + ID string `json:"id"` + Type string `json:"type"` + TitleKey string `json:"title_key"` + Subtitle string `json:"subtitle,omitempty"` Status string `json:"status"` Summary string `json:"summary"` Detail string `json:"detail,omitempty"` @@ -82,11 +85,6 @@ type ListChecksResponse struct { Items []BotCheck `json:"items"` } -// ListCheckKeysResponse wraps the list of available check keys. -type ListCheckKeysResponse struct { - Keys []string `json:"keys"` -} - // ContainerLifecycle handles container lifecycle events bound to bot operations. 
type ContainerLifecycle interface { SetupBotContainer(ctx context.Context, botID string) error @@ -95,10 +93,8 @@ type ContainerLifecycle interface { // RuntimeChecker produces runtime check items for a bot. type RuntimeChecker interface { - // CheckKeys returns the check keys this checker can evaluate for a bot. - CheckKeys(ctx context.Context, botID string) []string - // RunCheck evaluates a single check key and returns the result. - RunCheck(ctx context.Context, botID, key string) BotCheck + // ListChecks evaluates dynamic runtime checks for a bot. + ListChecks(ctx context.Context, botID string) []BotCheck } const ( @@ -126,11 +122,13 @@ const ( ) const ( - BotCheckKeyContainerInit = "container.init" - BotCheckKeyContainerRecord = "container.record" - BotCheckKeyContainerTask = "container.task" - BotCheckKeyContainerData = "container.data_path" - BotCheckKeyDelete = "bot.delete" + BotCheckTypeContainerInit = "container.init" + BotCheckTypeContainerRecord = "container.record" + BotCheckTypeContainerTask = "container.task" + BotCheckTypeContainerData = "container.data_path" + BotCheckTypeDelete = "bot.delete" + BotCheckTypeMCPConnection = "mcp.connection" + BotCheckTypeChannelConn = "channel.connection" ) const ( diff --git a/internal/channel/adapter.go b/internal/channel/adapter.go index 7343b138..34630bef 100644 --- a/internal/channel/adapter.go +++ b/internal/channel/adapter.go @@ -3,6 +3,7 @@ package channel import ( "context" "errors" + "io" "sync/atomic" ) @@ -50,6 +51,21 @@ type ProcessingStatusNotifier interface { ProcessingFailed(ctx context.Context, cfg ChannelConfig, msg InboundMessage, info ProcessingStatusInfo, handle ProcessingStatusHandle, cause error) error } +// AttachmentPayload contains resolved attachment bytes and optional metadata. +// Caller must close Reader. 
+type AttachmentPayload struct { + Reader io.ReadCloser + Mime string + Name string + Size int64 +} + +// AttachmentResolver resolves attachment references (for example platform_key) +// into readable bytes for persistence or transformation pipelines. +type AttachmentResolver interface { + ResolveAttachment(ctx context.Context, cfg ChannelConfig, attachment Attachment) (AttachmentPayload, error) +} + // Adapter is the base interface every channel adapter must implement. type Adapter interface { Type() ChannelType diff --git a/internal/channel/adapters/feishu/feishu.go b/internal/channel/adapters/feishu/feishu.go index 9b8c4630..e4da2e34 100644 --- a/internal/channel/adapters/feishu/feishu.go +++ b/internal/channel/adapters/feishu/feishu.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "io" "log/slog" "net/http" "strings" @@ -18,6 +19,7 @@ import ( "github.com/memohai/memoh/internal/channel" "github.com/memohai/memoh/internal/channel/adapters/common" + "github.com/memohai/memoh/internal/media" ) // FeishuAdapter implements the channel.Adapter, channel.Sender, and channel.Receiver interfaces for Feishu. 
@@ -367,6 +369,7 @@ func (a *FeishuAdapter) Connect(ctx context.Context, cfg channel.ChannelConfig, text := msg.Message.PlainText() rawMessageID := "" rawMessageType := "" + rawContent := "" if event != nil && event.Event != nil && event.Event.Message != nil { if event.Event.Message.MessageId != nil { rawMessageID = strings.TrimSpace(*event.Event.Message.MessageId) @@ -374,6 +377,19 @@ func (a *FeishuAdapter) Connect(ctx context.Context, cfg channel.ChannelConfig, if event.Event.Message.MessageType != nil { rawMessageType = strings.TrimSpace(string(*event.Event.Message.MessageType)) } + if event.Event.Message.Content != nil { + rawContent = common.SummarizeText(*event.Event.Message.Content) + } + } + if a.logger != nil { + a.logger.Debug("feishu inbound extracted", + slog.String("config_id", cfg.ID), + slog.String("message_id", rawMessageID), + slog.String("message_type", rawMessageType), + slog.String("text", common.SummarizeText(text)), + slog.Int("attachments", len(msg.Message.Attachments)), + slog.String("raw_content_prefix", rawContent), + ) } if text == "" && len(msg.Message.Attachments) == 0 { if a.logger != nil { @@ -659,6 +675,10 @@ func (a *FeishuAdapter) sendAttachment(ctx context.Context, client *lark.Client, if resp.StatusCode != http.StatusOK { return fmt.Errorf("failed to download attachment, status: %d", resp.StatusCode) } + maxBytes := media.MaxAssetBytes + if resp.ContentLength > maxBytes { + return fmt.Errorf("%w: max %d bytes", media.ErrAssetTooLarge, maxBytes) + } if strings.HasPrefix(att.Mime, "image/") || att.Type == channel.AttachmentImage { uploadReq := larkim.NewCreateImageReqBuilder(). Body(larkim.NewCreateImageReqBodyBuilder(). @@ -726,6 +746,76 @@ func (a *FeishuAdapter) sendAttachment(ctx context.Context, client *lark.Client, return a.handleResponse("", sendResp, err) } +// ResolveAttachment resolves a Feishu attachment reference to a byte stream. 
+// User-sent resources must be fetched via the message-resource API which +// requires both message_id and file_key. The message_id is expected in +// attachment.Metadata["message_id"]. +func (a *FeishuAdapter) ResolveAttachment(ctx context.Context, cfg channel.ChannelConfig, attachment channel.Attachment) (channel.AttachmentPayload, error) { + platformKey := strings.TrimSpace(attachment.PlatformKey) + if platformKey == "" { + return channel.AttachmentPayload{}, fmt.Errorf("feishu attachment platform_key is required") + } + messageID := "" + if attachment.Metadata != nil { + if v, ok := attachment.Metadata["message_id"].(string); ok { + messageID = strings.TrimSpace(v) + } + } + if messageID == "" { + return channel.AttachmentPayload{}, fmt.Errorf("feishu attachment metadata.message_id is required") + } + feishuCfg, err := parseConfig(cfg.Credentials) + if err != nil { + return channel.AttachmentPayload{}, err + } + client := lark.NewClient(feishuCfg.AppID, feishuCfg.AppSecret) + + resourceType := "file" + if isFeishuImageAttachment(attachment) { + resourceType = "image" + } + req := larkim.NewGetMessageResourceReqBuilder(). + MessageId(messageID). + FileKey(platformKey). + Type(resourceType). 
+ Build() + resp, err := client.Im.V1.MessageResource.Get(ctx, req) + if err != nil { + return channel.AttachmentPayload{}, fmt.Errorf("download feishu resource: %w", err) + } + if !resp.Success() { + return channel.AttachmentPayload{}, fmt.Errorf("download feishu resource: %s (code: %d)", resp.Msg, resp.Code) + } + if resp.File == nil { + return channel.AttachmentPayload{}, fmt.Errorf("download feishu resource: empty payload") + } + mime := strings.TrimSpace(attachment.Mime) + if mime == "" { + if isFeishuImageAttachment(attachment) { + mime = "image/png" + } else { + mime = "application/octet-stream" + } + } + name := strings.TrimSpace(attachment.Name) + if name == "" { + name = strings.TrimSpace(resp.FileName) + } + return channel.AttachmentPayload{ + Reader: io.NopCloser(resp.File), + Mime: mime, + Name: name, + Size: attachment.Size, + }, nil +} + +func isFeishuImageAttachment(att channel.Attachment) bool { + if att.Type == channel.AttachmentImage || att.Type == channel.AttachmentGIF { + return true + } + return strings.HasPrefix(strings.ToLower(strings.TrimSpace(att.Mime)), "image/") +} + // resolveFeishuFileType maps MIME type and filename to a Feishu file type constant. 
func resolveFeishuFileType(name, mime string) string { lower := strings.ToLower(mime) diff --git a/internal/channel/adapters/feishu/feishu_test.go b/internal/channel/adapters/feishu/feishu_test.go index 42da2b86..a8726bf5 100644 --- a/internal/channel/adapters/feishu/feishu_test.go +++ b/internal/channel/adapters/feishu/feishu_test.go @@ -202,6 +202,9 @@ func TestExtractFeishuInboundImageAttachmentReference(t *testing.T) { if att.SourcePlatform != Type.String() { t.Fatalf("unexpected source platform: %s", att.SourcePlatform) } + if att.Metadata == nil || att.Metadata["message_id"] == nil { + t.Fatal("expected message_id in attachment metadata") + } } func TestFeishuDescriptorIncludesStreamingAndMedia(t *testing.T) { @@ -217,6 +220,54 @@ func TestFeishuDescriptorIncludesStreamingAndMedia(t *testing.T) { } } +func TestFeishuResolveAttachmentRequiresPlatformKey(t *testing.T) { + t.Parallel() + + adapter := NewFeishuAdapter(nil) + _, err := adapter.ResolveAttachment(context.Background(), channel.ChannelConfig{}, channel.Attachment{}) + if err == nil { + t.Fatal("expected error when platform_key is missing") + } + if !strings.Contains(err.Error(), "platform_key") { + t.Fatalf("expected platform_key error, got: %v", err) + } +} + +func TestFeishuResolveAttachmentRequiresMessageID(t *testing.T) { + t.Parallel() + + adapter := NewFeishuAdapter(nil) + _, err := adapter.ResolveAttachment(context.Background(), channel.ChannelConfig{}, channel.Attachment{ + PlatformKey: "img_123", + }) + if err == nil { + t.Fatal("expected error when message_id is missing") + } + if !strings.Contains(err.Error(), "message_id") { + t.Fatalf("expected message_id error, got: %v", err) + } +} + +func TestIsFeishuImageAttachment(t *testing.T) { + t.Parallel() + + if !isFeishuImageAttachment(channel.Attachment{Type: channel.AttachmentImage}) { + t.Fatal("expected image type to be identified as image") + } + if !isFeishuImageAttachment(channel.Attachment{Type: channel.AttachmentGIF}) { + 
t.Fatal("expected gif type to be identified as image") + } + if !isFeishuImageAttachment(channel.Attachment{Mime: "image/jpeg"}) { + t.Fatal("expected image/ mime to be identified as image") + } + if isFeishuImageAttachment(channel.Attachment{Type: channel.AttachmentFile}) { + t.Fatal("expected file type to not be identified as image") + } + if isFeishuImageAttachment(channel.Attachment{Type: channel.AttachmentAudio}) { + t.Fatal("expected audio type to not be identified as image") + } +} + func TestBuildFeishuStreamCardContent(t *testing.T) { t.Parallel() @@ -238,6 +289,21 @@ func TestBuildFeishuStreamCardContent(t *testing.T) { } } +func TestBuildFeishuStreamCardContentWithState(t *testing.T) { + t.Parallel() + + payload, err := buildFeishuStreamCardContent("answer body") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !strings.Contains(payload, "answer body") { + t.Fatalf("expected stream body content in payload: %s", payload) + } + if strings.Contains(payload, "Tools:**") || strings.Contains(payload, "Calling:") { + t.Fatalf("expected no tool/process panel in payload: %s", payload) + } +} + func TestNormalizeFeishuStreamText(t *testing.T) { t.Parallel() @@ -364,7 +430,7 @@ func TestExtractFeishuInboundMentionOtherUserIgnored(t *testing.T) { func TestExtractFeishuInboundPostMentionFallback(t *testing.T) { t.Parallel() - content := `{"zh_cn":{"title":"","content":[[{"tag":"at","user_name":"bot"},{"tag":"text","text":" hi"}]]}}` + content := `{"title":"","content":[[{"tag":"at","user_name":"bot"},{"tag":"text","text":" hi"}]]}` msgType := larkim.MsgTypePost chatType := "group" chatID := "oc_post_1" @@ -392,7 +458,7 @@ func TestExtractFeishuInboundPostMentionBotMatched(t *testing.T) { t.Parallel() botOpenID := "ou_bot_123" - content := `{"zh_cn":{"title":"","content":[[{"tag":"at","user_id":"ou_bot_123"},{"tag":"text","text":" hi"}]]}}` + content := `{"title":"","content":[[{"tag":"at","user_id":"ou_bot_123"},{"tag":"text","text":" hi"}]]}` 
msgType := larkim.MsgTypePost chatType := "group" chatID := "oc_post_bot" @@ -413,10 +479,42 @@ func TestExtractFeishuInboundPostMentionBotMatched(t *testing.T) { } } +func TestExtractFeishuInboundPostRootContent(t *testing.T) { + t.Parallel() + + // Feishu event payload uses root-level content + content := `{"title":"","content":[[{"tag":"img","image_key":"img_v3_02uv_81bc4785-24d6-4fe2-b841-c3c4c691fc0g","width":1438,"height":810}],[{"tag":"text","text":"这是什么作品","style":[]}]]}` + msgType := larkim.MsgTypePost + chatType := "p2p" + chatID := "oc_eb2b5e623f3a21e288fce40878564f8e" + msgID := "om_x100b5606f7fc6ca4b376e5432634210" + event := &larkim.P2MessageReceiveV1{ + Event: &larkim.P2MessageReceiveV1Data{ + Message: &larkim.EventMessage{ + MessageId: &msgID, + MessageType: &msgType, + Content: &content, + ChatType: &chatType, + ChatId: &chatID, + }, + }, + } + got := extractFeishuInbound(event, "") + if got.Message.PlainText() != "这是什么作品" { + t.Fatalf("expected text 这是什么作品, got %q", got.Message.PlainText()) + } + if len(got.Message.Attachments) != 1 { + t.Fatalf("expected 1 attachment, got %d", len(got.Message.Attachments)) + } + if got.Message.Attachments[0].PlatformKey != "img_v3_02uv_81bc4785-24d6-4fe2-b841-c3c4c691fc0g" { + t.Fatalf("unexpected platform_key: %q", got.Message.Attachments[0].PlatformKey) + } +} + func TestExtractFeishuInboundPostMentionOtherIgnored(t *testing.T) { t.Parallel() - content := `{"zh_cn":{"title":"","content":[[{"tag":"at","user_id":"ou_someone_else"},{"tag":"text","text":" hi"}]]}}` + content := `{"title":"","content":[[{"tag":"at","user_id":"ou_someone_else"},{"tag":"text","text":" hi"}]]}` msgType := larkim.MsgTypePost chatType := "group" chatID := "oc_post_other" diff --git a/internal/channel/adapters/feishu/inbound.go b/internal/channel/adapters/feishu/inbound.go index 54bee3f4..7a4a4846 100644 --- a/internal/channel/adapters/feishu/inbound.go +++ b/internal/channel/adapters/feishu/inbound.go @@ -40,15 +40,28 @@ func 
extractFeishuInbound(event *larkim.P2MessageReceiveV1, botOpenID string) ch msg.Text = txt } case larkim.MsgTypePost: - if postText := extractFeishuPostText(contentMap); postText != "" { + postText := extractFeishuPostText(contentMap) + if postText != "" { msg.Text = postText } + postAtts := extractFeishuPostAttachments(contentMap, msg.ID) + for _, att := range postAtts { + msg.Attachments = append(msg.Attachments, att) + } + if len(postAtts) > 0 || postText != "" { + slog.Debug("feishu post extracted", + "message_id", msg.ID, + "text_len", len(postText), + "attachments", len(postAtts), + ) + } case larkim.MsgTypeImage: if key, ok := contentMap["image_key"].(string); ok { msg.Attachments = append(msg.Attachments, channel.Attachment{ Type: channel.AttachmentImage, PlatformKey: key, SourcePlatform: Type.String(), + Metadata: map[string]any{"message_id": msg.ID}, }) } case larkim.MsgTypeFile, larkim.MsgTypeAudio, larkim.MsgTypeMedia: @@ -66,6 +79,7 @@ func extractFeishuInbound(event *larkim.P2MessageReceiveV1, botOpenID string) ch PlatformKey: key, SourcePlatform: Type.String(), Name: name, + Metadata: map[string]any{"message_id": msg.ID}, }) } } @@ -232,13 +246,63 @@ func hasFeishuAtTag(raw any) bool { return false } -func extractFeishuPostText(contentMap map[string]any) string { - zhCN, ok := contentMap["zh_cn"].(map[string]any) - if !ok { - return "" +// getFeishuPostContentLines returns content lines from post message. +// Feishu event payload uses root-level content: {"title":"","content":[[...],[...]]}. +func getFeishuPostContentLines(contentMap map[string]any) []any { + if lines, ok := contentMap["content"].([]any); ok { + return lines } - linesRaw, ok := zhCN["content"].([]any) - if !ok { + return nil +} + +// extractFeishuPostAttachments extracts image/file attachments from post content (e.g. img elements). 
+func extractFeishuPostAttachments(contentMap map[string]any, messageID string) []channel.Attachment { + var result []channel.Attachment + linesRaw := getFeishuPostContentLines(contentMap) + if linesRaw == nil { + return result + } + for _, rawLine := range linesRaw { + line, ok := rawLine.([]any) + if !ok { + continue + } + for _, rawPart := range line { + part, ok := rawPart.(map[string]any) + if !ok { + continue + } + tag := strings.ToLower(strings.TrimSpace(stringValue(part["tag"]))) + if tag == "img" { + if key, ok := part["image_key"].(string); ok && strings.TrimSpace(key) != "" { + result = append(result, channel.Attachment{ + Type: channel.AttachmentImage, + PlatformKey: strings.TrimSpace(key), + SourcePlatform: Type.String(), + Metadata: map[string]any{"message_id": messageID}, + }) + } + } + if tag == "file" { + if key, ok := part["file_key"].(string); ok && strings.TrimSpace(key) != "" { + name := strings.TrimSpace(stringValue(part["file_name"])) + result = append(result, channel.Attachment{ + Type: channel.AttachmentFile, + PlatformKey: strings.TrimSpace(key), + SourcePlatform: Type.String(), + Name: name, + Metadata: map[string]any{"message_id": messageID}, + }) + } + } + } + } + return result +} + +func extractFeishuPostText(contentMap map[string]any) string { + linesRaw := getFeishuPostContentLines(contentMap) + if linesRaw == nil { return "" } parts := make([]string, 0, 8) diff --git a/internal/channel/adapters/feishu/stream.go b/internal/channel/adapters/feishu/stream.go index ce063119..9986eb92 100644 --- a/internal/channel/adapters/feishu/stream.go +++ b/internal/channel/adapters/feishu/stream.go @@ -18,8 +18,9 @@ import ( const ( feishuStreamThinkingText = "Thinking..." + feishuStreamToolHintText = "Calling tools..." 
feishuStreamPatchInterval = 700 * time.Millisecond - feishuStreamMaxRunes = 8000 + feishuStreamMaxRunes = 8000 ) type feishuOutboundStream struct { @@ -68,6 +69,26 @@ func (s *feishuOutboundStream) Push(ctx context.Context, event channel.StreamEve return nil } return s.patchCard(ctx, s.textBuffer.String()) + case channel.StreamEventToolCallStart: + if err := s.ensureCard(ctx, feishuStreamToolHintText); err != nil { + return err + } + return s.patchCard(ctx, feishuStreamToolHintText) + case channel.StreamEventToolCallEnd: + return nil + case channel.StreamEventAttachment: + if len(event.Attachments) == 0 { + return nil + } + media := channel.Message{ + Attachments: event.Attachments, + } + return s.adapter.Send(ctx, s.cfg, channel.OutboundMessage{ + Target: s.target, + Message: media, + }) + case channel.StreamEventAgentStart, channel.StreamEventAgentEnd, channel.StreamEventPhaseStart, channel.StreamEventPhaseEnd, channel.StreamEventProcessingStarted, channel.StreamEventProcessingCompleted, channel.StreamEventProcessingFailed: + return nil case channel.StreamEventFinal: if event.Final == nil || event.Final.Message.IsEmpty() { return nil @@ -108,7 +129,7 @@ func (s *feishuOutboundStream) Push(ctx context.Context, event channel.StreamEve } return s.patchCard(ctx, "Error: "+errText) default: - return fmt.Errorf("unsupported stream event type: %s", event.Type) + return nil } } @@ -227,9 +248,51 @@ func (s *feishuOutboundStream) patchCard(ctx context.Context, text string) error return nil } +// extractReadableFromJSON tries to extract human-readable text from JSON-like content. +// Returns the original text if not JSON or extraction fails. 
+func extractReadableFromJSON(text string) string { + trimmed := strings.TrimSpace(text) + if trimmed == "" { + return text + } + first := strings.TrimLeft(trimmed, " \t\n\r") + if (len(first) > 0 && first[0] != '{' && first[0] != '[') || len(first) < 2 { + return text + } + var raw map[string]any + if err := json.Unmarshal([]byte(trimmed), &raw); err != nil { + var arr []any + if err := json.Unmarshal([]byte(trimmed), &arr); err != nil { + return text + } + if len(arr) == 0 { + return text + } + if s, ok := arr[0].(string); ok && strings.TrimSpace(s) != "" { + return s + } + return text + } + for _, key := range []string{"text", "message", "content", "result", "output", "response", "answer"} { + if v, ok := raw[key]; ok && v != nil { + switch val := v.(type) { + case string: + if strings.TrimSpace(val) != "" { + return val + } + case map[string]any: + if b, err := json.Marshal(val); err == nil { + return string(b) + } + } + } + } + return text +} + func buildFeishuStreamCardContent(text string) (string, error) { - content := normalizeFeishuStreamText(text) - content = processFeishuCardMarkdown(content) + content := normalizeFeishuStreamText(extractReadableFromJSON(text)) + body := processFeishuCardMarkdown(content) card := map[string]any{ "config": map[string]any{ "wide_screen_mode": true, @@ -244,7 +307,7 @@ func buildFeishuStreamCardContent(text string) (string, error) { "is_short": false, "text": map[string]any{ "tag": "lark_md", - "content": content, + "content": body, }, }, }, diff --git a/internal/channel/adapters/feishu/stream_test.go b/internal/channel/adapters/feishu/stream_test.go new file mode 100644 index 00000000..1c44e8e2 --- /dev/null +++ b/internal/channel/adapters/feishu/stream_test.go @@ -0,0 +1,31 @@ +package feishu + +import ( + "testing" +) + +func TestExtractReadableFromJSON(t *testing.T) { + t.Parallel() + cases := []struct { + name string + in string + want string + }{ + {"plain text", "hello world", "hello world"}, + {"json with text", 
`{"text":"extracted"}`, "extracted"}, + {"json with message", `{"message":"ok"}`, "ok"}, + {"json with content", `{"content":"result"}`, "result"}, + {"invalid json", `{invalid`, `{invalid`}, + {"empty object", `{}`, `{}`}, + {"array of strings", `["first"]`, "first"}, + {"array empty", `[]`, `[]`}, + } + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + got := extractReadableFromJSON(tc.in) + if got != tc.want { + t.Errorf("extractReadableFromJSON(%q) = %q, want %q", tc.in, got, tc.want) + } + }) + } +} diff --git a/internal/channel/adapters/telegram/stream.go b/internal/channel/adapters/telegram/stream.go index 0ef9976e..5891a63c 100644 --- a/internal/channel/adapters/telegram/stream.go +++ b/internal/channel/adapters/telegram/stream.go @@ -15,6 +15,7 @@ import ( ) const telegramStreamEditThrottle = 5000 * time.Millisecond +const telegramStreamToolHintText = "Calling tools..." var testEditFunc func(bot *tgbotapi.BotAPI, chatID int64, msgID int, text string, parseMode string) error @@ -184,6 +185,15 @@ func (s *telegramOutboundStream) Push(ctx context.Context, event channel.StreamE switch event.Type { case channel.StreamEventStatus: return nil + case channel.StreamEventToolCallStart: + if err := s.ensureStreamMessage(ctx, telegramStreamToolHintText); err != nil { + return err + } + return s.editStreamMessageFinal(ctx, telegramStreamToolHintText) + case channel.StreamEventToolCallEnd: + return nil + case channel.StreamEventAttachment, channel.StreamEventProcessingFailed, channel.StreamEventAgentStart, channel.StreamEventAgentEnd, channel.StreamEventPhaseStart, channel.StreamEventPhaseEnd, channel.StreamEventProcessingStarted, channel.StreamEventProcessingCompleted: + return nil case channel.StreamEventDelta: if event.Delta == "" { return nil @@ -265,7 +275,7 @@ func (s *telegramOutboundStream) Push(ctx context.Context, event channel.StreamE } return s.editStreamMessage(ctx, display) default: - return fmt.Errorf("unsupported stream event type: %s", 
event.Type) + return nil } } diff --git a/internal/channel/adapters/telegram/stream_test.go b/internal/channel/adapters/telegram/stream_test.go index 106cc3aa..b2e3628c 100644 --- a/internal/channel/adapters/telegram/stream_test.go +++ b/internal/channel/adapters/telegram/stream_test.go @@ -64,7 +64,7 @@ func TestTelegramOutboundStream_PushNilAdapter(t *testing.T) { } } -func TestTelegramOutboundStream_PushUnsupportedEventType(t *testing.T) { +func TestTelegramOutboundStream_PushUnknownEventTypeSkipped(t *testing.T) { t.Parallel() adapter := NewTelegramAdapter(nil) @@ -72,11 +72,8 @@ func TestTelegramOutboundStream_PushUnsupportedEventType(t *testing.T) { ctx := context.Background() err := s.Push(ctx, channel.StreamEvent{Type: channel.StreamEventType("unknown")}) - if err == nil { - t.Fatal("Push with unknown event type should return error") - } - if !strings.Contains(err.Error(), "unsupported") { - t.Fatalf("expected unsupported error: %v", err) + if err != nil { + t.Fatalf("Push with unknown event type should be silently skipped: %v", err) } } diff --git a/internal/channel/adapters/telegram/telegram.go b/internal/channel/adapters/telegram/telegram.go index 78fa5cee..6568adb2 100644 --- a/internal/channel/adapters/telegram/telegram.go +++ b/internal/channel/adapters/telegram/telegram.go @@ -4,7 +4,9 @@ import ( "context" "errors" "fmt" + "io" "log/slog" + "net/http" "strconv" "strings" "sync" @@ -15,6 +17,7 @@ import ( "github.com/memohai/memoh/internal/channel" "github.com/memohai/memoh/internal/channel/adapters/common" + "github.com/memohai/memoh/internal/media" ) const telegramMaxMessageLength = 4096 @@ -797,6 +800,71 @@ func (a *TelegramAdapter) buildTelegramAttachment(bot *tgbotapi.BotAPI, attType return att } +// ResolveAttachment resolves a Telegram attachment reference to a byte stream. +// It supports platform_key-based references and URL fallback. 
+func (a *TelegramAdapter) ResolveAttachment(ctx context.Context, cfg channel.ChannelConfig, attachment channel.Attachment) (channel.AttachmentPayload, error) { + fileID := strings.TrimSpace(attachment.PlatformKey) + if fileID == "" && strings.TrimSpace(attachment.URL) == "" { + return channel.AttachmentPayload{}, fmt.Errorf("telegram attachment requires platform_key or url") + } + telegramCfg, err := parseConfig(cfg.Credentials) + if err != nil { + return channel.AttachmentPayload{}, err + } + bot, err := a.getOrCreateBot(telegramCfg.BotToken, cfg.ID) + if err != nil { + return channel.AttachmentPayload{}, err + } + downloadURL := strings.TrimSpace(attachment.URL) + if downloadURL == "" { + downloadURL, err = bot.GetFileDirectURL(fileID) + if err != nil { + return channel.AttachmentPayload{}, fmt.Errorf("resolve telegram file url: %w", err) + } + } + req, err := http.NewRequestWithContext(ctx, http.MethodGet, downloadURL, nil) + if err != nil { + return channel.AttachmentPayload{}, fmt.Errorf("build download request: %w", err) + } + client := &http.Client{Timeout: 60 * time.Second} + resp, err := client.Do(req) + if err != nil { + return channel.AttachmentPayload{}, fmt.Errorf("download attachment: %w", err) + } + if resp.StatusCode != http.StatusOK { + defer func() { + _ = resp.Body.Close() + }() + _, _ = io.Copy(io.Discard, resp.Body) + return channel.AttachmentPayload{}, fmt.Errorf("download attachment status: %d", resp.StatusCode) + } + maxBytes := media.MaxAssetBytes + if resp.ContentLength > maxBytes { + defer func() { + _ = resp.Body.Close() + }() + _, _ = io.Copy(io.Discard, resp.Body) + return channel.AttachmentPayload{}, fmt.Errorf("%w: max %d bytes", media.ErrAssetTooLarge, maxBytes) + } + mime := strings.TrimSpace(attachment.Mime) + if mime == "" { + mime = strings.TrimSpace(resp.Header.Get("Content-Type")) + if idx := strings.Index(mime, ";"); idx >= 0 { + mime = strings.TrimSpace(mime[:idx]) + } + } + size := attachment.Size + if size <= 0 && 
resp.ContentLength > 0 { + size = resp.ContentLength + } + return channel.AttachmentPayload{ + Reader: resp.Body, + Mime: mime, + Name: strings.TrimSpace(attachment.Name), + Size: size, + }, nil +} + func pickTelegramPhoto(items []tgbotapi.PhotoSize) tgbotapi.PhotoSize { if len(items) == 0 { return tgbotapi.PhotoSize{} diff --git a/internal/channel/adapters/telegram/telegram_test.go b/internal/channel/adapters/telegram/telegram_test.go index ae469dd5..87620bff 100644 --- a/internal/channel/adapters/telegram/telegram_test.go +++ b/internal/channel/adapters/telegram/telegram_test.go @@ -96,6 +96,19 @@ func TestBuildTelegramAttachmentIncludesPlatformReference(t *testing.T) { } } +func TestTelegramResolveAttachmentRequiresReference(t *testing.T) { + t.Parallel() + + adapter := NewTelegramAdapter(nil) + _, err := adapter.ResolveAttachment(context.Background(), channel.ChannelConfig{}, channel.Attachment{}) + if err == nil { + t.Fatal("expected error when attachment has no platform_key/url") + } + if !strings.Contains(err.Error(), "platform_key") { + t.Fatalf("expected platform_key error, got: %v", err) + } +} + func TestParseReplyToMessageID(t *testing.T) { t.Parallel() diff --git a/internal/channel/connection.go b/internal/channel/connection.go index a64a476f..bbda050f 100644 --- a/internal/channel/connection.go +++ b/internal/channel/connection.go @@ -6,6 +6,7 @@ import ( "fmt" "log/slog" "strings" + "time" ) type connectionEntry struct { @@ -14,11 +15,8 @@ type connectionEntry struct { } func (m *Manager) refresh(ctx context.Context) { - // Serialize refresh calls to prevent concurrent reconcile from starting - // duplicate adapter connections. - if !m.refreshMu.TryLock() { - return - } + // Serialize refresh calls so concurrent callers wait instead of silently skipping. 
+ m.refreshMu.Lock() defer m.refreshMu.Unlock() if m.service == nil { @@ -41,17 +39,20 @@ func (m *Manager) refresh(ctx context.Context) { func (m *Manager) reconcile(ctx context.Context, configs []ChannelConfig) { active := map[string]ChannelConfig{} for _, cfg := range configs { - if cfg.ID == "" { - continue - } - status := strings.ToLower(strings.TrimSpace(cfg.Status)) - if status != "" && status != "active" && status != "verified" { + if cfg.ID == "" || cfg.Disabled { continue } active[cfg.ID] = cfg if err := m.ensureConnection(ctx, cfg); err != nil { + m.markConnectionStatus(cfg, false, err) if m.logger != nil { - m.logger.Error("adapter start failed", slog.String("channel", cfg.ChannelType.String()), slog.String("config_id", cfg.ID), slog.Any("error", err)) + m.logger.Error( + "adapter start failed", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + slog.Any("error", err), + ) } } } @@ -64,19 +65,37 @@ func (m *Manager) reconcile(ctx context.Context, configs []ChannelConfig) { } if entry != nil && entry.connection != nil { if m.logger != nil { - m.logger.Info("adapter stop", slog.String("channel", entry.config.ChannelType.String()), slog.String("config_id", id)) + m.logger.Info( + "adapter stop", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", id), + ) } if err := entry.connection.Stop(ctx); err != nil && !errors.Is(err, ErrStopNotSupported) && m.logger != nil { - m.logger.Warn("adapter stop failed", slog.String("config_id", id), slog.Any("error", err)) + m.logger.Warn( + "adapter stop failed", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", id), + slog.Any("error", err), + ) } } delete(m.connections, id) + delete(m.connectionMeta, id) + } + for id := range m.connectionMeta { + if _, ok := active[id]; !ok { + 
delete(m.connectionMeta, id) + } } } func (m *Manager) ensureConnection(ctx context.Context, cfg ChannelConfig) error { _, ok := m.registry.GetReceiver(cfg.ChannelType) if !ok { + m.markConnectionStatus(cfg, false, fmt.Errorf("receiver not available")) return nil } @@ -85,6 +104,8 @@ func (m *Manager) ensureConnection(ctx context.Context, cfg ChannelConfig) error // Config unchanged — nothing to do. if entry != nil && !entry.config.UpdatedAt.Before(cfg.UpdatedAt) { + running := entry.connection != nil && entry.connection.Running() + m.setConnectionStatusLocked(entry.config, running, nil) m.mu.Unlock() return nil } @@ -100,27 +121,41 @@ func (m *Manager) ensureConnection(ctx context.Context, cfg ChannelConfig) error if oldConn != nil { if m.logger != nil { - m.logger.Info("adapter restart", slog.String("channel", cfg.ChannelType.String()), slog.String("config_id", cfg.ID)) + m.logger.Info( + "adapter restart", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + ) } if err := oldConn.Stop(ctx); err != nil { if errors.Is(err, ErrStopNotSupported) { if m.logger != nil { - m.logger.Warn("adapter restart skipped", slog.String("channel", cfg.ChannelType.String()), slog.String("config_id", cfg.ID)) + m.logger.Warn( + "adapter restart skipped", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + ) } // Re-insert the entry since we can't restart it. 
m.mu.Lock() if _, exists := m.connections[cfg.ID]; !exists { m.connections[cfg.ID] = entry + running := entry != nil && entry.connection != nil && entry.connection.Running() + m.setConnectionStatusLocked(entry.config, running, nil) } m.mu.Unlock() return nil } + m.markConnectionStatus(cfg, false, err) return err } } receiver, ok := m.registry.GetReceiver(cfg.ChannelType) if !ok { + m.markConnectionStatus(cfg, false, fmt.Errorf("receiver not available")) return nil } @@ -128,20 +163,33 @@ func (m *Manager) ensureConnection(ctx context.Context, cfg ChannelConfig) error // for this config while we were stopping the old one. m.mu.Lock() if existing, ok := m.connections[cfg.ID]; ok && existing != nil { + running := existing.connection != nil && existing.connection.Running() + m.setConnectionStatusLocked(existing.config, running, nil) m.mu.Unlock() return nil } m.mu.Unlock() if m.logger != nil { - m.logger.Info("adapter start", slog.String("channel", cfg.ChannelType.String()), slog.String("config_id", cfg.ID)) + m.logger.Info( + "adapter start", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + ) } handler := m.handleInbound for i := len(m.middlewares) - 1; i >= 0; i-- { handler = m.middlewares[i](handler) } - conn, err := receiver.Connect(ctx, cfg, handler) + connectCtx := context.Background() + if ctx != nil { + // Decouple long-lived adapter connections from short-lived request contexts. + connectCtx = context.WithoutCancel(ctx) + } + conn, err := receiver.Connect(connectCtx, cfg, handler) if err != nil { + m.markConnectionStatus(cfg, false, err) return err } @@ -149,31 +197,131 @@ func (m *Manager) ensureConnection(ctx context.Context, cfg ChannelConfig) error // Final check: if another goroutine raced and inserted first, stop our new // connection and keep the existing one. 
if existing, ok := m.connections[cfg.ID]; ok && existing != nil { + running := existing.connection != nil && existing.connection.Running() + m.setConnectionStatusLocked(existing.config, running, nil) m.mu.Unlock() - _ = conn.Stop(ctx) + _ = conn.Stop(context.Background()) return nil } m.connections[cfg.ID] = &connectionEntry{ config: cfg, connection: conn, } + m.setConnectionStatusLocked(cfg, true, nil) m.mu.Unlock() return nil } +// EnsureConnection starts, restarts, or stops the connection for the given config. +// Disabled configs are stopped and removed; enabled configs are started or restarted. +func (m *Manager) EnsureConnection(ctx context.Context, cfg ChannelConfig) error { + if cfg.ID == "" { + return fmt.Errorf("config id is required") + } + if cfg.Disabled { + return m.removeConnection(ctx, cfg.ID) + } + return m.ensureConnection(ctx, cfg) +} + +// RemoveConnection stops and removes connections matching the given bot and channel type. +func (m *Manager) RemoveConnection(ctx context.Context, botID string, channelType ChannelType) { + botID = strings.TrimSpace(botID) + if botID == "" { + return + } + m.mu.Lock() + defer m.mu.Unlock() + for id, entry := range m.connections { + if entry == nil || entry.config.BotID != botID || entry.config.ChannelType != channelType { + continue + } + if entry.connection != nil { + if m.logger != nil { + m.logger.Info( + "connection remove", + slog.String("bot_id", botID), + slog.String("channel", channelType.String()), + slog.String("config_id", id), + ) + } + if err := entry.connection.Stop(ctx); err != nil && !errors.Is(err, ErrStopNotSupported) && m.logger != nil { + m.logger.Warn( + "connection stop failed", + slog.String("bot_id", botID), + slog.String("channel", channelType.String()), + slog.String("config_id", id), + slog.Any("error", err), + ) + } + } + delete(m.connections, id) + delete(m.connectionMeta, id) + } +} + +func (m *Manager) removeConnection(ctx context.Context, configID string) error { + m.mu.Lock() + 
entry := m.connections[configID] + if entry == nil { + delete(m.connectionMeta, configID) + m.mu.Unlock() + return nil + } + delete(m.connections, configID) + delete(m.connectionMeta, configID) + m.mu.Unlock() + + if entry.connection != nil { + if m.logger != nil { + m.logger.Info( + "connection remove", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", configID), + ) + } + if err := entry.connection.Stop(ctx); err != nil && !errors.Is(err, ErrStopNotSupported) { + if m.logger != nil { + m.logger.Warn( + "connection stop failed", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", configID), + slog.Any("error", err), + ) + } + return err + } + } + return nil +} + func (m *Manager) stopAll(ctx context.Context) { m.mu.Lock() defer m.mu.Unlock() for id, entry := range m.connections { if entry != nil && entry.connection != nil { if m.logger != nil { - m.logger.Info("adapter stop", slog.String("channel", entry.config.ChannelType.String()), slog.String("config_id", id)) + m.logger.Info( + "adapter stop", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", id), + ) } if err := entry.connection.Stop(ctx); err != nil && !errors.Is(err, ErrStopNotSupported) && m.logger != nil { - m.logger.Warn("adapter stop failed", slog.String("config_id", id), slog.Any("error", err)) + m.logger.Warn( + "adapter stop failed", + slog.String("bot_id", entry.config.BotID), + slog.String("channel", entry.config.ChannelType.String()), + slog.String("config_id", id), + slog.Any("error", err), + ) } } delete(m.connections, id) + delete(m.connectionMeta, id) } } @@ -189,7 +337,12 @@ func (m *Manager) Stop(ctx context.Context, configID string) error { if entry == nil || entry.connection == nil { return nil } - return entry.connection.Stop(ctx) + err := 
entry.connection.Stop(ctx) + if err != nil { + return err + } + m.markConnectionStatus(entry.config, false, nil) + return nil } // StopByBot terminates all connections belonging to the given bot. @@ -206,7 +359,54 @@ func (m *Manager) StopByBot(ctx context.Context, botID string) error { _ = entry.connection.Stop(ctx) } delete(m.connections, id) + delete(m.connectionMeta, id) } } return nil } + +func (m *Manager) markConnectionStatus(cfg ChannelConfig, running bool, checkErr error) { + m.mu.Lock() + defer m.mu.Unlock() + m.setConnectionStatusLocked(cfg, running, checkErr) +} + +func (m *Manager) setConnectionStatusLocked(cfg ChannelConfig, running bool, checkErr error) { + if strings.TrimSpace(cfg.ID) == "" { + return + } + if m.connectionMeta == nil { + m.connectionMeta = map[string]ConnectionStatus{} + } + previous, hasPrevious := m.connectionMeta[cfg.ID] + status := ConnectionStatus{ + ConfigID: cfg.ID, + BotID: cfg.BotID, + ChannelType: cfg.ChannelType, + Running: running, + UpdatedAt: time.Now().UTC(), + } + if checkErr != nil { + status.LastError = checkErr.Error() + } + m.connectionMeta[cfg.ID] = status + if m.logger != nil { + if checkErr != nil && (!hasPrevious || previous.LastError != status.LastError || previous.Running != status.Running) { + m.logger.Warn( + "connection health check failed", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + slog.Any("error", checkErr), + ) + } + if running && hasPrevious && strings.TrimSpace(previous.LastError) != "" { + m.logger.Info( + "connection health recovered", + slog.String("bot_id", cfg.BotID), + slog.String("channel", cfg.ChannelType.String()), + slog.String("config_id", cfg.ID), + ) + } + } +} diff --git a/internal/channel/helpers_test.go b/internal/channel/helpers_test.go index 50434c57..46d90b13 100644 --- a/internal/channel/helpers_test.go +++ b/internal/channel/helpers_test.go @@ -66,41 +66,3 @@ func 
TestBindingCriteriaFromIdentity(t *testing.T) { } } -func TestNormalizeChannelConfigStatus(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - input string - want string - wantErr bool - }{ - {name: "default pending", input: "", want: "pending"}, - {name: "pending passthrough", input: "pending", want: "pending"}, - {name: "verified passthrough", input: "verified", want: "verified"}, - {name: "disabled passthrough", input: "disabled", want: "disabled"}, - {name: "active alias", input: "active", want: "verified"}, - {name: "inactive alias", input: "inactive", want: "disabled"}, - {name: "unknown status", input: "paused", wantErr: true}, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - got, err := normalizeChannelConfigStatus(tt.input) - if tt.wantErr { - if err == nil { - t.Fatalf("expected error, got nil") - } - return - } - if err != nil { - t.Fatalf("expected no error, got %v", err) - } - if got != tt.want { - t.Fatalf("unexpected status: got %s, want %s", got, tt.want) - } - }) - } -} diff --git a/internal/channel/inbound/channel.go b/internal/channel/inbound/channel.go index f4b9e11f..39bff54d 100644 --- a/internal/channel/inbound/channel.go +++ b/internal/channel/inbound/channel.go @@ -4,7 +4,9 @@ import ( "context" "encoding/json" "fmt" + "io" "log/slog" + "net/http" "regexp" "strings" "time" @@ -15,6 +17,7 @@ import ( "github.com/memohai/memoh/internal/channel/route" "github.com/memohai/memoh/internal/conversation" "github.com/memohai/memoh/internal/conversation/flow" + "github.com/memohai/memoh/internal/media" messagepkg "github.com/memohai/memoh/internal/message" ) @@ -33,11 +36,19 @@ type RouteResolver interface { ResolveConversation(ctx context.Context, input route.ResolveInput) (route.ResolveConversationResult, error) } +type mediaIngestor interface { + Ingest(ctx context.Context, input media.IngestInput) (media.Asset, error) + // AccessPath returns a consumer-accessible reference for 
a persisted asset. + // The format depends on the storage backend (e.g. container path, URL). + AccessPath(asset media.Asset) string +} + // ChannelInboundProcessor routes channel inbound messages to the chat gateway. type ChannelInboundProcessor struct { runner flow.Runner routeResolver RouteResolver message messagepkg.Writer + mediaService mediaIngestor registry *channel.Registry logger *slog.Logger jwtSecret string @@ -87,6 +98,14 @@ func (p *ChannelInboundProcessor) IdentityMiddleware() channel.Middleware { return p.identity.Middleware() } +// SetMediaService configures media ingestion support for inbound attachments. +func (p *ChannelInboundProcessor) SetMediaService(mediaService mediaIngestor) { + if p == nil { + return + } + p.mediaService = mediaService +} + // HandleInbound processes an inbound channel message through identity resolution and chat gateway. func (p *ChannelInboundProcessor) HandleInbound(ctx context.Context, cfg channel.ChannelConfig, msg channel.InboundMessage, sender channel.StreamReplySender) error { if p.runner == nil { @@ -96,7 +115,20 @@ func (p *ChannelInboundProcessor) HandleInbound(ctx context.Context, cfg channel return fmt.Errorf("reply sender not configured") } text := buildInboundQuery(msg.Message) - if strings.TrimSpace(text) == "" { + if p.logger != nil { + p.logger.Debug("inbound handle start", + slog.String("channel", msg.Channel.String()), + slog.String("message_id", strings.TrimSpace(msg.Message.ID)), + slog.String("query", strings.TrimSpace(text)), + slog.Int("attachments", len(msg.Message.Attachments)), + slog.String("conversation_type", strings.TrimSpace(msg.Conversation.Type)), + slog.String("conversation_id", strings.TrimSpace(msg.Conversation.ID)), + ) + } + if strings.TrimSpace(text) == "" && len(msg.Message.Attachments) == 0 { + if p.logger != nil { + p.logger.Debug("inbound dropped empty", slog.String("channel", msg.Channel.String())) + } return nil } state, err := p.requireIdentity(ctx, cfg, msg) @@ -123,6 +155,8 
@@ func (p *ChannelInboundProcessor) HandleInbound(ctx context.Context, cfg channel } identity := state.Identity + resolvedAttachments := p.ingestInboundAttachments(ctx, cfg, msg, strings.TrimSpace(identity.BotID), msg.Message.Attachments) + attachments := mapChannelAttachments(resolvedAttachments) // Resolve or create the route via channel_routes. if p.routeResolver == nil { @@ -157,12 +191,14 @@ func (p *ChannelInboundProcessor) HandleInbound(ctx context.Context, cfg channel slog.Bool("is_mentioned", metadataBool(msg.Metadata, "is_mentioned")), slog.Bool("is_reply_to_bot", metadataBool(msg.Metadata, "is_reply_to_bot")), slog.String("conversation_type", strings.TrimSpace(msg.Conversation.Type)), + slog.String("query", strings.TrimSpace(text)), + slog.Int("attachments", len(attachments)), ) } - p.persistInboundUser(ctx, resolved.RouteID, identity, msg, text, "passive_sync") + p.persistInboundUser(ctx, resolved.RouteID, identity, msg, text, attachments, "passive_sync") return nil } - userMessagePersisted := p.persistInboundUser(ctx, resolved.RouteID, identity, msg, text, "active_chat") + userMessagePersisted := p.persistInboundUser(ctx, resolved.RouteID, identity, msg, text, attachments, "active_chat") // Issue chat token for reply routing. 
chatToken := "" @@ -284,6 +320,7 @@ func (p *ChannelInboundProcessor) HandleInbound(ctx context.Context, cfg channel CurrentChannel: msg.Channel.String(), Channels: []string{msg.Channel.String()}, UserMessagePersisted: userMessagePersisted, + Attachments: attachments, }) var ( @@ -507,7 +544,15 @@ func metadataBool(metadata map[string]any, key string) bool { } } -func (p *ChannelInboundProcessor) persistInboundUser(ctx context.Context, routeID string, identity InboundIdentity, msg channel.InboundMessage, query string, triggerMode string) bool { +func (p *ChannelInboundProcessor) persistInboundUser( + ctx context.Context, + routeID string, + identity InboundIdentity, + msg channel.InboundMessage, + query string, + attachments []conversation.ChatAttachment, + triggerMode string, +) bool { if p.message == nil { return false } @@ -540,6 +585,7 @@ func (p *ChannelInboundProcessor) persistInboundUser(ctx context.Context, routeI Role: "user", Content: payload, Metadata: meta, + Assets: chatAttachmentsToAssetRefs(attachments), }); err != nil && p.logger != nil { p.logger.Warn("persist inbound user message failed", slog.Any("error", err)) return false @@ -651,8 +697,15 @@ type gatewayStreamEnvelope struct { Delta string `json:"delta"` Error string `json:"error"` Message string `json:"message"` + Image string `json:"image"` Data json.RawMessage `json:"data"` Messages []conversation.ModelMessage `json:"messages"` + + ToolName string `json:"toolName"` + ToolCallID string `json:"toolCallId"` + Input json.RawMessage `json:"input"` + Result json.RawMessage `json:"result"` + Attachments json.RawMessage `json:"attachments"` } type gatewayStreamDoneData struct { @@ -685,6 +738,7 @@ func mapStreamChunkToChannelEvents(chunk conversation.StreamChunk) ([]channel.St { Type: channel.StreamEventDelta, Delta: envelope.Delta, + Phase: channel.StreamPhaseText, }, }, finalMessages, nil case "reasoning_delta": @@ -695,11 +749,95 @@ func mapStreamChunkToChannelEvents(chunk 
conversation.StreamChunk) ([]channel.St { Type: channel.StreamEventDelta, Delta: envelope.Delta, - Metadata: map[string]any{ - "phase": "reasoning", + Phase: channel.StreamPhaseReasoning, + }, + }, finalMessages, nil + case "tool_call_start": + return []channel.StreamEvent{ + { + Type: channel.StreamEventToolCallStart, + ToolCall: &channel.StreamToolCall{ + Name: strings.TrimSpace(envelope.ToolName), + CallID: strings.TrimSpace(envelope.ToolCallID), + Input: parseRawJSON(envelope.Input), }, }, }, finalMessages, nil + case "tool_call_end": + return []channel.StreamEvent{ + { + Type: channel.StreamEventToolCallEnd, + ToolCall: &channel.StreamToolCall{ + Name: strings.TrimSpace(envelope.ToolName), + CallID: strings.TrimSpace(envelope.ToolCallID), + Input: parseRawJSON(envelope.Input), + Result: parseRawJSON(envelope.Result), + }, + }, + }, finalMessages, nil + case "reasoning_start": + return []channel.StreamEvent{ + {Type: channel.StreamEventPhaseStart, Phase: channel.StreamPhaseReasoning}, + }, finalMessages, nil + case "reasoning_end": + return []channel.StreamEvent{ + {Type: channel.StreamEventPhaseEnd, Phase: channel.StreamPhaseReasoning}, + }, finalMessages, nil + case "text_start": + return []channel.StreamEvent{ + {Type: channel.StreamEventPhaseStart, Phase: channel.StreamPhaseText}, + }, finalMessages, nil + case "text_end": + return []channel.StreamEvent{ + {Type: channel.StreamEventPhaseEnd, Phase: channel.StreamPhaseText}, + }, finalMessages, nil + case "attachment_delta": + attachments := parseAttachmentDelta(envelope.Attachments) + if len(attachments) == 0 { + return nil, finalMessages, nil + } + return []channel.StreamEvent{ + {Type: channel.StreamEventAttachment, Attachments: attachments}, + }, finalMessages, nil + case "agent_start": + return []channel.StreamEvent{ + { + Type: channel.StreamEventAgentStart, + Metadata: map[string]any{ + "input": parseRawJSON(envelope.Input), + "data": parseRawJSON(envelope.Data), + }, + }, + }, finalMessages, nil + 
case "agent_end": + return []channel.StreamEvent{ + { + Type: channel.StreamEventAgentEnd, + Metadata: map[string]any{ + "result": parseRawJSON(envelope.Result), + "data": parseRawJSON(envelope.Data), + }, + }, + }, finalMessages, nil + case "processing_started": + return []channel.StreamEvent{ + {Type: channel.StreamEventProcessingStarted}, + }, finalMessages, nil + case "processing_completed": + return []channel.StreamEvent{ + {Type: channel.StreamEventProcessingCompleted}, + }, finalMessages, nil + case "processing_failed": + streamError := strings.TrimSpace(envelope.Error) + if streamError == "" { + streamError = strings.TrimSpace(envelope.Message) + } + return []channel.StreamEvent{ + { + Type: channel.StreamEventProcessingFailed, + Error: streamError, + }, + }, finalMessages, nil case "error": streamError := strings.TrimSpace(envelope.Error) if streamError == "" { @@ -720,25 +858,7 @@ func mapStreamChunkToChannelEvents(chunk conversation.StreamChunk) ([]channel.St } func buildInboundQuery(message channel.Message) string { - text := strings.TrimSpace(message.PlainText()) - if len(message.Attachments) == 0 { - return text - } - lines := make([]string, 0, len(message.Attachments)+1) - if text != "" { - lines = append(lines, text) - } - for _, att := range message.Attachments { - label := strings.TrimSpace(att.Name) - if label == "" { - label = strings.TrimSpace(att.Reference()) - } - if label == "" { - label = "unknown" - } - lines = append(lines, fmt.Sprintf("[attachment:%s] %s", att.Type, label)) - } - return strings.Join(lines, "\n") + return strings.TrimSpace(message.PlainText()) } func normalizeContentPartType(raw string) channel.MessagePartType { @@ -1043,3 +1163,298 @@ func (p *ChannelInboundProcessor) logProcessingStatusError( slog.Any("error", err), ) } + +// parseRawJSON converts raw JSON bytes to a typed value for StreamToolCall fields. 
+func parseRawJSON(raw json.RawMessage) any { + if len(raw) == 0 { + return nil + } + var v any + if err := json.Unmarshal(raw, &v); err != nil { + return string(raw) + } + return v +} + +// mapChannelAttachments converts channel.Attachment slice to conversation.ChatAttachment slice. +// When an attachment has been ingested (AssetID is set), the URL field contains +// the container-internal path; it is mapped to Path for downstream consumers. +func mapChannelAttachments(attachments []channel.Attachment) []conversation.ChatAttachment { + if len(attachments) == 0 { + return nil + } + result := make([]conversation.ChatAttachment, 0, len(attachments)) + for _, att := range attachments { + ca := conversation.ChatAttachment{ + Type: string(att.Type), + PlatformKey: att.PlatformKey, + AssetID: att.AssetID, + Name: att.Name, + Mime: att.Mime, + Size: att.Size, + Metadata: att.Metadata, + } + if strings.TrimSpace(att.AssetID) != "" { + ca.Path = att.URL + ca.Base64 = att.Base64 + } else { + ca.URL = att.URL + } + result = append(result, ca) + } + return result +} + +func (p *ChannelInboundProcessor) ingestInboundAttachments( + ctx context.Context, + cfg channel.ChannelConfig, + msg channel.InboundMessage, + botID string, + attachments []channel.Attachment, +) []channel.Attachment { + if len(attachments) == 0 || p == nil || p.mediaService == nil || strings.TrimSpace(botID) == "" { + return attachments + } + result := make([]channel.Attachment, 0, len(attachments)) + for _, att := range attachments { + item := att + if strings.TrimSpace(item.AssetID) != "" { + result = append(result, item) + continue + } + payload, err := p.loadInboundAttachmentPayload(ctx, cfg, msg, item) + if err != nil { + if p.logger != nil { + p.logger.Warn( + "inbound attachment ingest skipped", + slog.Any("error", err), + slog.String("attachment_type", strings.TrimSpace(string(item.Type))), + slog.String("attachment_url", strings.TrimSpace(item.URL)), + slog.String("platform_key", 
strings.TrimSpace(item.PlatformKey)), + ) + } + result = append(result, item) + continue + } + if strings.TrimSpace(item.Mime) == "" { + item.Mime = strings.TrimSpace(payload.mime) + } + if strings.TrimSpace(item.Name) == "" { + item.Name = strings.TrimSpace(payload.name) + } + if item.Size == 0 && payload.size > 0 { + item.Size = payload.size + } + maxBytes := media.MaxAssetBytes + asset, err := p.mediaService.Ingest(ctx, media.IngestInput{ + BotID: botID, + MediaType: mapInboundAttachmentMediaType(string(item.Type)), + Mime: strings.TrimSpace(item.Mime), + OriginalName: strings.TrimSpace(item.Name), + Metadata: item.Metadata, + Reader: payload.reader, + MaxBytes: maxBytes, + }) + if payload.reader != nil { + _ = payload.reader.Close() + } + if err != nil { + if p.logger != nil { + p.logger.Warn( + "inbound attachment ingest failed", + slog.Any("error", err), + slog.String("attachment_type", strings.TrimSpace(string(item.Type))), + slog.String("attachment_url", strings.TrimSpace(item.URL)), + slog.String("platform_key", strings.TrimSpace(item.PlatformKey)), + ) + } + result = append(result, item) + continue + } + item.AssetID = asset.ID + item.URL = p.mediaService.AccessPath(asset) + item.PlatformKey = "" + if strings.TrimSpace(item.Mime) == "" { + item.Mime = strings.TrimSpace(asset.Mime) + } + if item.Size == 0 && asset.SizeBytes > 0 { + item.Size = asset.SizeBytes + } + result = append(result, item) + } + return result +} + +type inboundAttachmentPayload struct { + reader io.ReadCloser + mime string + name string + size int64 +} + +func (p *ChannelInboundProcessor) loadInboundAttachmentPayload( + ctx context.Context, + cfg channel.ChannelConfig, + msg channel.InboundMessage, + att channel.Attachment, +) (inboundAttachmentPayload, error) { + rawURL := strings.TrimSpace(att.URL) + if rawURL != "" { + payload, err := openInboundAttachmentURL(ctx, rawURL) + if err == nil { + if strings.TrimSpace(att.Mime) != "" { + payload.mime = strings.TrimSpace(att.Mime) + } + 
if strings.TrimSpace(payload.name) == "" { + payload.name = strings.TrimSpace(att.Name) + } + return payload, nil + } + // When URL download fails and platform_key exists, attempt resolver fallback. + if strings.TrimSpace(att.PlatformKey) == "" { + return inboundAttachmentPayload{}, err + } + } + platformKey := strings.TrimSpace(att.PlatformKey) + if platformKey == "" { + return inboundAttachmentPayload{}, fmt.Errorf("attachment has no ingestible payload") + } + resolver := p.resolveAttachmentResolver(msg.Channel) + if resolver == nil { + return inboundAttachmentPayload{}, fmt.Errorf("attachment resolver not supported for channel: %s", msg.Channel.String()) + } + resolved, err := resolver.ResolveAttachment(ctx, cfg, att) + if err != nil { + return inboundAttachmentPayload{}, fmt.Errorf("resolve attachment by platform key: %w", err) + } + if resolved.Reader == nil { + return inboundAttachmentPayload{}, fmt.Errorf("resolved attachment reader is nil") + } + mime := strings.TrimSpace(att.Mime) + if mime == "" { + mime = strings.TrimSpace(resolved.Mime) + } + name := strings.TrimSpace(att.Name) + if name == "" { + name = strings.TrimSpace(resolved.Name) + } + return inboundAttachmentPayload{ + reader: resolved.Reader, + mime: mime, + name: name, + size: resolved.Size, + }, nil +} + +func openInboundAttachmentURL(ctx context.Context, rawURL string) (inboundAttachmentPayload, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil) + if err != nil { + return inboundAttachmentPayload{}, fmt.Errorf("build request: %w", err) + } + client := &http.Client{Timeout: 20 * time.Second} + resp, err := client.Do(req) + if err != nil { + return inboundAttachmentPayload{}, fmt.Errorf("download attachment: %w", err) + } + if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices { + _ = resp.Body.Close() + return inboundAttachmentPayload{}, fmt.Errorf("download attachment status: %d", resp.StatusCode) + } + maxBytes := 
media.MaxAssetBytes + if resp.ContentLength > maxBytes { + _ = resp.Body.Close() + return inboundAttachmentPayload{}, fmt.Errorf("%w: max %d bytes", media.ErrAssetTooLarge, maxBytes) + } + mime := strings.TrimSpace(resp.Header.Get("Content-Type")) + if idx := strings.Index(mime, ";"); idx >= 0 { + mime = strings.TrimSpace(mime[:idx]) + } + return inboundAttachmentPayload{ + reader: resp.Body, + mime: mime, + size: resp.ContentLength, + }, nil +} + +func (p *ChannelInboundProcessor) resolveAttachmentResolver(channelType channel.ChannelType) channel.AttachmentResolver { + if p == nil || p.registry == nil { + return nil + } + resolver, ok := p.registry.GetAttachmentResolver(channelType) + if !ok { + return nil + } + return resolver +} + +func mapInboundAttachmentMediaType(t string) media.MediaType { + switch strings.ToLower(strings.TrimSpace(t)) { + case "image", "gif": + return media.MediaTypeImage + case "audio", "voice": + return media.MediaTypeAudio + case "video": + return media.MediaTypeVideo + default: + return media.MediaTypeFile + } +} + +func chatAttachmentsToAssetRefs(attachments []conversation.ChatAttachment) []messagepkg.AssetRef { + if len(attachments) == 0 { + return nil + } + refs := make([]messagepkg.AssetRef, 0, len(attachments)) + for idx, att := range attachments { + assetID := strings.TrimSpace(att.AssetID) + if assetID == "" { + continue + } + refs = append(refs, messagepkg.AssetRef{ + AssetID: assetID, + Role: "attachment", + Ordinal: idx, + }) + } + if len(refs) == 0 { + return nil + } + return refs +} + +// parseAttachmentDelta converts raw JSON attachment data to channel Attachments. 
+func parseAttachmentDelta(raw json.RawMessage) []channel.Attachment { + if len(raw) == 0 { + return nil + } + var items []struct { + Type string `json:"type"` + URL string `json:"url"` + Path string `json:"path"` + PlatformKey string `json:"platform_key"` + AssetID string `json:"asset_id"` + Name string `json:"name"` + Mime string `json:"mime"` + Size int64 `json:"size"` + } + if err := json.Unmarshal(raw, &items); err != nil { + return nil + } + attachments := make([]channel.Attachment, 0, len(items)) + for _, item := range items { + url := strings.TrimSpace(item.URL) + if url == "" { + url = strings.TrimSpace(item.Path) + } + attachments = append(attachments, channel.Attachment{ + Type: channel.AttachmentType(strings.TrimSpace(item.Type)), + URL: url, + PlatformKey: strings.TrimSpace(item.PlatformKey), + AssetID: strings.TrimSpace(item.AssetID), + Name: strings.TrimSpace(item.Name), + Mime: strings.TrimSpace(item.Mime), + Size: item.Size, + }) + } + return attachments +} diff --git a/internal/channel/inbound/channel_test.go b/internal/channel/inbound/channel_test.go index 8af327ea..87bc43a6 100644 --- a/internal/channel/inbound/channel_test.go +++ b/internal/channel/inbound/channel_test.go @@ -4,7 +4,10 @@ import ( "context" "encoding/json" "errors" + "io" "log/slog" + "net/http" + "net/http/httptest" "strings" "testing" @@ -12,6 +15,7 @@ import ( "github.com/memohai/memoh/internal/channel/identities" "github.com/memohai/memoh/internal/channel/route" "github.com/memohai/memoh/internal/conversation" + "github.com/memohai/memoh/internal/media" messagepkg "github.com/memohai/memoh/internal/message" "github.com/memohai/memoh/internal/schedule" ) @@ -168,6 +172,73 @@ type fakeChatService struct { resolveResult route.ResolveConversationResult resolveErr error persisted []messagepkg.Message + persistedIn []messagepkg.PersistInput +} + +type fakeMediaIngestor struct { + nextID string + nextMime string + ingestErr error + calls int + inputs []media.IngestInput +} + +func 
(f *fakeMediaIngestor) Ingest(ctx context.Context, input media.IngestInput) (media.Asset, error) { + f.calls++ + f.inputs = append(f.inputs, input) + if input.Reader != nil { + _, _ = io.ReadAll(input.Reader) + } + if f.ingestErr != nil { + return media.Asset{}, f.ingestErr + } + id := strings.TrimSpace(f.nextID) + if id == "" { + id = "asset-test-id" + } + mime := strings.TrimSpace(f.nextMime) + if mime == "" { + mime = strings.TrimSpace(input.Mime) + } + return media.Asset{ + ID: id, + Mime: mime, + StorageKey: input.BotID + "/" + string(input.MediaType) + "/test/" + id, + }, nil +} + +func (f *fakeMediaIngestor) AccessPath(asset media.Asset) string { + sub := asset.StorageKey + if idx := strings.IndexByte(sub, '/'); idx >= 0 { + sub = sub[idx+1:] + } + return "/data/media/" + sub +} + +type fakeAttachmentResolverAdapter struct{} + +func (a *fakeAttachmentResolverAdapter) Type() channel.ChannelType { + return channel.ChannelType("resolver-test") +} + +func (a *fakeAttachmentResolverAdapter) Descriptor() channel.Descriptor { + return channel.Descriptor{ + Type: channel.ChannelType("resolver-test"), + DisplayName: "ResolverTest", + Capabilities: channel.ChannelCapabilities{ + Text: true, + Attachments: true, + }, + } +} + +func (a *fakeAttachmentResolverAdapter) ResolveAttachment(ctx context.Context, cfg channel.ChannelConfig, attachment channel.Attachment) (channel.AttachmentPayload, error) { + return channel.AttachmentPayload{ + Reader: io.NopCloser(strings.NewReader("resolver-bytes")), + Mime: "application/octet-stream", + Name: "resolver.bin", + Size: int64(len("resolver-bytes")), + }, nil } func (f *fakeChatService) ResolveConversation(ctx context.Context, input route.ResolveInput) (route.ResolveConversationResult, error) { @@ -178,6 +249,7 @@ func (f *fakeChatService) ResolveConversation(ctx context.Context, input route.R } func (f *fakeChatService) Persist(ctx context.Context, input messagepkg.PersistInput) (messagepkg.Message, error) { + f.persistedIn = 
append(f.persistedIn, input) msg := messagepkg.Message{ BotID: input.BotID, RouteID: input.RouteID, @@ -432,6 +504,125 @@ func TestChannelInboundProcessorGroupMentionTriggersReply(t *testing.T) { } } +func TestChannelInboundProcessorPersistsAttachmentAssetRefs(t *testing.T) { + channelIdentitySvc := &fakeChannelIdentityService{channelIdentity: identities.ChannelIdentity{ID: "channelIdentity-asset"}} + memberSvc := &fakeMemberService{isMember: true} + chatSvc := &fakeChatService{resolveResult: route.ResolveConversationResult{ChatID: "chat-asset", RouteID: "route-asset"}} + gateway := &fakeChatGateway{ + resp: conversation.ChatResponse{ + Messages: []conversation.ModelMessage{ + {Role: "assistant", Content: conversation.NewTextContent("ok")}, + }, + }, + } + processor := NewChannelInboundProcessor(slog.Default(), nil, chatSvc, chatSvc, gateway, channelIdentitySvc, memberSvc, nil, nil, nil, "", 0) + sender := &fakeReplySender{} + + cfg := channel.ChannelConfig{ID: "cfg-asset", BotID: "bot-1"} + msg := channel.InboundMessage{ + BotID: "bot-1", + Channel: channel.ChannelType("feishu"), + Message: channel.Message{ + ID: "msg-asset-1", + Text: "attachment test", + Attachments: []channel.Attachment{ + { + Type: channel.AttachmentImage, + URL: "https://example.com/img.png", + AssetID: "asset-1", + Name: "img.png", + Mime: "image/png", + }, + }, + }, + ReplyTarget: "chat_id:oc_asset", + Sender: channel.Identity{SubjectID: "ext-asset"}, + Conversation: channel.Conversation{ + ID: "oc_asset", + Type: "p2p", + }, + } + + if err := processor.HandleInbound(context.Background(), cfg, msg, sender); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(chatSvc.persistedIn) != 1 { + t.Fatalf("expected one persisted input, got %d", len(chatSvc.persistedIn)) + } + if len(chatSvc.persistedIn[0].Assets) != 1 { + t.Fatalf("expected one persisted asset ref, got %d", len(chatSvc.persistedIn[0].Assets)) + } + if got := chatSvc.persistedIn[0].Assets[0].AssetID; got != "asset-1" { 
+ t.Fatalf("expected persisted asset id asset-1, got %q", got) + } + if len(gateway.gotReq.Attachments) != 1 { + t.Fatalf("expected one gateway attachment, got %d", len(gateway.gotReq.Attachments)) + } + if got := gateway.gotReq.Attachments[0].AssetID; got != "asset-1" { + t.Fatalf("expected gateway attachment asset_id asset-1, got %q", got) + } +} + +func TestChannelInboundProcessorIngestsPlatformKeyWithResolver(t *testing.T) { + channelIdentitySvc := &fakeChannelIdentityService{channelIdentity: identities.ChannelIdentity{ID: "channelIdentity-resolver"}} + memberSvc := &fakeMemberService{isMember: true} + chatSvc := &fakeChatService{resolveResult: route.ResolveConversationResult{ChatID: "chat-resolver", RouteID: "route-resolver"}} + gateway := &fakeChatGateway{ + resp: conversation.ChatResponse{ + Messages: []conversation.ModelMessage{ + {Role: "assistant", Content: conversation.NewTextContent("ok")}, + }, + }, + } + registry := channel.NewRegistry() + registry.MustRegister(&fakeAttachmentResolverAdapter{}) + processor := NewChannelInboundProcessor(slog.Default(), registry, chatSvc, chatSvc, gateway, channelIdentitySvc, memberSvc, nil, nil, nil, "", 0) + mediaSvc := &fakeMediaIngestor{nextID: "asset-resolved-1", nextMime: "application/octet-stream"} + processor.SetMediaService(mediaSvc) + sender := &fakeReplySender{} + + cfg := channel.ChannelConfig{ID: "cfg-resolver", BotID: "bot-1", ChannelType: channel.ChannelType("resolver-test")} + msg := channel.InboundMessage{ + BotID: "bot-1", + Channel: channel.ChannelType("resolver-test"), + Message: channel.Message{ + ID: "msg-resolver-1", + Text: "attachment resolver test", + Attachments: []channel.Attachment{ + { + Type: channel.AttachmentFile, + PlatformKey: "platform-file-1", + }, + }, + }, + ReplyTarget: "resolver-target", + Sender: channel.Identity{SubjectID: "resolver-user"}, + Conversation: channel.Conversation{ + ID: "resolver-conv", + Type: "p2p", + }, + } + + if err := 
processor.HandleInbound(context.Background(), cfg, msg, sender); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if mediaSvc.calls != 1 { + t.Fatalf("expected media ingest to be called once, got %d", mediaSvc.calls) + } + if len(gateway.gotReq.Attachments) != 1 { + t.Fatalf("expected one gateway attachment, got %d", len(gateway.gotReq.Attachments)) + } + if got := gateway.gotReq.Attachments[0].AssetID; got != "asset-resolved-1" { + t.Fatalf("expected resolved asset id, got %q", got) + } + if len(chatSvc.persistedIn) != 1 || len(chatSvc.persistedIn[0].Assets) != 1 { + t.Fatalf("expected one persisted asset ref, got %+v", chatSvc.persistedIn) + } + if got := chatSvc.persistedIn[0].Assets[0].AssetID; got != "asset-resolved-1" { + t.Fatalf("expected persisted asset id asset-resolved-1, got %q", got) + } +} + func TestChannelInboundProcessorPersonalGroupNonOwnerIgnored(t *testing.T) { channelIdentitySvc := &fakeChannelIdentityService{channelIdentity: identities.ChannelIdentity{ID: "channelIdentity-member"}} memberSvc := &fakeMemberService{isMember: true} @@ -704,3 +895,242 @@ func TestChannelInboundProcessorProcessingFailedNotifyErrorDoesNotOverrideChatEr t.Fatalf("unexpected processing status lifecycle: %+v", notifier.events) } } + +func TestDownloadInboundAttachmentURLTooLarge(t *testing.T) { + t.Parallel() + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Length", "999999999") + _, _ = w.Write([]byte("x")) + })) + defer server.Close() + + _, err := openInboundAttachmentURL(context.Background(), server.URL) + if err == nil { + t.Fatalf("expected too-large error") + } + if !errors.Is(err, media.ErrAssetTooLarge) { + t.Fatalf("expected ErrAssetTooLarge, got %v", err) + } +} + +func TestMapStreamChunkToChannelEvents(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + chunk string + wantType 
channel.StreamEventType + wantDelta string + wantPhase channel.StreamPhase + wantToolName string + wantAttCount int + wantError string + wantNilEvents bool + }{ + { + name: "text_delta", + chunk: `{"type":"text_delta","delta":"hello"}`, + wantType: channel.StreamEventDelta, + wantDelta: "hello", + wantPhase: channel.StreamPhaseText, + }, + { + name: "text_delta empty", + chunk: `{"type":"text_delta","delta":""}`, + wantNilEvents: true, + }, + { + name: "reasoning_delta", + chunk: `{"type":"reasoning_delta","delta":"thinking"}`, + wantType: channel.StreamEventDelta, + wantDelta: "thinking", + wantPhase: channel.StreamPhaseReasoning, + }, + { + name: "reasoning_delta empty", + chunk: `{"type":"reasoning_delta","delta":""}`, + wantNilEvents: true, + }, + { + name: "reasoning_start", + chunk: `{"type":"reasoning_start"}`, + wantType: channel.StreamEventPhaseStart, + wantPhase: channel.StreamPhaseReasoning, + }, + { + name: "reasoning_end", + chunk: `{"type":"reasoning_end"}`, + wantType: channel.StreamEventPhaseEnd, + wantPhase: channel.StreamPhaseReasoning, + }, + { + name: "text_start", + chunk: `{"type":"text_start"}`, + wantType: channel.StreamEventPhaseStart, + wantPhase: channel.StreamPhaseText, + }, + { + name: "text_end", + chunk: `{"type":"text_end"}`, + wantType: channel.StreamEventPhaseEnd, + wantPhase: channel.StreamPhaseText, + }, + { + name: "tool_call_start", + chunk: `{"type":"tool_call_start","toolName":"search_web","toolCallId":"tc_1","input":{"query":"test"}}`, + wantType: channel.StreamEventToolCallStart, + wantToolName: "search_web", + }, + { + name: "tool_call_end", + chunk: `{"type":"tool_call_end","toolName":"search_web","toolCallId":"tc_1","input":{"query":"test"},"result":{"ok":true}}`, + wantType: channel.StreamEventToolCallEnd, + wantToolName: "search_web", + }, + { + name: "attachment_delta", + chunk: `{"type":"attachment_delta","attachments":[{"type":"image","url":"https://example.com/img.png"}]}`, + wantType: 
channel.StreamEventAttachment, + wantAttCount: 1, + }, + { + name: "attachment_delta empty", + chunk: `{"type":"attachment_delta","attachments":[]}`, + wantNilEvents: true, + }, + { + name: "error", + chunk: `{"type":"error","error":"something failed"}`, + wantType: channel.StreamEventError, + wantError: "something failed", + }, + { + name: "error fallback to message", + chunk: `{"type":"error","message":"fallback msg"}`, + wantType: channel.StreamEventError, + wantError: "fallback msg", + }, + { + name: "agent_start", + chunk: `{"type":"agent_start","input":{"agent":"planner"}}`, + wantType: channel.StreamEventAgentStart, + }, + { + name: "agent_end", + chunk: `{"type":"agent_end","result":{"ok":true}}`, + wantType: channel.StreamEventAgentEnd, + }, + { + name: "processing_started", + chunk: `{"type":"processing_started"}`, + wantType: channel.StreamEventProcessingStarted, + }, + { + name: "processing_completed", + chunk: `{"type":"processing_completed"}`, + wantType: channel.StreamEventProcessingCompleted, + }, + { + name: "processing_failed", + chunk: `{"type":"processing_failed","error":"failed"}`, + wantType: channel.StreamEventProcessingFailed, + wantError: "failed", + }, + { + name: "empty chunk", + chunk: ``, + wantNilEvents: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + events, _, err := mapStreamChunkToChannelEvents(conversation.StreamChunk([]byte(tt.chunk))) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if tt.wantNilEvents { + if len(events) > 0 { + t.Fatalf("expected nil/empty events, got %d", len(events)) + } + return + } + if len(events) != 1 { + t.Fatalf("expected 1 event, got %d", len(events)) + } + ev := events[0] + if ev.Type != tt.wantType { + t.Fatalf("expected type %q, got %q", tt.wantType, ev.Type) + } + if tt.wantDelta != "" && ev.Delta != tt.wantDelta { + t.Fatalf("expected delta %q, got %q", tt.wantDelta, ev.Delta) + } + if tt.wantPhase != "" && ev.Phase != 
tt.wantPhase { + t.Fatalf("expected phase %q, got %q", tt.wantPhase, ev.Phase) + } + if tt.wantToolName != "" { + if ev.ToolCall == nil { + t.Fatal("expected non-nil ToolCall") + } + if ev.ToolCall.Name != tt.wantToolName { + t.Fatalf("expected tool name %q, got %q", tt.wantToolName, ev.ToolCall.Name) + } + } + if tt.wantAttCount > 0 && len(ev.Attachments) != tt.wantAttCount { + t.Fatalf("expected %d attachments, got %d", tt.wantAttCount, len(ev.Attachments)) + } + if tt.wantError != "" && ev.Error != tt.wantError { + t.Fatalf("expected error %q, got %q", tt.wantError, ev.Error) + } + }) + } +} + +func TestMapStreamChunkToChannelEvents_ToolCallFields(t *testing.T) { + t.Parallel() + + chunk := `{"type":"tool_call_end","toolName":"calc","toolCallId":"c1","input":{"x":1},"result":{"sum":2}}` + events, _, err := mapStreamChunkToChannelEvents(conversation.StreamChunk([]byte(chunk))) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(events) != 1 { + t.Fatalf("expected 1 event, got %d", len(events)) + } + tc := events[0].ToolCall + if tc == nil { + t.Fatal("expected non-nil ToolCall") + } + if tc.Name != "calc" || tc.CallID != "c1" { + t.Fatalf("unexpected name/callID: %q / %q", tc.Name, tc.CallID) + } + if tc.Input == nil || tc.Result == nil { + t.Fatal("expected non-nil Input and Result") + } +} + +func TestMapStreamChunkToChannelEvents_FinalMessages(t *testing.T) { + t.Parallel() + + chunk := `{"type":"agent_end","messages":[{"role":"assistant","content":"done"}]}` + events, messages, err := mapStreamChunkToChannelEvents(conversation.StreamChunk([]byte(chunk))) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(events) != 1 { + t.Fatalf("expected 1 event, got %d", len(events)) + } + if events[0].Type != channel.StreamEventAgentEnd { + t.Fatalf("expected event type %q, got %q", channel.StreamEventAgentEnd, events[0].Type) + } + if len(messages) != 1 { + t.Fatalf("expected 1 final message, got %d", len(messages)) + } + if 
messages[0].Role != "assistant" { + t.Fatalf("expected role assistant, got %q", messages[0].Role) + } +} diff --git a/internal/channel/lifecycle.go b/internal/channel/lifecycle.go new file mode 100644 index 00000000..dcea4fb8 --- /dev/null +++ b/internal/channel/lifecycle.go @@ -0,0 +1,212 @@ +package channel + +import ( + "context" + "errors" + "fmt" + "strings" + "time" +) + +// LifecycleStore persists channel configs for lifecycle orchestration. +type LifecycleStore interface { + ResolveEffectiveConfig(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) + UpsertConfig(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) + UpdateConfigDisabled(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) + DeleteConfig(ctx context.Context, botID string, channelType ChannelType) error +} + +// ConnectionController controls runtime channel connections. +type ConnectionController interface { + EnsureConnection(ctx context.Context, cfg ChannelConfig) error + RemoveConnection(ctx context.Context, botID string, channelType ChannelType) +} + +// ErrEnableChannelFailed indicates that enabling the channel (e.g. EnsureConnection) failed. +var ErrEnableChannelFailed = errors.New("enable channel failed") + +// Lifecycle coordinates persisted config updates and runtime connection state. +type Lifecycle struct { + store LifecycleStore + controller ConnectionController +} + +// NewLifecycle creates a lifecycle coordinator from storage and connection controller. +func NewLifecycle(store LifecycleStore, controller ConnectionController) *Lifecycle { + return &Lifecycle{ + store: store, + controller: controller, + } +} + +// UpsertBotChannelConfig updates config and applies connection lifecycle. +// For disabled=true, it stores config and stops any active connection. +// For disabled=false, it stores config then starts connection; on start failure it rolls back. 
+func (s *Lifecycle) UpsertBotChannelConfig(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { + if s.store == nil { + return ChannelConfig{}, fmt.Errorf("channel lifecycle store not configured") + } + disabled := false + if req.Disabled != nil { + disabled = *req.Disabled + } + if !disabled && s.controller == nil { + return ChannelConfig{}, fmt.Errorf("channel connection controller not configured") + } + + previous, hadPrevious, err := s.getPreviousConfig(ctx, botID, channelType) + if err != nil { + return ChannelConfig{}, err + } + + updated, err := s.store.UpsertConfig(ctx, botID, channelType, req) + if err != nil { + return ChannelConfig{}, err + } + + if disabled { + if s.controller != nil { + s.controller.RemoveConnection(ctx, botID, channelType) + } + return updated, nil + } + + if err := s.controller.EnsureConnection(ctx, updated); err != nil { + if rollbackErr := s.rollbackUpsert(ctx, botID, channelType, hadPrevious, previous); rollbackErr != nil { + return ChannelConfig{}, fmt.Errorf("%w (rollback failed: %v): %w", ErrEnableChannelFailed, rollbackErr, err) + } + return ChannelConfig{}, fmt.Errorf("%w: %w", ErrEnableChannelFailed, err) + } + return updated, nil +} + +// DeleteBotChannelConfig removes persisted config and stops active runtime connection. +func (s *Lifecycle) DeleteBotChannelConfig(ctx context.Context, botID string, channelType ChannelType) error { + if s.store == nil { + return fmt.Errorf("channel lifecycle store not configured") + } + if err := s.store.DeleteConfig(ctx, botID, channelType); err != nil { + return err + } + if s.controller != nil { + s.controller.RemoveConnection(ctx, botID, channelType) + } + return nil +} + +// SetBotChannelStatus updates only the disabled status and applies runtime lifecycle. 
+func (s *Lifecycle) SetBotChannelStatus(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) { + if s.store == nil { + return ChannelConfig{}, fmt.Errorf("channel lifecycle store not configured") + } + if s.controller == nil { + return ChannelConfig{}, fmt.Errorf("channel connection controller not configured") + } + + updated, err := s.store.UpdateConfigDisabled(ctx, botID, channelType, disabled) + if err != nil { + return ChannelConfig{}, err + } + if disabled { + s.controller.RemoveConnection(ctx, botID, channelType) + return updated, nil + } + + if err := s.controller.EnsureConnection(ctx, updated); err != nil { + if _, rollbackErr := s.store.UpdateConfigDisabled(ctx, botID, channelType, true); rollbackErr != nil { + return ChannelConfig{}, fmt.Errorf("%w (status rollback failed: %v): %w", ErrEnableChannelFailed, rollbackErr, err) + } + s.controller.RemoveConnection(ctx, botID, channelType) + return ChannelConfig{}, fmt.Errorf("%w: %w", ErrEnableChannelFailed, err) + } + return updated, nil +} + +func (s *Lifecycle) getPreviousConfig(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, bool, error) { + cfg, err := s.store.ResolveEffectiveConfig(ctx, botID, channelType) + if err == nil { + return cfg, true, nil + } + if isChannelConfigNotFound(err) { + return ChannelConfig{}, false, nil + } + return ChannelConfig{}, false, err +} + +func (s *Lifecycle) rollbackUpsert(ctx context.Context, botID string, channelType ChannelType, hadPrevious bool, previous ChannelConfig) error { + if !hadPrevious { + if err := s.store.DeleteConfig(ctx, botID, channelType); err != nil { + return err + } + if s.controller != nil { + s.controller.RemoveConnection(ctx, botID, channelType) + } + return nil + } + + restoreReq := upsertRequestFromConfig(previous) + restored, err := s.store.UpsertConfig(ctx, botID, channelType, restoreReq) + if err != nil { + return err + } + if s.controller == nil { + return nil + } + if 
restored.Disabled { + s.controller.RemoveConnection(ctx, botID, channelType) + return nil + } + return s.controller.EnsureConnection(ctx, restored) +} + +func isChannelConfigNotFound(err error) bool { + return errors.Is(err, ErrChannelConfigNotFound) +} + +func upsertRequestFromConfig(cfg ChannelConfig) UpsertConfigRequest { + disabled := cfg.Disabled + restored := UpsertConfigRequest{ + Credentials: cloneAnyMap(cfg.Credentials), + ExternalIdentity: strings.TrimSpace(cfg.ExternalIdentity), + SelfIdentity: cloneAnyMap(cfg.SelfIdentity), + Routing: cloneAnyMap(cfg.Routing), + Disabled: &disabled, + } + if !cfg.VerifiedAt.IsZero() { + verifiedAt := cfg.VerifiedAt.UTC() + restored.VerifiedAt = &verifiedAt + } + return restored +} + +func cloneAnyMap(input map[string]any) map[string]any { + if len(input) == 0 { + return map[string]any{} + } + out := make(map[string]any, len(input)) + for key, value := range input { + out[key] = cloneAnyValue(value) + } + return out +} + +func cloneAnyValue(value any) any { + switch v := value.(type) { + case map[string]any: + return cloneAnyMap(v) + case []any: + items := make([]any, 0, len(v)) + for _, item := range v { + items = append(items, cloneAnyValue(item)) + } + return items + case []string: + items := make([]string, len(v)) + copy(items, v) + return items + case time.Time: + return v + default: + return v + } +} diff --git a/internal/channel/lifecycle_test.go b/internal/channel/lifecycle_test.go new file mode 100644 index 00000000..9c5bb096 --- /dev/null +++ b/internal/channel/lifecycle_test.go @@ -0,0 +1,294 @@ +package channel + +import ( + "context" + "errors" + "testing" +) + +type fakeLifecycleStore struct { + resolveFunc func(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) + upsertFunc func(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) + statusFunc func(ctx context.Context, botID string, channelType ChannelType, disabled 
bool) (ChannelConfig, error) + deleteFunc func(ctx context.Context, botID string, channelType ChannelType) error +} + +func (f *fakeLifecycleStore) ResolveEffectiveConfig(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) { + if f.resolveFunc == nil { + return ChannelConfig{}, ErrChannelConfigNotFound + } + return f.resolveFunc(ctx, botID, channelType) +} + +func (f *fakeLifecycleStore) UpsertConfig(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { + if f.upsertFunc == nil { + return ChannelConfig{}, nil + } + return f.upsertFunc(ctx, botID, channelType, req) +} + +func (f *fakeLifecycleStore) UpdateConfigDisabled(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) { + if f.statusFunc == nil { + return ChannelConfig{}, ErrChannelConfigNotFound + } + return f.statusFunc(ctx, botID, channelType, disabled) +} + +func (f *fakeLifecycleStore) DeleteConfig(ctx context.Context, botID string, channelType ChannelType) error { + if f.deleteFunc == nil { + return nil + } + return f.deleteFunc(ctx, botID, channelType) +} + +type fakeConnectionController struct { + ensureFunc func(ctx context.Context, cfg ChannelConfig) error + removeFunc func(ctx context.Context, botID string, channelType ChannelType) +} + +func (f *fakeConnectionController) EnsureConnection(ctx context.Context, cfg ChannelConfig) error { + if f.ensureFunc == nil { + return nil + } + return f.ensureFunc(ctx, cfg) +} + +func (f *fakeConnectionController) RemoveConnection(ctx context.Context, botID string, channelType ChannelType) { + if f.removeFunc == nil { + return + } + f.removeFunc(ctx, botID, channelType) +} + +func TestLifecycleUpsertDisabledRemovesConnection(t *testing.T) { + t.Parallel() + + removeCalled := false + store := &fakeLifecycleStore{ + upsertFunc: func(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, 
error) { + return ChannelConfig{ID: "cfg-1", BotID: botID, ChannelType: channelType, Disabled: true}, nil + }, + } + controller := &fakeConnectionController{ + removeFunc: func(ctx context.Context, botID string, channelType ChannelType) { + removeCalled = true + }, + } + service := NewLifecycle(store, controller) + disabled := true + + cfg, err := service.UpsertBotChannelConfig(context.Background(), "bot-1", ChannelType("telegram"), UpsertConfigRequest{ + Credentials: map[string]any{"botToken": "x"}, + Disabled: &disabled, + }) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if !cfg.Disabled { + t.Fatalf("expected disabled config") + } + if !removeCalled { + t.Fatalf("expected remove connection to be called") + } +} + +func TestLifecycleUpsertEnableFailureRollsBackToPrevious(t *testing.T) { + t.Parallel() + + previous := ChannelConfig{ + ID: "cfg-prev", + BotID: "bot-1", + ChannelType: ChannelType("telegram"), + Credentials: map[string]any{"botToken": "old"}, + Disabled: false, + } + newConfig := ChannelConfig{ + ID: "cfg-new", + BotID: "bot-1", + ChannelType: ChannelType("telegram"), + Credentials: map[string]any{"botToken": "new"}, + Disabled: false, + } + upsertCalls := 0 + ensureCalls := 0 + store := &fakeLifecycleStore{ + resolveFunc: func(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) { + return previous, nil + }, + upsertFunc: func(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { + upsertCalls++ + if upsertCalls == 1 { + return newConfig, nil + } + if token := ReadString(req.Credentials, "botToken"); token != "old" { + t.Fatalf("expected rollback credentials old, got %s", token) + } + return previous, nil + }, + } + controller := &fakeConnectionController{ + ensureFunc: func(ctx context.Context, cfg ChannelConfig) error { + ensureCalls++ + if ensureCalls == 1 { + return errors.New("dial failed") + } + return nil + }, + } + service := 
NewLifecycle(store, controller) + enabled := false + + _, err := service.UpsertBotChannelConfig(context.Background(), "bot-1", ChannelType("telegram"), UpsertConfigRequest{ + Credentials: map[string]any{"botToken": "new"}, + Disabled: &enabled, + }) + if err == nil { + t.Fatalf("expected error, got nil") + } + if upsertCalls != 2 { + t.Fatalf("expected 2 upsert calls (write + rollback), got %d", upsertCalls) + } + if ensureCalls != 2 { + t.Fatalf("expected 2 ensure calls (new + restore), got %d", ensureCalls) + } +} + +func TestLifecycleUpsertEnableFailureWithoutPreviousDeletesNewConfig(t *testing.T) { + t.Parallel() + + deleteCalls := 0 + store := &fakeLifecycleStore{ + resolveFunc: func(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) { + return ChannelConfig{}, ErrChannelConfigNotFound + }, + upsertFunc: func(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { + return ChannelConfig{ + ID: "cfg-new", + BotID: botID, + ChannelType: channelType, + Credentials: map[string]any{"botToken": "new"}, + }, nil + }, + deleteFunc: func(ctx context.Context, botID string, channelType ChannelType) error { + deleteCalls++ + return nil + }, + } + controller := &fakeConnectionController{ + ensureFunc: func(ctx context.Context, cfg ChannelConfig) error { + return errors.New("start failed") + }, + } + service := NewLifecycle(store, controller) + enabled := false + + _, err := service.UpsertBotChannelConfig(context.Background(), "bot-1", ChannelType("telegram"), UpsertConfigRequest{ + Credentials: map[string]any{"botToken": "new"}, + Disabled: &enabled, + }) + if err == nil { + t.Fatalf("expected error, got nil") + } + if deleteCalls != 1 { + t.Fatalf("expected 1 delete call for rollback, got %d", deleteCalls) + } +} + +func TestLifecycleDeleteStopsConnection(t *testing.T) { + t.Parallel() + + removeCalled := false + store := &fakeLifecycleStore{ + deleteFunc: func(ctx context.Context, botID 
string, channelType ChannelType) error { + return nil + }, + } + controller := &fakeConnectionController{ + removeFunc: func(ctx context.Context, botID string, channelType ChannelType) { + removeCalled = true + }, + } + service := NewLifecycle(store, controller) + + if err := service.DeleteBotChannelConfig(context.Background(), "bot-1", ChannelType("telegram")); err != nil { + t.Fatalf("expected no error, got %v", err) + } + if !removeCalled { + t.Fatalf("expected remove connection to be called") + } +} + +func TestLifecycleSetBotChannelStatusDisable(t *testing.T) { + t.Parallel() + + removeCalled := false + store := &fakeLifecycleStore{ + statusFunc: func(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) { + if !disabled { + t.Fatalf("expected disabled=true update") + } + return ChannelConfig{ID: "cfg-1", BotID: botID, ChannelType: channelType, Disabled: true}, nil + }, + } + controller := &fakeConnectionController{ + removeFunc: func(ctx context.Context, botID string, channelType ChannelType) { + removeCalled = true + }, + } + service := NewLifecycle(store, controller) + + cfg, err := service.SetBotChannelStatus(context.Background(), "bot-1", ChannelType("telegram"), true) + if err != nil { + t.Fatalf("expected no error, got %v", err) + } + if !cfg.Disabled { + t.Fatalf("expected disabled config") + } + if !removeCalled { + t.Fatalf("expected remove connection to be called") + } +} + +func TestLifecycleSetBotChannelStatusEnableFailureRollsBack(t *testing.T) { + t.Parallel() + + statusCalls := 0 + removeCalled := false + store := &fakeLifecycleStore{ + statusFunc: func(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) { + statusCalls++ + if statusCalls == 1 && disabled { + t.Fatalf("first status update should enable config") + } + if statusCalls == 2 && !disabled { + t.Fatalf("second status update should rollback to disabled=true") + } + return ChannelConfig{ + ID: 
"cfg-1", + BotID: botID, + ChannelType: channelType, + Disabled: disabled, + }, nil + }, + } + controller := &fakeConnectionController{ + ensureFunc: func(ctx context.Context, cfg ChannelConfig) error { + return errors.New("start failed") + }, + removeFunc: func(ctx context.Context, botID string, channelType ChannelType) { + removeCalled = true + }, + } + service := NewLifecycle(store, controller) + + _, err := service.SetBotChannelStatus(context.Background(), "bot-1", ChannelType("telegram"), false) + if err == nil { + t.Fatalf("expected error, got nil") + } + if statusCalls != 2 { + t.Fatalf("expected 2 status updates, got %d", statusCalls) + } + if !removeCalled { + t.Fatalf("expected remove connection to be called on failed enable") + } +} diff --git a/internal/channel/manager.go b/internal/channel/manager.go index f67d2cba..cf36afce 100644 --- a/internal/channel/manager.go +++ b/internal/channel/manager.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "log/slog" + "sort" "strings" "sync" "time" @@ -44,6 +45,16 @@ type ManagerStore interface { ConfigResolver } +// ConnectionStatus describes runtime status for one configured channel connection. +type ConnectionStatus struct { + ConfigID string `json:"config_id"` + BotID string `json:"bot_id"` + ChannelType ChannelType `json:"channel_type"` + Running bool `json:"running"` + LastError string `json:"last_error,omitempty"` + UpdatedAt time.Time `json:"updated_at"` +} + // Manager coordinates channel adapters, connection lifecycle, and message dispatch. // Connection lifecycle lives in connection.go, inbound dispatch in inbound.go, // and outbound pipeline in outbound.go. @@ -63,6 +74,7 @@ type Manager struct { mu sync.Mutex refreshMu sync.Mutex connections map[string]*connectionEntry + connectionMeta map[string]ConnectionStatus } // NewManager creates a Manager with the given logger, registry, config store, and inbound processor. 
@@ -77,8 +89,9 @@ func NewManager(log *slog.Logger, registry *Registry, service ManagerStore, proc registry: registry, service: service, processor: processor, - refreshInterval: 30 * time.Second, + refreshInterval: 5 * time.Minute, connections: map[string]*connectionEntry{}, + connectionMeta: map[string]ConnectionStatus{}, logger: log.With(slog.String("component", "channel")), middlewares: []Middleware{}, inboundQueue: make(chan inboundTask, 256), @@ -140,6 +153,15 @@ func (m *Manager) RemoveAdapter(ctx context.Context, channelType ChannelType) { m.registry.Unregister(channelType) } +// Refresh performs a full reconcile of all adapter connections against the DB. +// Prefer EnsureConnection / RemoveConnection for targeted changes after API operations. +// Refresh is mainly used at startup and as a periodic safety net. +func (m *Manager) Refresh(ctx context.Context) { + if ctx != nil { + m.refresh(ctx) + } +} + // Start begins the periodic config refresh loop and inbound worker pool. func (m *Manager) Start(ctx context.Context) { if m.logger != nil { @@ -182,7 +204,7 @@ func (m *Manager) Send(ctx context.Context, botID string, channelType ChannelTyp if target == "" { targetChannelIdentityID := strings.TrimSpace(req.ChannelIdentityID) if targetChannelIdentityID == "" { - return fmt.Errorf("target or user_id is required") + return fmt.Errorf("target or channel_identity_id is required") } userCfg, err := m.service.GetChannelIdentityConfig(ctx, targetChannelIdentityID, channelType) if err != nil { @@ -274,3 +296,26 @@ func (m *Manager) Shutdown(ctx context.Context) error { m.stopAll(ctx) return nil } + +// ConnectionStatusesByBot returns observed channel connection statuses for a bot. 
+func (m *Manager) ConnectionStatusesByBot(botID string) []ConnectionStatus { + botID = strings.TrimSpace(botID) + if botID == "" { + return []ConnectionStatus{} + } + m.mu.Lock() + defer m.mu.Unlock() + items := make([]ConnectionStatus, 0, len(m.connectionMeta)) + for _, status := range m.connectionMeta { + if status.BotID == botID { + items = append(items, status) + } + } + sort.Slice(items, func(i, j int) bool { + if items[i].ChannelType == items[j].ChannelType { + return items[i].ConfigID < items[j].ConfigID + } + return items[i].ChannelType < items[j].ChannelType + }) + return items +} diff --git a/internal/channel/manager_integration_test.go b/internal/channel/manager_integration_test.go index fc296014..fd45bb3e 100644 --- a/internal/channel/manager_integration_test.go +++ b/internal/channel/manager_integration_test.go @@ -2,6 +2,7 @@ package channel import ( "context" + "errors" "fmt" "io" "log/slog" @@ -71,8 +72,10 @@ func (f *fakeInboundProcessorIntegration) HandleInbound(ctx context.Context, cfg type fakeAdapter struct { channelType ChannelType + connectErr error mu sync.Mutex started []ChannelConfig + connectCtxs []context.Context sent []OutboundMessage stops int } @@ -96,8 +99,12 @@ func (f *fakeAdapter) ResolveTarget(channelIdentityConfig map[string]any) (strin func (f *fakeAdapter) NormalizeTarget(raw string) string { return strings.TrimSpace(raw) } func (f *fakeAdapter) Connect(ctx context.Context, cfg ChannelConfig, handler InboundHandler) (Connection, error) { + if f.connectErr != nil { + return nil, f.connectErr + } f.mu.Lock() f.started = append(f.started, cfg) + f.connectCtxs = append(f.connectCtxs, ctx) f.mu.Unlock() stop := func(context.Context) error { f.mu.Lock() @@ -228,7 +235,18 @@ func TestManagerReconcileStartsAndStops(t *testing.T) { UpdatedAt: time.Now(), } manager.reconcile(context.Background(), []ChannelConfig{cfg}) + statuses := manager.ConnectionStatusesByBot("bot-1") + if len(statuses) != 1 { + t.Fatalf("expected 1 status after 
start, got %d", len(statuses)) + } + if !statuses[0].Running { + t.Fatalf("expected running status after start") + } manager.reconcile(context.Background(), nil) + statuses = manager.ConnectionStatusesByBot("bot-1") + if len(statuses) != 0 { + t.Fatalf("expected 0 status after remove, got %d", len(statuses)) + } adapter.mu.Lock() defer adapter.mu.Unlock() @@ -239,3 +257,71 @@ func TestManagerReconcileStartsAndStops(t *testing.T) { t.Fatalf("expected 1 stop, got %d", adapter.stops) } } + +func TestManagerConnectionStatusesByBotTracksConnectFailure(t *testing.T) { + t.Parallel() + + log := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{})) + store := &fakeConfigStore{} + reg := NewRegistry() + adapter := &fakeAdapter{ + channelType: ChannelType("test"), + connectErr: errors.New("dial failed"), + } + manager := NewManager(log, reg, store, &fakeInboundProcessorIntegration{}) + manager.RegisterAdapter(adapter) + + cfg := ChannelConfig{ + ID: "cfg-fail-1", + BotID: "bot-1", + ChannelType: ChannelType("test"), + Credentials: map[string]any{"botToken": "token"}, + UpdatedAt: time.Now(), + } + manager.reconcile(context.Background(), []ChannelConfig{cfg}) + + statuses := manager.ConnectionStatusesByBot("bot-1") + if len(statuses) != 1 { + t.Fatalf("expected 1 status, got %d", len(statuses)) + } + if statuses[0].Running { + t.Fatalf("expected non-running status on connect failure") + } + if statuses[0].LastError == "" { + t.Fatalf("expected last error on connect failure") + } +} + +func TestManagerEnsureConnectionDetachesRequestContext(t *testing.T) { + t.Parallel() + + log := slog.New(slog.NewTextHandler(io.Discard, &slog.HandlerOptions{})) + store := &fakeConfigStore{} + reg := NewRegistry() + adapter := &fakeAdapter{channelType: ChannelType("test")} + manager := NewManager(log, reg, store, &fakeInboundProcessorIntegration{}) + manager.RegisterAdapter(adapter) + + cfg := ChannelConfig{ + ID: "cfg-1", + BotID: "bot-1", + ChannelType: ChannelType("test"), + 
Credentials: map[string]any{"token": "x"}, + UpdatedAt: time.Now(), + } + reqCtx, cancel := context.WithCancel(context.Background()) + if err := manager.EnsureConnection(reqCtx, cfg); err != nil { + cancel() + t.Fatalf("expected no error, got %v", err) + } + cancel() + + adapter.mu.Lock() + defer adapter.mu.Unlock() + if len(adapter.connectCtxs) != 1 { + t.Fatalf("expected 1 connect context, got %d", len(adapter.connectCtxs)) + } + if err := adapter.connectCtxs[0].Err(); err != nil { + t.Fatalf("expected detached context to remain active, got %v", err) + } +} diff --git a/internal/channel/outbound.go b/internal/channel/outbound.go index 4110b6e3..357fe538 100644 --- a/internal/channel/outbound.go +++ b/internal/channel/outbound.go @@ -377,13 +377,19 @@ func normalizeAttachmentRefs(attachments []Attachment, defaultPlatform ChannelTy item := att item.URL = strings.TrimSpace(item.URL) item.PlatformKey = strings.TrimSpace(item.PlatformKey) + item.AssetID = strings.TrimSpace(item.AssetID) item.SourcePlatform = strings.TrimSpace(item.SourcePlatform) if item.SourcePlatform == "" && item.PlatformKey != "" { item.SourcePlatform = defaultPlatform.String() } - if item.URL == "" && item.PlatformKey == "" { + if item.URL == "" && item.PlatformKey == "" && item.AssetID == "" { return nil, fmt.Errorf("attachment reference is required") } + // asset_id-only attachments require media resolution before dispatch. + // Adapters expect url or platform_key; fail loudly if neither is available. 
+ if item.URL == "" && item.PlatformKey == "" && item.AssetID != "" { + return nil, fmt.Errorf("attachment %s has asset_id but no sendable url or platform_key; media resolution required before dispatch", item.AssetID) + } normalized = append(normalized, item) } return normalized, nil @@ -412,6 +418,30 @@ func validateStreamEvent(registry *Registry, channelType ChannelType, event Stre if !caps.Streaming && !caps.BlockStreaming { return fmt.Errorf("channel does not support streaming") } + case StreamEventPhaseStart, StreamEventPhaseEnd: + if !caps.Streaming && !caps.BlockStreaming { + return fmt.Errorf("channel does not support streaming") + } + case StreamEventToolCallStart, StreamEventToolCallEnd: + if !caps.Streaming && !caps.BlockStreaming { + return fmt.Errorf("channel does not support streaming") + } + if event.ToolCall == nil { + return fmt.Errorf("stream tool call payload is required") + } + case StreamEventAttachment: + if len(event.Attachments) == 0 { + return fmt.Errorf("stream attachments are required") + } + if _, err := normalizeAttachmentRefs(event.Attachments, channelType); err != nil { + return err + } + case StreamEventAgentStart, StreamEventAgentEnd, StreamEventProcessingStarted, StreamEventProcessingCompleted: + return nil + case StreamEventProcessingFailed: + if strings.TrimSpace(event.Error) == "" { + return fmt.Errorf("processing failure error is required") + } case StreamEventFinal: if event.Final == nil { return fmt.Errorf("stream final payload is required") diff --git a/internal/channel/outbound_test.go b/internal/channel/outbound_test.go new file mode 100644 index 00000000..e509bd90 --- /dev/null +++ b/internal/channel/outbound_test.go @@ -0,0 +1,98 @@ +package channel + +import ( + "testing" +) + +type streamValidationAdapter struct { + channelType ChannelType +} + +func (a *streamValidationAdapter) Type() ChannelType { + return a.channelType +} + +func (a *streamValidationAdapter) Descriptor() Descriptor { + return Descriptor{ + Type: 
a.channelType, + DisplayName: "stream-validation", + Capabilities: ChannelCapabilities{ + Text: true, + Attachments: true, + Streaming: true, + BlockStreaming: true, + }, + } +} + +func newStreamValidationRegistry(t *testing.T) *Registry { + t.Helper() + registry := NewRegistry() + if err := registry.Register(&streamValidationAdapter{channelType: ChannelType("test")}); err != nil { + t.Fatalf("register adapter failed: %v", err) + } + return registry +} + +func TestValidateStreamEventSupportedTypes(t *testing.T) { + t.Parallel() + + registry := newStreamValidationRegistry(t) + channelType := ChannelType("test") + tests := []struct { + name string + event StreamEvent + }{ + {name: "status", event: StreamEvent{Type: StreamEventStatus, Status: StreamStatusStarted}}, + {name: "delta", event: StreamEvent{Type: StreamEventDelta, Delta: "hello"}}, + {name: "phase start", event: StreamEvent{Type: StreamEventPhaseStart, Phase: StreamPhaseText}}, + {name: "phase end", event: StreamEvent{Type: StreamEventPhaseEnd, Phase: StreamPhaseText}}, + {name: "tool start", event: StreamEvent{Type: StreamEventToolCallStart, ToolCall: &StreamToolCall{Name: "search"}}}, + {name: "tool end", event: StreamEvent{Type: StreamEventToolCallEnd, ToolCall: &StreamToolCall{Name: "search"}}}, + {name: "attachment", event: StreamEvent{Type: StreamEventAttachment, Attachments: []Attachment{{Type: AttachmentImage, URL: "https://example.com/img.png"}}}}, + {name: "agent start", event: StreamEvent{Type: StreamEventAgentStart}}, + {name: "agent end", event: StreamEvent{Type: StreamEventAgentEnd}}, + {name: "processing started", event: StreamEvent{Type: StreamEventProcessingStarted}}, + {name: "processing completed", event: StreamEvent{Type: StreamEventProcessingCompleted}}, + {name: "processing failed", event: StreamEvent{Type: StreamEventProcessingFailed, Error: "failed"}}, + {name: "final", event: StreamEvent{Type: StreamEventFinal, Final: &StreamFinalizePayload{Message: Message{Text: "done"}}}}, + 
{name: "error", event: StreamEvent{Type: StreamEventError, Error: "boom"}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if err := validateStreamEvent(registry, channelType, tt.event); err != nil { + t.Fatalf("expected nil error, got %v", err) + } + }) + } +} + +func TestValidateStreamEventInvalidPayload(t *testing.T) { + t.Parallel() + + registry := newStreamValidationRegistry(t) + channelType := ChannelType("test") + tests := []struct { + name string + event StreamEvent + }{ + {name: "missing status", event: StreamEvent{Type: StreamEventStatus}}, + {name: "missing tool call payload", event: StreamEvent{Type: StreamEventToolCallStart}}, + {name: "empty attachment payload", event: StreamEvent{Type: StreamEventAttachment}}, + {name: "processing failed missing error", event: StreamEvent{Type: StreamEventProcessingFailed}}, + {name: "missing final payload", event: StreamEvent{Type: StreamEventFinal}}, + {name: "missing error payload", event: StreamEvent{Type: StreamEventError}}, + {name: "unsupported type", event: StreamEvent{Type: StreamEventType("unknown")}}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + if err := validateStreamEvent(registry, channelType, tt.event); err == nil { + t.Fatalf("expected error for %s", tt.name) + } + }) + } +} diff --git a/internal/channel/registry.go b/internal/channel/registry.go index 1bfa8015..439f6adf 100644 --- a/internal/channel/registry.go +++ b/internal/channel/registry.go @@ -246,6 +246,17 @@ func (r *Registry) GetProcessingStatusNotifier(channelType ChannelType) (Process return notifier, ok } +// GetAttachmentResolver returns the AttachmentResolver for the given channel +// type, or nil if unsupported. 
+func (r *Registry) GetAttachmentResolver(channelType ChannelType) (AttachmentResolver, bool) { + adapter, ok := r.Get(channelType) + if !ok { + return nil, false + } + resolver, ok := adapter.(AttachmentResolver) + return resolver, ok +} + // DiscoverSelf calls the SelfDiscoverer for the given channel type if supported. func (r *Registry) DiscoverSelf(ctx context.Context, channelType ChannelType, credentials map[string]any) (map[string]any, string, error) { adapter, ok := r.Get(channelType) diff --git a/internal/channel/registry_test.go b/internal/channel/registry_test.go index c27c3875..4a45ab5b 100644 --- a/internal/channel/registry_test.go +++ b/internal/channel/registry_test.go @@ -2,6 +2,8 @@ package channel_test import ( "context" + "io" + "strings" "testing" "github.com/memohai/memoh/internal/channel" @@ -61,3 +63,41 @@ func TestDirectoryAdapter_UnknownType(t *testing.T) { t.Fatalf("DirectoryAdapter(unknown) = (%v, %v), want (nil, false)", dir, ok) } } + +type attachmentResolverMockAdapter struct{} + +func (a *attachmentResolverMockAdapter) Type() channel.ChannelType { + return channel.ChannelType("attachment-test") +} + +func (a *attachmentResolverMockAdapter) Descriptor() channel.Descriptor { + return channel.Descriptor{Type: channel.ChannelType("attachment-test"), DisplayName: "AttachmentTest"} +} + +func (a *attachmentResolverMockAdapter) ResolveAttachment(ctx context.Context, cfg channel.ChannelConfig, attachment channel.Attachment) (channel.AttachmentPayload, error) { + return channel.AttachmentPayload{ + Reader: io.NopCloser(strings.NewReader("payload")), + Mime: "text/plain", + Name: "payload.txt", + Size: 7, + }, nil +} + +func TestGetAttachmentResolver_Supported(t *testing.T) { + t.Parallel() + reg := channel.NewRegistry() + reg.MustRegister(&attachmentResolverMockAdapter{}) + resolver, ok := reg.GetAttachmentResolver(channel.ChannelType("attachment-test")) + if !ok || resolver == nil { + t.Fatalf("GetAttachmentResolver should return resolver for 
supported adapter") + } +} + +func TestGetAttachmentResolver_Unsupported(t *testing.T) { + t.Parallel() + reg := newTestConfigRegistry() + resolver, ok := reg.GetAttachmentResolver(testChannelType) + if ok || resolver != nil { + t.Fatalf("GetAttachmentResolver(test) = (%v, %v), want (nil, false)", resolver, ok) + } +} diff --git a/internal/channel/service.go b/internal/channel/service.go index f3ba5c1c..f83ad993 100644 --- a/internal/channel/service.go +++ b/internal/channel/service.go @@ -15,22 +15,25 @@ import ( "github.com/memohai/memoh/internal/db/sqlc" ) -// Service provides CRUD operations for channel configurations, user bindings, and sessions. -type Service struct { +// ErrChannelConfigNotFound indicates the bot has no persisted config for the channel type. +var ErrChannelConfigNotFound = errors.New("channel config not found") + +// Store provides CRUD operations for channel configurations, user bindings, and sessions. +type Store struct { queries *sqlc.Queries registry *Registry } -// NewService creates a Service backed by the given database queries and adapter registry. -func NewService(queries *sqlc.Queries, registry *Registry) *Service { +// NewStore creates a Store backed by the given database queries and adapter registry. +func NewStore(queries *sqlc.Queries, registry *Registry) *Store { if registry == nil { registry = NewRegistry() } - return &Service{queries: queries, registry: registry} + return &Store{queries: queries, registry: registry} } // UpsertConfig creates or updates a bot's channel configuration. 
-func (s *Service) UpsertConfig(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { +func (s *Store) UpsertConfig(ctx context.Context, botID string, channelType ChannelType, req UpsertConfigRequest) (ChannelConfig, error) { if s.queries == nil { return ChannelConfig{}, fmt.Errorf("channel queries not configured") } @@ -76,9 +79,9 @@ func (s *Service) UpsertConfig(ctx context.Context, botID string, channelType Ch if err != nil { return ChannelConfig{}, err } - status, err := normalizeChannelConfigStatus(req.Status) - if err != nil { - return ChannelConfig{}, err + disabled := false + if req.Disabled != nil { + disabled = *req.Disabled } verifiedAt := pgtype.Timestamptz{Valid: false} if req.VerifiedAt != nil { @@ -95,34 +98,61 @@ func (s *Service) UpsertConfig(ctx context.Context, botID string, channelType Ch SelfIdentity: selfPayload, Routing: routingPayload, Capabilities: []byte("{}"), - Status: status, + Disabled: disabled, VerifiedAt: verifiedAt, }) if err != nil { return ChannelConfig{}, err } - return normalizeChannelConfig(row) + return normalizeChannelConfigFromRow(row) } -func normalizeChannelConfigStatus(raw string) (string, error) { - status := strings.ToLower(strings.TrimSpace(raw)) - if status == "" { - return "pending", nil +// DeleteConfig removes a bot's channel configuration. 
+func (s *Store) DeleteConfig(ctx context.Context, botID string, channelType ChannelType) error { + if s.queries == nil { + return fmt.Errorf("channel queries not configured") } - switch status { - case "pending", "verified", "disabled": - return status, nil - case "active": - return "verified", nil - case "inactive": - return "disabled", nil - default: - return "", fmt.Errorf("invalid channel status: %s", raw) + if channelType == "" { + return fmt.Errorf("channel type is required") } + botUUID, err := db.ParseUUID(botID) + if err != nil { + return err + } + return s.queries.DeleteBotChannelConfig(ctx, sqlc.DeleteBotChannelConfigParams{ + BotID: botUUID, + ChannelType: channelType.String(), + }) +} + +// UpdateConfigDisabled updates only the disabled flag for a bot channel config and returns latest config. +func (s *Store) UpdateConfigDisabled(ctx context.Context, botID string, channelType ChannelType, disabled bool) (ChannelConfig, error) { + if s.queries == nil { + return ChannelConfig{}, fmt.Errorf("channel queries not configured") + } + if channelType == "" { + return ChannelConfig{}, fmt.Errorf("channel type is required") + } + botUUID, err := db.ParseUUID(botID) + if err != nil { + return ChannelConfig{}, err + } + row, err := s.queries.UpdateBotChannelConfigDisabled(ctx, sqlc.UpdateBotChannelConfigDisabledParams{ + BotID: botUUID, + ChannelType: channelType.String(), + Disabled: disabled, + }) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return ChannelConfig{}, fmt.Errorf("%w", ErrChannelConfigNotFound) + } + return ChannelConfig{}, err + } + return normalizeChannelConfigFromRow(row) } // UpsertChannelIdentityConfig creates or updates a channel identity's channel binding. 
-func (s *Service) UpsertChannelIdentityConfig(ctx context.Context, channelIdentityID string, channelType ChannelType, req UpsertChannelIdentityConfigRequest) (ChannelIdentityBinding, error) { +func (s *Store) UpsertChannelIdentityConfig(ctx context.Context, channelIdentityID string, channelType ChannelType, req UpsertChannelIdentityConfigRequest) (ChannelIdentityBinding, error) { if s.queries == nil { return ChannelIdentityBinding{}, fmt.Errorf("channel queries not configured") } @@ -154,7 +184,7 @@ func (s *Service) UpsertChannelIdentityConfig(ctx context.Context, channelIdenti // ResolveEffectiveConfig returns the active channel configuration for a bot. // For configless channel types, a synthetic config is returned. -func (s *Service) ResolveEffectiveConfig(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) { +func (s *Store) ResolveEffectiveConfig(ctx context.Context, botID string, channelType ChannelType) (ChannelConfig, error) { if s.queries == nil { return ChannelConfig{}, fmt.Errorf("channel queries not configured") } @@ -177,16 +207,16 @@ func (s *Service) ResolveEffectiveConfig(ctx context.Context, botID string, chan ChannelType: channelType.String(), }) if err == nil { - return normalizeChannelConfig(row) + return normalizeChannelConfigFromGetRow(row) } if !errors.Is(err, pgx.ErrNoRows) { return ChannelConfig{}, err } - return ChannelConfig{}, fmt.Errorf("channel config not found") + return ChannelConfig{}, fmt.Errorf("%w", ErrChannelConfigNotFound) } // ListConfigsByType returns all channel configurations of the given type. 
-func (s *Service) ListConfigsByType(ctx context.Context, channelType ChannelType) ([]ChannelConfig, error) { +func (s *Store) ListConfigsByType(ctx context.Context, channelType ChannelType) ([]ChannelConfig, error) { if s.queries == nil { return nil, fmt.Errorf("channel queries not configured") } @@ -199,7 +229,7 @@ func (s *Service) ListConfigsByType(ctx context.Context, channelType ChannelType } items := make([]ChannelConfig, 0, len(rows)) for _, row := range rows { - item, err := normalizeChannelConfig(row) + item, err := normalizeChannelConfigFromListRow(row) if err != nil { return nil, err } @@ -209,7 +239,7 @@ func (s *Service) ListConfigsByType(ctx context.Context, channelType ChannelType } // GetChannelIdentityConfig returns the channel identity's channel binding for the given channel type. -func (s *Service) GetChannelIdentityConfig(ctx context.Context, channelIdentityID string, channelType ChannelType) (ChannelIdentityBinding, error) { +func (s *Store) GetChannelIdentityConfig(ctx context.Context, channelIdentityID string, channelType ChannelType) (ChannelIdentityBinding, error) { if s.queries == nil { return ChannelIdentityBinding{}, fmt.Errorf("channel queries not configured") } @@ -245,7 +275,7 @@ func (s *Service) GetChannelIdentityConfig(ctx context.Context, channelIdentityI } // ListChannelIdentityConfigsByType returns all channel identity bindings for the given channel type. -func (s *Service) ListChannelIdentityConfigsByType(ctx context.Context, channelType ChannelType) ([]ChannelIdentityBinding, error) { +func (s *Store) ListChannelIdentityConfigsByType(ctx context.Context, channelType ChannelType) ([]ChannelIdentityBinding, error) { if s.queries == nil { return nil, fmt.Errorf("channel queries not configured") } @@ -265,7 +295,7 @@ func (s *Service) ListChannelIdentityConfigsByType(ctx context.Context, channelT } // ResolveChannelIdentityBinding finds the channel identity ID whose channel binding matches the given criteria. 
-func (s *Service) ResolveChannelIdentityBinding(ctx context.Context, channelType ChannelType, criteria BindingCriteria) (string, error) { +func (s *Store) ResolveChannelIdentityBinding(ctx context.Context, channelType ChannelType, criteria BindingCriteria) (string, error) { rows, err := s.ListChannelIdentityConfigsByType(ctx, channelType) if err != nil { return "", err @@ -281,39 +311,67 @@ func (s *Service) ResolveChannelIdentityBinding(ctx context.Context, channelType return "", fmt.Errorf("channel user binding not found") } -func normalizeChannelConfig(row sqlc.BotChannelConfig) (ChannelConfig, error) { - credentials, err := DecodeConfigMap(row.Credentials) +func normalizeChannelConfigFromRow(row sqlc.BotChannelConfig) (ChannelConfig, error) { + return normalizeChannelConfigFields( + row.ID, row.BotID, row.ChannelType, + row.Credentials, row.ExternalIdentity, row.SelfIdentity, row.Routing, + row.Disabled, row.VerifiedAt, row.CreatedAt, row.UpdatedAt, + ) +} + +func normalizeChannelConfigFromGetRow(row sqlc.BotChannelConfig) (ChannelConfig, error) { + return normalizeChannelConfigFields( + row.ID, row.BotID, row.ChannelType, + row.Credentials, row.ExternalIdentity, row.SelfIdentity, row.Routing, + row.Disabled, row.VerifiedAt, row.CreatedAt, row.UpdatedAt, + ) +} + +func normalizeChannelConfigFromListRow(row sqlc.BotChannelConfig) (ChannelConfig, error) { + return normalizeChannelConfigFields( + row.ID, row.BotID, row.ChannelType, + row.Credentials, row.ExternalIdentity, row.SelfIdentity, row.Routing, + row.Disabled, row.VerifiedAt, row.CreatedAt, row.UpdatedAt, + ) +} + +func normalizeChannelConfigFields( + id, botID pgtype.UUID, channelType string, + credentials []byte, externalIdentity pgtype.Text, selfIdentity, routing []byte, + disabled bool, verifiedAt, createdAt, updatedAt pgtype.Timestamptz, +) (ChannelConfig, error) { + credentialsMap, err := DecodeConfigMap(credentials) if err != nil { return ChannelConfig{}, err } - selfIdentity, err := 
DecodeConfigMap(row.SelfIdentity) + selfIdentityMap, err := DecodeConfigMap(selfIdentity) if err != nil { return ChannelConfig{}, err } - routing, err := DecodeConfigMap(row.Routing) + routingMap, err := DecodeConfigMap(routing) if err != nil { return ChannelConfig{}, err } - verifiedAt := time.Time{} - if row.VerifiedAt.Valid { - verifiedAt = row.VerifiedAt.Time + verifiedAtTime := time.Time{} + if verifiedAt.Valid { + verifiedAtTime = verifiedAt.Time } - externalIdentity := "" - if row.ExternalIdentity.Valid { - externalIdentity = strings.TrimSpace(row.ExternalIdentity.String) + externalIdentityStr := "" + if externalIdentity.Valid { + externalIdentityStr = strings.TrimSpace(externalIdentity.String) } return ChannelConfig{ - ID: row.ID.String(), - BotID: row.BotID.String(), - ChannelType: ChannelType(row.ChannelType), - Credentials: credentials, - ExternalIdentity: externalIdentity, - SelfIdentity: selfIdentity, - Routing: routing, - Status: strings.TrimSpace(row.Status), - VerifiedAt: verifiedAt, - CreatedAt: db.TimeFromPg(row.CreatedAt), - UpdatedAt: db.TimeFromPg(row.UpdatedAt), + ID: id.String(), + BotID: botID.String(), + ChannelType: ChannelType(channelType), + Credentials: credentialsMap, + ExternalIdentity: externalIdentityStr, + SelfIdentity: selfIdentityMap, + Routing: routingMap, + Disabled: disabled, + VerifiedAt: verifiedAtTime, + CreatedAt: db.TimeFromPg(createdAt), + UpdatedAt: db.TimeFromPg(updatedAt), }, nil } diff --git a/internal/channel/types.go b/internal/channel/types.go index d54e88a6..32f9ba81 100644 --- a/internal/channel/types.go +++ b/internal/channel/types.go @@ -90,10 +90,20 @@ type OutboundMessage struct { type StreamEventType string const ( - StreamEventStatus StreamEventType = "status" - StreamEventDelta StreamEventType = "delta" - StreamEventFinal StreamEventType = "final" - StreamEventError StreamEventType = "error" + StreamEventStatus StreamEventType = "status" + StreamEventDelta StreamEventType = "delta" + StreamEventFinal 
StreamEventType = "final" + StreamEventError StreamEventType = "error" + StreamEventToolCallStart StreamEventType = "tool_call_start" + StreamEventToolCallEnd StreamEventType = "tool_call_end" + StreamEventPhaseStart StreamEventType = "phase_start" + StreamEventPhaseEnd StreamEventType = "phase_end" + StreamEventAttachment StreamEventType = "attachment" + StreamEventAgentStart StreamEventType = "agent_start" + StreamEventAgentEnd StreamEventType = "agent_end" + StreamEventProcessingStarted StreamEventType = "processing_started" + StreamEventProcessingCompleted StreamEventType = "processing_completed" + StreamEventProcessingFailed StreamEventType = "processing_failed" ) // StreamStatus indicates the lifecycle state of a streaming reply. @@ -110,14 +120,33 @@ type StreamFinalizePayload struct { Message Message `json:"message"` } +// StreamToolCall carries tool invocation data for tool_call_start / tool_call_end events. +type StreamToolCall struct { + Name string `json:"name"` + CallID string `json:"call_id,omitempty"` + Input any `json:"input,omitempty"` + Result any `json:"result,omitempty"` +} + +// StreamPhase labels a processing stage within a stream (e.g., reasoning, text). +type StreamPhase string + +const ( + StreamPhaseReasoning StreamPhase = "reasoning" + StreamPhaseText StreamPhase = "text" +) + // StreamEvent represents a unified stream event routed through the channel layer. 
type StreamEvent struct { - Type StreamEventType `json:"type"` - Status StreamStatus `json:"status,omitempty"` - Delta string `json:"delta,omitempty"` - Final *StreamFinalizePayload `json:"final,omitempty"` - Error string `json:"error,omitempty"` - Metadata map[string]any `json:"metadata,omitempty"` + Type StreamEventType `json:"type"` + Status StreamStatus `json:"status,omitempty"` + Delta string `json:"delta,omitempty"` + Final *StreamFinalizePayload `json:"final,omitempty"` + Error string `json:"error,omitempty"` + ToolCall *StreamToolCall `json:"tool_call,omitempty"` + Phase StreamPhase `json:"phase,omitempty"` + Attachments []Attachment `json:"attachments,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` } // StreamOptions configures how an outbound stream is initialized. @@ -187,6 +216,8 @@ type Attachment struct { URL string `json:"url,omitempty"` PlatformKey string `json:"platform_key,omitempty"` SourcePlatform string `json:"source_platform,omitempty"` + AssetID string `json:"asset_id,omitempty"` + Base64 string `json:"base64,omitempty"` // data URL for agent delivery Name string `json:"name,omitempty"` Size int64 `json:"size,omitempty"` Mime string `json:"mime,omitempty"` @@ -305,6 +336,7 @@ func BindingCriteriaFromIdentity(identity Identity) BindingCriteria { } // ChannelConfig holds the configuration for a bot's channel integration. +// Disabled: true means the channel is stopped (not connected); false means enabled. 
type ChannelConfig struct { ID string `json:"id"` BotID string `json:"bot_id"` @@ -313,7 +345,7 @@ type ChannelConfig struct { ExternalIdentity string `json:"external_identity"` SelfIdentity map[string]any `json:"self_identity"` Routing map[string]any `json:"routing"` - Status string `json:"status"` + Disabled bool `json:"disabled"` VerifiedAt time.Time `json:"verified_at"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` @@ -330,12 +362,13 @@ type ChannelIdentityBinding struct { } // UpsertConfigRequest is the input for creating or updating a channel configuration. +// Disabled: true to stop the channel, false to enable it. Omitted is treated as false (enabled). type UpsertConfigRequest struct { Credentials map[string]any `json:"credentials"` ExternalIdentity string `json:"external_identity,omitempty"` SelfIdentity map[string]any `json:"self_identity,omitempty"` Routing map[string]any `json:"routing,omitempty"` - Status string `json:"status,omitempty"` + Disabled *bool `json:"disabled,omitempty"` VerifiedAt *time.Time `json:"verified_at,omitempty"` } @@ -344,6 +377,11 @@ type UpsertChannelIdentityConfigRequest struct { Config map[string]any `json:"config"` } +// UpdateChannelStatusRequest is the input for enabling/disabling a bot channel config. +type UpdateChannelStatusRequest struct { + Disabled bool `json:"disabled"` +} + // SendRequest is the input for sending an outbound message through a channel. type SendRequest struct { Target string `json:"target,omitempty"` diff --git a/internal/conversation/flow/capability_policy.go b/internal/conversation/flow/capability_policy.go new file mode 100644 index 00000000..b70bebda --- /dev/null +++ b/internal/conversation/flow/capability_policy.go @@ -0,0 +1,70 @@ +package flow + +import "github.com/memohai/memoh/internal/models" + +// attachmentModality maps an attachment type string to the input modality it requires. 
+var attachmentModality = map[string]string{ + "image": models.ModelInputImage, + "audio": models.ModelInputAudio, + "video": models.ModelInputVideo, + "file": models.ModelInputFile, +} + +// gatewayAttachment is the structured attachment payload sent to the agent gateway. +// Only fields consumable by the agent/LLM are serialized; internal references +// (asset_id, platform_key, url) are stripped before dispatch. +type gatewayAttachment struct { + Type string `json:"type"` + Base64 string `json:"base64,omitempty"` + Path string `json:"path,omitempty"` + Mime string `json:"mime,omitempty"` + Name string `json:"name,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` +} + +// capabilityRouteResult holds the outcome of splitting attachments by model capability. +type capabilityRouteResult struct { + // Native are attachments the model can consume directly as multimodal input. + Native []gatewayAttachment + // Fallback are attachments whose modality is unsupported; they are converted + // to container file path references for the LLM to access via tools. + Fallback []gatewayAttachment +} + +// routeAttachmentsByCapability splits attachments based on the model's supported +// input modalities. Supported modalities produce native multimodal input; unsupported +// modalities produce container path references for tool-based access. +func routeAttachmentsByCapability(modalities []string, attachments []gatewayAttachment) capabilityRouteResult { + supported := make(map[string]struct{}, len(modalities)) + for _, m := range modalities { + supported[m] = struct{}{} + } + + result := capabilityRouteResult{ + Native: make([]gatewayAttachment, 0, len(attachments)), + Fallback: make([]gatewayAttachment, 0), + } + for _, att := range attachments { + requiredModality, known := attachmentModality[att.Type] + if !known { + // Unknown attachment types always go through fallback path. 
+ result.Fallback = append(result.Fallback, att) + continue + } + if _, ok := supported[requiredModality]; ok { + result.Native = append(result.Native, att) + } else { + result.Fallback = append(result.Fallback, att) + } + } + return result +} + +// attachmentsToAny converts typed gateway attachments to []any for JSON serialization. +func attachmentsToAny(atts []gatewayAttachment) []any { + out := make([]any, 0, len(atts)) + for _, a := range atts { + out = append(out, a) + } + return out +} diff --git a/internal/conversation/flow/capability_policy_test.go b/internal/conversation/flow/capability_policy_test.go new file mode 100644 index 00000000..d0bfa37a --- /dev/null +++ b/internal/conversation/flow/capability_policy_test.go @@ -0,0 +1,67 @@ +package flow + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRouteAttachmentsByCapability_AllSupported(t *testing.T) { + modalities := []string{"text", "image", "audio"} + attachments := []gatewayAttachment{ + {Type: "image", Base64: "abc"}, + {Type: "audio", Path: "/data/voice.wav"}, + } + result := routeAttachmentsByCapability(modalities, attachments) + assert.Len(t, result.Native, 2) + assert.Len(t, result.Fallback, 0) +} + +func TestRouteAttachmentsByCapability_TextOnly(t *testing.T) { + modalities := []string{"text"} + attachments := []gatewayAttachment{ + {Type: "image", Base64: "abc"}, + {Type: "video", Path: "/data/video.mp4"}, + } + result := routeAttachmentsByCapability(modalities, attachments) + assert.Len(t, result.Native, 0) + assert.Len(t, result.Fallback, 2) +} + +func TestRouteAttachmentsByCapability_Mixed(t *testing.T) { + modalities := []string{"text", "image"} + attachments := []gatewayAttachment{ + {Type: "image", Base64: "abc"}, + {Type: "video", Path: "/data/video.mp4"}, + {Type: "audio", Path: "/data/audio.mp3"}, + } + result := routeAttachmentsByCapability(modalities, attachments) + assert.Len(t, result.Native, 1) + assert.Equal(t, "image", result.Native[0].Type) + 
assert.Len(t, result.Fallback, 2) +} + +func TestRouteAttachmentsByCapability_UnknownType(t *testing.T) { + modalities := []string{"text", "image"} + attachments := []gatewayAttachment{ + {Type: "hologram", Path: "/data/holo.dat"}, + } + result := routeAttachmentsByCapability(modalities, attachments) + assert.Len(t, result.Native, 0) + assert.Len(t, result.Fallback, 1) +} + +func TestRouteAttachmentsByCapability_Empty(t *testing.T) { + result := routeAttachmentsByCapability([]string{"text"}, nil) + assert.Len(t, result.Native, 0) + assert.Len(t, result.Fallback, 0) +} + +func TestAttachmentsToAny(t *testing.T) { + atts := []gatewayAttachment{ + {Type: "image", Base64: "abc"}, + {Type: "file", Path: "/data/doc.pdf"}, + } + result := attachmentsToAny(atts) + assert.Len(t, result, 2) +} diff --git a/internal/conversation/flow/resolver.go b/internal/conversation/flow/resolver.go index 73f77e25..a39606dc 100644 --- a/internal/conversation/flow/resolver.go +++ b/internal/conversation/flow/resolver.go @@ -177,8 +177,8 @@ type resolvedContext struct { } func (r *Resolver) resolve(ctx context.Context, req conversation.ChatRequest) (resolvedContext, error) { - if strings.TrimSpace(req.Query) == "" { - return resolvedContext{}, fmt.Errorf("query is required") + if strings.TrimSpace(req.Query) == "" && len(req.Attachments) == 0 { + return resolvedContext{}, fmt.Errorf("query or attachments is required") } if strings.TrimSpace(req.BotID) == "" { return resolvedContext{}, fmt.Errorf("bot id is required") @@ -252,7 +252,7 @@ func (r *Resolver) resolve(ctx context.Context, req conversation.ChatRequest) (r Model: gatewayModelConfig{ ModelID: chatModel.ModelID, ClientType: clientType, - Input: chatModel.Input, + Input: chatModel.InputModalities, APIKey: provider.ApiKey, BaseURL: provider.BaseUrl, }, @@ -273,7 +273,7 @@ func (r *Resolver) resolve(ctx context.Context, req conversation.ChatRequest) (r ConversationType: strings.TrimSpace(req.ConversationType), SessionToken: 
req.ChatToken, }, - Attachments: []any{}, + Attachments: r.routeAndMergeAttachments(chatModel, req), } return resolvedContext{payload: payload, model: chatModel, provider: provider}, nil @@ -583,6 +583,50 @@ func (r *Resolver) tryStoreStream(ctx context.Context, req conversation.ChatRequ return false, nil } +// routeAndMergeAttachments applies CapabilityFallbackPolicy to split +// request attachments by model input modalities, then merges the results +// into a single []any for the gateway request. +func (r *Resolver) routeAndMergeAttachments(model models.GetResponse, req conversation.ChatRequest) []any { + if len(req.Attachments) == 0 { + return []any{} + } + typed := make([]gatewayAttachment, 0, len(req.Attachments)) + for _, raw := range req.Attachments { + typed = append(typed, gatewayAttachment{ + Type: raw.Type, + Base64: raw.Base64, + Path: raw.Path, + Mime: raw.Mime, + Name: raw.Name, + Metadata: raw.Metadata, + }) + } + routed := routeAttachmentsByCapability(model.InputModalities, typed) + // Convert unsupported attachments to file-path references. + for i := range routed.Fallback { + if routed.Fallback[i].Path == "" && routed.Fallback[i].Base64 != "" { + // Cannot downgrade base64-only to path; keep as native so the agent can + // attempt best-effort processing or skip. + routed.Native = append(routed.Native, routed.Fallback[i]) + routed.Fallback[i] = gatewayAttachment{} + continue + } + routed.Fallback[i].Type = "file" + } + merged := make([]any, 0, len(routed.Native)+len(routed.Fallback)) + merged = append(merged, attachmentsToAny(routed.Native)...) 
+ for _, fb := range routed.Fallback { + if fb.Type == "" { + continue + } + merged = append(merged, fb) + } + if len(merged) == 0 { + return []any{} + } + return merged +} + // --- container resolution --- func (r *Resolver) resolveContainerID(ctx context.Context, botID, explicit string) string { @@ -720,7 +764,7 @@ func (r *Resolver) persistUserMessage(ctx context.Context, req conversation.Chat return fmt.Errorf("bot id is required for persistence") } text := strings.TrimSpace(req.Query) - if text == "" { + if text == "" && len(req.Attachments) == 0 { return nil } @@ -743,6 +787,7 @@ func (r *Resolver) persistUserMessage(ctx context.Context, req conversation.Chat Role: "user", Content: content, Metadata: buildRouteMetadata(req), + Assets: chatAttachmentsToAssetRefs(req.Attachments), }) return err } @@ -758,7 +803,9 @@ func (r *Resolver) storeRound(ctx context.Context, req conversation.ChatRequest, break } } - if !req.UserMessagePersisted && !hasUserQuery && strings.TrimSpace(req.Query) != "" { + needUserInRound := !req.UserMessagePersisted && !hasUserQuery && + (strings.TrimSpace(req.Query) != "" || len(req.Attachments) > 0) + if needUserInRound { fullRound = append(fullRound, conversation.ModelMessage{ Role: "user", Content: conversation.NewTextContent(req.Query), @@ -801,10 +848,14 @@ func (r *Resolver) storeMessages(ctx context.Context, req conversation.ChatReque messageSenderUserID := "" externalMessageID := "" sourceReplyToMessageID := "" + assets := []messagepkg.AssetRef(nil) if msg.Role == "user" { messageSenderChannelIdentityID = senderChannelIdentityID messageSenderUserID = senderUserID externalMessageID = req.ExternalMessageID + if strings.TrimSpace(msg.TextContent()) == strings.TrimSpace(req.Query) { + assets = chatAttachmentsToAssetRefs(req.Attachments) + } } else if strings.TrimSpace(req.ExternalMessageID) != "" { // Assistant/tool/system outputs are linked to the inbound source message for cross-channel reply threading. 
sourceReplyToMessageID = req.ExternalMessageID @@ -820,12 +871,34 @@ func (r *Resolver) storeMessages(ctx context.Context, req conversation.ChatReque Role: msg.Role, Content: content, Metadata: meta, + Assets: assets, }); err != nil { r.logger.Warn("persist message failed", slog.Any("error", err)) } } } +// chatAttachmentsToAssetRefs converts ChatAttachment slice to message AssetRef slice. +// Only attachments that carry an asset_id are included; others have not been ingested yet. +func chatAttachmentsToAssetRefs(attachments []conversation.ChatAttachment) []messagepkg.AssetRef { + if len(attachments) == 0 { + return nil + } + refs := make([]messagepkg.AssetRef, 0, len(attachments)) + for i, att := range attachments { + id := strings.TrimSpace(att.AssetID) + if id == "" { + continue + } + refs = append(refs, messagepkg.AssetRef{ + AssetID: id, + Role: "attachment", + Ordinal: i, + }) + } + return refs +} + func buildRouteMetadata(req conversation.ChatRequest) map[string]any { if strings.TrimSpace(req.RouteID) == "" && strings.TrimSpace(req.CurrentChannel) == "" { return nil diff --git a/internal/conversation/types.go b/internal/conversation/types.go index c6431a28..6d7c6c09 100644 --- a/internal/conversation/types.go +++ b/internal/conversation/types.go @@ -191,6 +191,20 @@ type ToolCallFunction struct { Arguments string `json:"arguments"` } +// ChatAttachment is a media attachment carried in a chat request. +type ChatAttachment struct { + Type string `json:"type"` + Base64 string `json:"base64,omitempty"` + Path string `json:"path,omitempty"` + URL string `json:"url,omitempty"` + PlatformKey string `json:"platform_key,omitempty"` + AssetID string `json:"asset_id,omitempty"` + Name string `json:"name,omitempty"` + Mime string `json:"mime,omitempty"` + Size int64 `json:"size,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` +} + // ChatRequest is the input for Chat and StreamChat. 
type ChatRequest struct { BotID string `json:"-"` @@ -206,15 +220,16 @@ type ChatRequest struct { ConversationType string `json:"-"` UserMessagePersisted bool `json:"-"` - Query string `json:"query"` - Model string `json:"model,omitempty"` - Provider string `json:"provider,omitempty"` - MaxContextLoadTime int `json:"max_context_load_time,omitempty"` - Channels []string `json:"channels,omitempty"` - CurrentChannel string `json:"current_channel,omitempty"` - Messages []ModelMessage `json:"messages,omitempty"` - Skills []string `json:"skills,omitempty"` - AllowedActions []string `json:"allowed_actions,omitempty"` + Query string `json:"query"` + Model string `json:"model,omitempty"` + Provider string `json:"provider,omitempty"` + MaxContextLoadTime int `json:"max_context_load_time,omitempty"` + Channels []string `json:"channels,omitempty"` + CurrentChannel string `json:"current_channel,omitempty"` + Messages []ModelMessage `json:"messages,omitempty"` + Skills []string `json:"skills,omitempty"` + AllowedActions []string `json:"allowed_actions,omitempty"` + Attachments []ChatAttachment `json:"attachments,omitempty"` } // ChatResponse is the output of a non-streaming chat call. 
diff --git a/internal/db/sqlc/channels.sql.go b/internal/db/sqlc/channels.sql.go index 52c62a40..7728e640 100644 --- a/internal/db/sqlc/channels.sql.go +++ b/internal/db/sqlc/channels.sql.go @@ -11,8 +11,23 @@ import ( "github.com/jackc/pgx/v5/pgtype" ) +const deleteBotChannelConfig = `-- name: DeleteBotChannelConfig :exec +DELETE FROM bot_channel_configs +WHERE bot_id = $1 AND channel_type = $2 +` + +type DeleteBotChannelConfigParams struct { + BotID pgtype.UUID `json:"bot_id"` + ChannelType string `json:"channel_type"` +} + +func (q *Queries) DeleteBotChannelConfig(ctx context.Context, arg DeleteBotChannelConfigParams) error { + _, err := q.db.Exec(ctx, deleteBotChannelConfig, arg.BotID, arg.ChannelType) + return err +} + const getBotChannelConfig = `-- name: GetBotChannelConfig :one -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE bot_id = $1 AND channel_type = $2 LIMIT 1 @@ -35,7 +50,7 @@ func (q *Queries) GetBotChannelConfig(ctx context.Context, arg GetBotChannelConf &i.SelfIdentity, &i.Routing, &i.Capabilities, - &i.Status, + &i.Disabled, &i.VerifiedAt, &i.CreatedAt, &i.UpdatedAt, @@ -44,7 +59,7 @@ func (q *Queries) GetBotChannelConfig(ctx context.Context, arg GetBotChannelConf } const getBotChannelConfigByExternalIdentity = `-- name: GetBotChannelConfigByExternalIdentity :one -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE channel_type = $1 AND external_identity = $2 LIMIT 1 @@ -67,7 +82,7 @@ func (q 
*Queries) GetBotChannelConfigByExternalIdentity(ctx context.Context, arg &i.SelfIdentity, &i.Routing, &i.Capabilities, - &i.Status, + &i.Disabled, &i.VerifiedAt, &i.CreatedAt, &i.UpdatedAt, @@ -102,7 +117,7 @@ func (q *Queries) GetUserChannelBinding(ctx context.Context, arg GetUserChannelB } const listBotChannelConfigsByType = `-- name: ListBotChannelConfigsByType :many -SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +SELECT id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at FROM bot_channel_configs WHERE channel_type = $1 ORDER BY created_at DESC @@ -126,7 +141,7 @@ func (q *Queries) ListBotChannelConfigsByType(ctx context.Context, channelType s &i.SelfIdentity, &i.Routing, &i.Capabilities, - &i.Status, + &i.Disabled, &i.VerifiedAt, &i.CreatedAt, &i.UpdatedAt, @@ -175,9 +190,44 @@ func (q *Queries) ListUserChannelBindingsByPlatform(ctx context.Context, channel return items, nil } +const updateBotChannelConfigDisabled = `-- name: UpdateBotChannelConfigDisabled :one +UPDATE bot_channel_configs +SET + disabled = $3, + updated_at = now() +WHERE bot_id = $1 AND channel_type = $2 +RETURNING id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at +` + +type UpdateBotChannelConfigDisabledParams struct { + BotID pgtype.UUID `json:"bot_id"` + ChannelType string `json:"channel_type"` + Disabled bool `json:"disabled"` +} + +func (q *Queries) UpdateBotChannelConfigDisabled(ctx context.Context, arg UpdateBotChannelConfigDisabledParams) (BotChannelConfig, error) { + row := q.db.QueryRow(ctx, updateBotChannelConfigDisabled, arg.BotID, arg.ChannelType, arg.Disabled) + var i BotChannelConfig + err := row.Scan( + &i.ID, + &i.BotID, + &i.ChannelType, + &i.Credentials, + &i.ExternalIdentity, + &i.SelfIdentity, + 
&i.Routing, + &i.Capabilities, + &i.Disabled, + &i.VerifiedAt, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + const upsertBotChannelConfig = `-- name: UpsertBotChannelConfig :one INSERT INTO bot_channel_configs ( - bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at + bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) ON CONFLICT (bot_id, channel_type) @@ -187,10 +237,10 @@ DO UPDATE SET self_identity = EXCLUDED.self_identity, routing = EXCLUDED.routing, capabilities = EXCLUDED.capabilities, - status = EXCLUDED.status, + disabled = EXCLUDED.disabled, verified_at = EXCLUDED.verified_at, updated_at = now() -RETURNING id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, status, verified_at, created_at, updated_at +RETURNING id, bot_id, channel_type, credentials, external_identity, self_identity, routing, capabilities, disabled, verified_at, created_at, updated_at ` type UpsertBotChannelConfigParams struct { @@ -201,7 +251,7 @@ type UpsertBotChannelConfigParams struct { SelfIdentity []byte `json:"self_identity"` Routing []byte `json:"routing"` Capabilities []byte `json:"capabilities"` - Status string `json:"status"` + Disabled bool `json:"disabled"` VerifiedAt pgtype.Timestamptz `json:"verified_at"` } @@ -214,7 +264,7 @@ func (q *Queries) UpsertBotChannelConfig(ctx context.Context, arg UpsertBotChann arg.SelfIdentity, arg.Routing, arg.Capabilities, - arg.Status, + arg.Disabled, arg.VerifiedAt, ) var i BotChannelConfig @@ -227,7 +277,7 @@ func (q *Queries) UpsertBotChannelConfig(ctx context.Context, arg UpsertBotChann &i.SelfIdentity, &i.Routing, &i.Capabilities, - &i.Status, + &i.Disabled, &i.VerifiedAt, &i.CreatedAt, &i.UpdatedAt, diff --git a/internal/db/sqlc/media.sql.go b/internal/db/sqlc/media.sql.go new file mode 100644 index 00000000..a841240f 
--- /dev/null +++ b/internal/db/sqlc/media.sql.go @@ -0,0 +1,527 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.30.0 +// source: media.sql + +package sqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const createMediaAsset = `-- name: CreateMediaAsset :one +INSERT INTO media_assets ( + bot_id, storage_provider_id, content_hash, media_type, mime, + size_bytes, storage_key, original_name, width, height, duration_ms, metadata +) +VALUES ( + $1, + $2::uuid, + $3, + $4, + $5, + $6, + $7, + $8::text, + $9::integer, + $10::integer, + $11::bigint, + $12 +) +ON CONFLICT (bot_id, content_hash) DO UPDATE SET + bot_id = media_assets.bot_id +RETURNING id, bot_id, storage_provider_id, content_hash, media_type, mime, size_bytes, storage_key, original_name, width, height, duration_ms, metadata, created_at +` + +type CreateMediaAssetParams struct { + BotID pgtype.UUID `json:"bot_id"` + StorageProviderID pgtype.UUID `json:"storage_provider_id"` + ContentHash string `json:"content_hash"` + MediaType string `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName pgtype.Text `json:"original_name"` + Width pgtype.Int4 `json:"width"` + Height pgtype.Int4 `json:"height"` + DurationMs pgtype.Int8 `json:"duration_ms"` + Metadata []byte `json:"metadata"` +} + +func (q *Queries) CreateMediaAsset(ctx context.Context, arg CreateMediaAssetParams) (MediaAsset, error) { + row := q.db.QueryRow(ctx, createMediaAsset, + arg.BotID, + arg.StorageProviderID, + arg.ContentHash, + arg.MediaType, + arg.Mime, + arg.SizeBytes, + arg.StorageKey, + arg.OriginalName, + arg.Width, + arg.Height, + arg.DurationMs, + arg.Metadata, + ) + var i MediaAsset + err := row.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.ContentHash, + &i.MediaType, + &i.Mime, + &i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.Metadata, + &i.CreatedAt, + ) 
+ return i, err +} + +const createMessageAsset = `-- name: CreateMessageAsset :one +INSERT INTO bot_history_message_assets (message_id, asset_id, role, ordinal) +VALUES ($1, $2, $3, $4) +ON CONFLICT (message_id, asset_id) DO UPDATE SET + role = EXCLUDED.role, + ordinal = EXCLUDED.ordinal +RETURNING id, message_id, asset_id, role, ordinal, created_at +` + +type CreateMessageAssetParams struct { + MessageID pgtype.UUID `json:"message_id"` + AssetID pgtype.UUID `json:"asset_id"` + Role string `json:"role"` + Ordinal int32 `json:"ordinal"` +} + +func (q *Queries) CreateMessageAsset(ctx context.Context, arg CreateMessageAssetParams) (BotHistoryMessageAsset, error) { + row := q.db.QueryRow(ctx, createMessageAsset, + arg.MessageID, + arg.AssetID, + arg.Role, + arg.Ordinal, + ) + var i BotHistoryMessageAsset + err := row.Scan( + &i.ID, + &i.MessageID, + &i.AssetID, + &i.Role, + &i.Ordinal, + &i.CreatedAt, + ) + return i, err +} + +const createStorageProvider = `-- name: CreateStorageProvider :one +INSERT INTO storage_providers (name, provider, config) +VALUES ($1, $2, $3) +RETURNING id, name, provider, config, created_at, updated_at +` + +type CreateStorageProviderParams struct { + Name string `json:"name"` + Provider string `json:"provider"` + Config []byte `json:"config"` +} + +func (q *Queries) CreateStorageProvider(ctx context.Context, arg CreateStorageProviderParams) (StorageProvider, error) { + row := q.db.QueryRow(ctx, createStorageProvider, arg.Name, arg.Provider, arg.Config) + var i StorageProvider + err := row.Scan( + &i.ID, + &i.Name, + &i.Provider, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const deleteMediaAsset = `-- name: DeleteMediaAsset :exec +DELETE FROM media_assets WHERE id = $1 +` + +func (q *Queries) DeleteMediaAsset(ctx context.Context, id pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteMediaAsset, id) + return err +} + +const deleteMessageAssets = `-- name: DeleteMessageAssets :exec +DELETE FROM 
bot_history_message_assets WHERE message_id = $1 +` + +func (q *Queries) DeleteMessageAssets(ctx context.Context, messageID pgtype.UUID) error { + _, err := q.db.Exec(ctx, deleteMessageAssets, messageID) + return err +} + +const getBotStorageBinding = `-- name: GetBotStorageBinding :one +SELECT id, bot_id, storage_provider_id, base_path, created_at, updated_at FROM bot_storage_bindings WHERE bot_id = $1 +` + +func (q *Queries) GetBotStorageBinding(ctx context.Context, botID pgtype.UUID) (BotStorageBinding, error) { + row := q.db.QueryRow(ctx, getBotStorageBinding, botID) + var i BotStorageBinding + err := row.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.BasePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getMediaAssetByHash = `-- name: GetMediaAssetByHash :one +SELECT id, bot_id, storage_provider_id, content_hash, media_type, mime, size_bytes, storage_key, original_name, width, height, duration_ms, metadata, created_at FROM media_assets +WHERE bot_id = $1 AND content_hash = $2 +` + +type GetMediaAssetByHashParams struct { + BotID pgtype.UUID `json:"bot_id"` + ContentHash string `json:"content_hash"` +} + +func (q *Queries) GetMediaAssetByHash(ctx context.Context, arg GetMediaAssetByHashParams) (MediaAsset, error) { + row := q.db.QueryRow(ctx, getMediaAssetByHash, arg.BotID, arg.ContentHash) + var i MediaAsset + err := row.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.ContentHash, + &i.MediaType, + &i.Mime, + &i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.Metadata, + &i.CreatedAt, + ) + return i, err +} + +const getMediaAssetByID = `-- name: GetMediaAssetByID :one +SELECT id, bot_id, storage_provider_id, content_hash, media_type, mime, size_bytes, storage_key, original_name, width, height, duration_ms, metadata, created_at FROM media_assets WHERE id = $1 +` + +func (q *Queries) GetMediaAssetByID(ctx context.Context, id pgtype.UUID) (MediaAsset, error) { + row := 
q.db.QueryRow(ctx, getMediaAssetByID, id) + var i MediaAsset + err := row.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.ContentHash, + &i.MediaType, + &i.Mime, + &i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.Metadata, + &i.CreatedAt, + ) + return i, err +} + +const getStorageProviderByID = `-- name: GetStorageProviderByID :one +SELECT id, name, provider, config, created_at, updated_at FROM storage_providers WHERE id = $1 +` + +func (q *Queries) GetStorageProviderByID(ctx context.Context, id pgtype.UUID) (StorageProvider, error) { + row := q.db.QueryRow(ctx, getStorageProviderByID, id) + var i StorageProvider + err := row.Scan( + &i.ID, + &i.Name, + &i.Provider, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getStorageProviderByName = `-- name: GetStorageProviderByName :one +SELECT id, name, provider, config, created_at, updated_at FROM storage_providers WHERE name = $1 +` + +func (q *Queries) GetStorageProviderByName(ctx context.Context, name string) (StorageProvider, error) { + row := q.db.QueryRow(ctx, getStorageProviderByName, name) + var i StorageProvider + err := row.Scan( + &i.ID, + &i.Name, + &i.Provider, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const listMediaAssetsByBotID = `-- name: ListMediaAssetsByBotID :many +SELECT id, bot_id, storage_provider_id, content_hash, media_type, mime, size_bytes, storage_key, original_name, width, height, duration_ms, metadata, created_at FROM media_assets +WHERE bot_id = $1 +ORDER BY created_at DESC +` + +func (q *Queries) ListMediaAssetsByBotID(ctx context.Context, botID pgtype.UUID) ([]MediaAsset, error) { + rows, err := q.db.Query(ctx, listMediaAssetsByBotID, botID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []MediaAsset + for rows.Next() { + var i MediaAsset + if err := rows.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.ContentHash, + &i.MediaType, + &i.Mime, + 
&i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.Metadata, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listMessageAssets = `-- name: ListMessageAssets :many +SELECT + ma.id AS rel_id, + ma.message_id, + ma.asset_id, + ma.role, + ma.ordinal, + a.media_type, + a.mime, + a.size_bytes, + a.storage_key, + a.original_name, + a.width, + a.height, + a.duration_ms, + a.metadata AS asset_metadata +FROM bot_history_message_assets ma +JOIN media_assets a ON a.id = ma.asset_id +WHERE ma.message_id = $1 +ORDER BY ma.ordinal ASC +` + +type ListMessageAssetsRow struct { + RelID pgtype.UUID `json:"rel_id"` + MessageID pgtype.UUID `json:"message_id"` + AssetID pgtype.UUID `json:"asset_id"` + Role string `json:"role"` + Ordinal int32 `json:"ordinal"` + MediaType string `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName pgtype.Text `json:"original_name"` + Width pgtype.Int4 `json:"width"` + Height pgtype.Int4 `json:"height"` + DurationMs pgtype.Int8 `json:"duration_ms"` + AssetMetadata []byte `json:"asset_metadata"` +} + +func (q *Queries) ListMessageAssets(ctx context.Context, messageID pgtype.UUID) ([]ListMessageAssetsRow, error) { + rows, err := q.db.Query(ctx, listMessageAssets, messageID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListMessageAssetsRow + for rows.Next() { + var i ListMessageAssetsRow + if err := rows.Scan( + &i.RelID, + &i.MessageID, + &i.AssetID, + &i.Role, + &i.Ordinal, + &i.MediaType, + &i.Mime, + &i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.AssetMetadata, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + 
+const listMessageAssetsBatch = `-- name: ListMessageAssetsBatch :many +SELECT + ma.id AS rel_id, + ma.message_id, + ma.asset_id, + ma.role, + ma.ordinal, + a.media_type, + a.mime, + a.size_bytes, + a.storage_key, + a.original_name, + a.width, + a.height, + a.duration_ms, + a.metadata AS asset_metadata +FROM bot_history_message_assets ma +JOIN media_assets a ON a.id = ma.asset_id +WHERE ma.message_id = ANY($1::uuid[]) +ORDER BY ma.message_id, ma.ordinal ASC +` + +type ListMessageAssetsBatchRow struct { + RelID pgtype.UUID `json:"rel_id"` + MessageID pgtype.UUID `json:"message_id"` + AssetID pgtype.UUID `json:"asset_id"` + Role string `json:"role"` + Ordinal int32 `json:"ordinal"` + MediaType string `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName pgtype.Text `json:"original_name"` + Width pgtype.Int4 `json:"width"` + Height pgtype.Int4 `json:"height"` + DurationMs pgtype.Int8 `json:"duration_ms"` + AssetMetadata []byte `json:"asset_metadata"` +} + +func (q *Queries) ListMessageAssetsBatch(ctx context.Context, messageIds []pgtype.UUID) ([]ListMessageAssetsBatchRow, error) { + rows, err := q.db.Query(ctx, listMessageAssetsBatch, messageIds) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ListMessageAssetsBatchRow + for rows.Next() { + var i ListMessageAssetsBatchRow + if err := rows.Scan( + &i.RelID, + &i.MessageID, + &i.AssetID, + &i.Role, + &i.Ordinal, + &i.MediaType, + &i.Mime, + &i.SizeBytes, + &i.StorageKey, + &i.OriginalName, + &i.Width, + &i.Height, + &i.DurationMs, + &i.AssetMetadata, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const listStorageProviders = `-- name: ListStorageProviders :many +SELECT id, name, provider, config, created_at, updated_at FROM storage_providers ORDER BY created_at DESC +` + +func (q *Queries) 
ListStorageProviders(ctx context.Context) ([]StorageProvider, error) { + rows, err := q.db.Query(ctx, listStorageProviders) + if err != nil { + return nil, err + } + defer rows.Close() + var items []StorageProvider + for rows.Next() { + var i StorageProvider + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Provider, + &i.Config, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const upsertBotStorageBinding = `-- name: UpsertBotStorageBinding :one +INSERT INTO bot_storage_bindings (bot_id, storage_provider_id, base_path) +VALUES ($1, $2, $3) +ON CONFLICT (bot_id) DO UPDATE SET + storage_provider_id = EXCLUDED.storage_provider_id, + base_path = EXCLUDED.base_path, + updated_at = now() +RETURNING id, bot_id, storage_provider_id, base_path, created_at, updated_at +` + +type UpsertBotStorageBindingParams struct { + BotID pgtype.UUID `json:"bot_id"` + StorageProviderID pgtype.UUID `json:"storage_provider_id"` + BasePath string `json:"base_path"` +} + +func (q *Queries) UpsertBotStorageBinding(ctx context.Context, arg UpsertBotStorageBindingParams) (BotStorageBinding, error) { + row := q.db.QueryRow(ctx, upsertBotStorageBinding, arg.BotID, arg.StorageProviderID, arg.BasePath) + var i BotStorageBinding + err := row.Scan( + &i.ID, + &i.BotID, + &i.StorageProviderID, + &i.BasePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} diff --git a/internal/db/sqlc/models.go b/internal/db/sqlc/models.go index caf27f9a..141cc9b9 100644 --- a/internal/db/sqlc/models.go +++ b/internal/db/sqlc/models.go @@ -37,7 +37,7 @@ type BotChannelConfig struct { SelfIdentity []byte `json:"self_identity"` Routing []byte `json:"routing"` Capabilities []byte `json:"capabilities"` - Status string `json:"status"` + Disabled bool `json:"disabled"` VerifiedAt pgtype.Timestamptz `json:"verified_at"` CreatedAt pgtype.Timestamptz `json:"created_at"` UpdatedAt 
pgtype.Timestamptz `json:"updated_at"` @@ -72,6 +72,15 @@ type BotHistoryMessage struct { CreatedAt pgtype.Timestamptz `json:"created_at"` } +type BotHistoryMessageAsset struct { + ID pgtype.UUID `json:"id"` + MessageID pgtype.UUID `json:"message_id"` + AssetID pgtype.UUID `json:"asset_id"` + Role string `json:"role"` + Ordinal int32 `json:"ordinal"` + CreatedAt pgtype.Timestamptz `json:"created_at"` +} + type BotMember struct { BotID pgtype.UUID `json:"bot_id"` UserID pgtype.UUID `json:"user_id"` @@ -89,6 +98,15 @@ type BotPreauthKey struct { CreatedAt pgtype.Timestamptz `json:"created_at"` } +type BotStorageBinding struct { + ID pgtype.UUID `json:"id"` + BotID pgtype.UUID `json:"bot_id"` + StorageProviderID pgtype.UUID `json:"storage_provider_id"` + BasePath string `json:"base_path"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type ChannelIdentity struct { ID pgtype.UUID `json:"id"` UserID pgtype.UUID `json:"user_id"` @@ -167,16 +185,33 @@ type McpConnection struct { UpdatedAt pgtype.Timestamptz `json:"updated_at"` } +type MediaAsset struct { + ID pgtype.UUID `json:"id"` + BotID pgtype.UUID `json:"bot_id"` + StorageProviderID pgtype.UUID `json:"storage_provider_id"` + ContentHash string `json:"content_hash"` + MediaType string `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName pgtype.Text `json:"original_name"` + Width pgtype.Int4 `json:"width"` + Height pgtype.Int4 `json:"height"` + DurationMs pgtype.Int8 `json:"duration_ms"` + Metadata []byte `json:"metadata"` + CreatedAt pgtype.Timestamptz `json:"created_at"` +} + type Model struct { - ID pgtype.UUID `json:"id"` - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Dimensions pgtype.Int4 `json:"dimensions"` - IsMultimodal bool `json:"is_multimodal"` - Type string `json:"type"` - CreatedAt 
pgtype.Timestamptz `json:"created_at"` - UpdatedAt pgtype.Timestamptz `json:"updated_at"` + ID pgtype.UUID `json:"id"` + ModelID string `json:"model_id"` + Name pgtype.Text `json:"name"` + LlmProviderID pgtype.UUID `json:"llm_provider_id"` + Dimensions pgtype.Int4 `json:"dimensions"` + InputModalities []string `json:"input_modalities"` + Type string `json:"type"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` } type ModelVariant struct { @@ -221,6 +256,15 @@ type Snapshot struct { CreatedAt pgtype.Timestamptz `json:"created_at"` } +type StorageProvider struct { + ID pgtype.UUID `json:"id"` + Name string `json:"name"` + Provider string `json:"provider"` + Config []byte `json:"config"` + CreatedAt pgtype.Timestamptz `json:"created_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type Subagent struct { ID pgtype.UUID `json:"id"` Name string `json:"name"` diff --git a/internal/db/sqlc/models.sql.go b/internal/db/sqlc/models.sql.go index fd4fd53e..e6dca657 100644 --- a/internal/db/sqlc/models.sql.go +++ b/internal/db/sqlc/models.sql.go @@ -98,7 +98,7 @@ func (q *Queries) CreateLlmProvider(ctx context.Context, arg CreateLlmProviderPa } const createModel = `-- name: CreateModel :one -INSERT INTO models (model_id, name, llm_provider_id, dimensions, is_multimodal, type) +INSERT INTO models (model_id, name, llm_provider_id, dimensions, input_modalities, type) VALUES ( $1, $2, @@ -107,16 +107,16 @@ VALUES ( $5, $6 ) -RETURNING id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at +RETURNING id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at ` type CreateModelParams struct { - ModelID string `json:"model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Dimensions pgtype.Int4 `json:"dimensions"` - IsMultimodal bool `json:"is_multimodal"` - Type string `json:"type"` + ModelID string 
`json:"model_id"` + Name pgtype.Text `json:"name"` + LlmProviderID pgtype.UUID `json:"llm_provider_id"` + Dimensions pgtype.Int4 `json:"dimensions"` + InputModalities []string `json:"input_modalities"` + Type string `json:"type"` } func (q *Queries) CreateModel(ctx context.Context, arg CreateModelParams) (Model, error) { @@ -125,7 +125,7 @@ func (q *Queries) CreateModel(ctx context.Context, arg CreateModelParams) (Model arg.Name, arg.LlmProviderID, arg.Dimensions, - arg.IsMultimodal, + arg.InputModalities, arg.Type, ) var i Model @@ -135,7 +135,7 @@ func (q *Queries) CreateModel(ctx context.Context, arg CreateModelParams) (Model &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -249,7 +249,7 @@ func (q *Queries) GetLlmProviderByName(ctx context.Context, name string) (LlmPro } const getModelByID = `-- name: GetModelByID :one -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models WHERE id = $1 +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models WHERE id = $1 ` func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, error) { @@ -261,7 +261,7 @@ func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, erro &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -270,7 +270,7 @@ func (q *Queries) GetModelByID(ctx context.Context, id pgtype.UUID) (Model, erro } const getModelByModelID = `-- name: GetModelByModelID :one -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models WHERE model_id = $1 +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models WHERE model_id = $1 ` func (q *Queries) GetModelByModelID(ctx context.Context, modelID string) (Model, error) { @@ 
-282,7 +282,7 @@ func (q *Queries) GetModelByModelID(ctx context.Context, modelID string) (Model, &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -394,7 +394,7 @@ func (q *Queries) ListModelVariantsByModelUUID(ctx context.Context, modelUuid pg } const listModels = `-- name: ListModels :many -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models ORDER BY created_at DESC ` @@ -413,7 +413,7 @@ func (q *Queries) ListModels(ctx context.Context) ([]Model, error) { &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -429,7 +429,7 @@ func (q *Queries) ListModels(ctx context.Context) ([]Model, error) { } const listModelsByClientType = `-- name: ListModelsByClientType :many -SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.dimensions, m.is_multimodal, m.type, m.created_at, m.updated_at FROM models AS m +SELECT m.id, m.model_id, m.name, m.llm_provider_id, m.dimensions, m.input_modalities, m.type, m.created_at, m.updated_at FROM models AS m JOIN llm_providers AS p ON p.id = m.llm_provider_id WHERE p.client_type = $1 ORDER BY m.created_at DESC @@ -450,7 +450,7 @@ func (q *Queries) ListModelsByClientType(ctx context.Context, clientType string) &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -466,7 +466,7 @@ func (q *Queries) ListModelsByClientType(ctx context.Context, clientType string) } const listModelsByProviderID = `-- name: ListModelsByProviderID :many -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models 
WHERE llm_provider_id = $1 ORDER BY created_at DESC ` @@ -486,7 +486,7 @@ func (q *Queries) ListModelsByProviderID(ctx context.Context, llmProviderID pgty &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -502,7 +502,7 @@ func (q *Queries) ListModelsByProviderID(ctx context.Context, llmProviderID pgty } const listModelsByProviderIDAndType = `-- name: ListModelsByProviderIDAndType :many -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models WHERE llm_provider_id = $1 AND type = $2 ORDER BY created_at DESC @@ -528,7 +528,7 @@ func (q *Queries) ListModelsByProviderIDAndType(ctx context.Context, arg ListMod &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -544,7 +544,7 @@ func (q *Queries) ListModelsByProviderIDAndType(ctx context.Context, arg ListMod } const listModelsByType = `-- name: ListModelsByType :many -SELECT id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at FROM models +SELECT id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at FROM models WHERE type = $1 ORDER BY created_at DESC ` @@ -564,7 +564,7 @@ func (q *Queries) ListModelsByType(ctx context.Context, type_ string) ([]Model, &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -630,20 +630,20 @@ SET name = $1, llm_provider_id = $2, dimensions = $3, - is_multimodal = $4, + input_modalities = $4, type = $5, updated_at = now() WHERE id = $6 -RETURNING id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at +RETURNING id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, 
updated_at ` type UpdateModelParams struct { - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Dimensions pgtype.Int4 `json:"dimensions"` - IsMultimodal bool `json:"is_multimodal"` - Type string `json:"type"` - ID pgtype.UUID `json:"id"` + Name pgtype.Text `json:"name"` + LlmProviderID pgtype.UUID `json:"llm_provider_id"` + Dimensions pgtype.Int4 `json:"dimensions"` + InputModalities []string `json:"input_modalities"` + Type string `json:"type"` + ID pgtype.UUID `json:"id"` } func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model, error) { @@ -651,7 +651,7 @@ func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model arg.Name, arg.LlmProviderID, arg.Dimensions, - arg.IsMultimodal, + arg.InputModalities, arg.Type, arg.ID, ) @@ -662,7 +662,7 @@ func (q *Queries) UpdateModel(ctx context.Context, arg UpdateModelParams) (Model &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, @@ -677,21 +677,21 @@ SET name = $2, llm_provider_id = $3, dimensions = $4, - is_multimodal = $5, + input_modalities = $5, type = $6, updated_at = now() WHERE model_id = $7 -RETURNING id, model_id, name, llm_provider_id, dimensions, is_multimodal, type, created_at, updated_at +RETURNING id, model_id, name, llm_provider_id, dimensions, input_modalities, type, created_at, updated_at ` type UpdateModelByModelIDParams struct { - NewModelID string `json:"new_model_id"` - Name pgtype.Text `json:"name"` - LlmProviderID pgtype.UUID `json:"llm_provider_id"` - Dimensions pgtype.Int4 `json:"dimensions"` - IsMultimodal bool `json:"is_multimodal"` - Type string `json:"type"` - ModelID string `json:"model_id"` + NewModelID string `json:"new_model_id"` + Name pgtype.Text `json:"name"` + LlmProviderID pgtype.UUID `json:"llm_provider_id"` + Dimensions pgtype.Int4 `json:"dimensions"` + InputModalities []string `json:"input_modalities"` + Type string `json:"type"` 
+ ModelID string `json:"model_id"` } func (q *Queries) UpdateModelByModelID(ctx context.Context, arg UpdateModelByModelIDParams) (Model, error) { @@ -700,7 +700,7 @@ func (q *Queries) UpdateModelByModelID(ctx context.Context, arg UpdateModelByMod arg.Name, arg.LlmProviderID, arg.Dimensions, - arg.IsMultimodal, + arg.InputModalities, arg.Type, arg.ModelID, ) @@ -711,7 +711,7 @@ func (q *Queries) UpdateModelByModelID(ctx context.Context, arg UpdateModelByMod &i.Name, &i.LlmProviderID, &i.Dimensions, - &i.IsMultimodal, + &i.InputModalities, &i.Type, &i.CreatedAt, &i.UpdatedAt, diff --git a/internal/embeddings/bootstrap.go b/internal/embeddings/bootstrap.go index a3e3b281..95ff5ede 100644 --- a/internal/embeddings/bootstrap.go +++ b/internal/embeddings/bootstrap.go @@ -42,7 +42,7 @@ func CollectEmbeddingVectors(ctx context.Context, service *models.Service) (map[ if model.Dimensions > 0 && model.ModelID != "" { vectors[model.ModelID] = model.Dimensions } - if model.IsMultimodal { + if model.IsMultimodal() { if multimodalModel.ModelID == "" { multimodalModel = model } diff --git a/internal/embeddings/resolver.go b/internal/embeddings/resolver.go index f7ebf0ca..d9393638 100644 --- a/internal/embeddings/resolver.go +++ b/internal/embeddings/resolver.go @@ -192,10 +192,10 @@ func (r *Resolver) selectEmbeddingModel(ctx context.Context, req Request) (model if model.Type != models.ModelTypeEmbedding { continue } - if req.Type == TypeMultimodal && !model.IsMultimodal { + if req.Type == TypeMultimodal && !model.IsMultimodal() { continue } - if req.Type == TypeText && model.IsMultimodal { + if req.Type == TypeText && model.IsMultimodal() { continue } filtered = append(filtered, model) diff --git a/internal/handlers/channel.go b/internal/handlers/channel.go index 26476b24..3ff34e80 100644 --- a/internal/handlers/channel.go +++ b/internal/handlers/channel.go @@ -11,12 +11,12 @@ import ( ) type ChannelHandler struct { - service *channel.Service + store *channel.Store registry 
*channel.Registry } -func NewChannelHandler(service *channel.Service, registry *channel.Registry) *ChannelHandler { - return &ChannelHandler{service: service, registry: registry} +func NewChannelHandler(store *channel.Store, registry *channel.Registry) *ChannelHandler { + return &ChannelHandler{store: store, registry: registry} } func (h *ChannelHandler) Register(e *echo.Echo) { @@ -48,7 +48,7 @@ func (h *ChannelHandler) GetChannelIdentityConfig(c echo.Context) error { if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - resp, err := h.service.GetChannelIdentityConfig(c.Request().Context(), channelIdentityID, channelType) + resp, err := h.store.GetChannelIdentityConfig(c.Request().Context(), channelIdentityID, channelType) if err != nil { if strings.Contains(err.Error(), "not found") { return echo.NewHTTPError(http.StatusNotFound, err.Error()) @@ -84,7 +84,7 @@ func (h *ChannelHandler) UpsertChannelIdentityConfig(c echo.Context) error { if req.Config == nil { req.Config = map[string]any{} } - resp, err := h.service.UpsertChannelIdentityConfig(c.Request().Context(), channelIdentityID, channelType, req) + resp, err := h.store.UpsertChannelIdentityConfig(c.Request().Context(), channelIdentityID, channelType, req) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git a/internal/handlers/local_channel.go b/internal/handlers/local_channel.go index 6eb524ec..1b87ea11 100644 --- a/internal/handlers/local_channel.go +++ b/internal/handlers/local_channel.go @@ -22,7 +22,7 @@ import ( type LocalChannelHandler struct { channelType channel.ChannelType channelManager *channel.Manager - channelService *channel.Service + channelStore *channel.Store chatService *conversation.Service routeHub *local.RouteHub botService *bots.Service @@ -30,11 +30,11 @@ type LocalChannelHandler struct { } // NewLocalChannelHandler creates a local channel handler. 
-func NewLocalChannelHandler(channelType channel.ChannelType, channelManager *channel.Manager, channelService *channel.Service, chatService *conversation.Service, routeHub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *LocalChannelHandler { +func NewLocalChannelHandler(channelType channel.ChannelType, channelManager *channel.Manager, channelStore *channel.Store, chatService *conversation.Service, routeHub *local.RouteHub, botService *bots.Service, accountService *accounts.Service) *LocalChannelHandler { return &LocalChannelHandler{ channelType: channelType, channelManager: channelManager, - channelService: channelService, + channelStore: channelStore, chatService: chatService, routeHub: routeHub, botService: botService, @@ -129,7 +129,7 @@ func (h *LocalChannelHandler) PostMessage(c echo.Context) error { if err := h.ensureBotParticipant(c.Request().Context(), botID, channelIdentityID); err != nil { return err } - if h.channelManager == nil || h.channelService == nil { + if h.channelManager == nil || h.channelStore == nil { return echo.NewHTTPError(http.StatusInternalServerError, "channel manager not configured") } var req localMessageRequest @@ -139,7 +139,7 @@ func (h *LocalChannelHandler) PostMessage(c echo.Context) error { if req.Message.IsEmpty() { return echo.NewHTTPError(http.StatusBadRequest, "message is required") } - cfg, err := h.channelService.ResolveEffectiveConfig(c.Request().Context(), botID, h.channelType) + cfg, err := h.channelStore.ResolveEffectiveConfig(c.Request().Context(), botID, h.channelType) if err != nil { return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) } diff --git a/internal/handlers/message.go b/internal/handlers/message.go index 7ac36048..175c960a 100644 --- a/internal/handlers/message.go +++ b/internal/handlers/message.go @@ -3,9 +3,11 @@ package handlers import ( "bufio" "context" + "encoding/base64" "encoding/json" "errors" "fmt" + "io" "log/slog" "net/http" "strconv" @@ -19,6 
+21,7 @@ import ( "github.com/memohai/memoh/internal/channel/identities" "github.com/memohai/memoh/internal/conversation" "github.com/memohai/memoh/internal/conversation/flow" + "github.com/memohai/memoh/internal/media" messagepkg "github.com/memohai/memoh/internal/message" messageevent "github.com/memohai/memoh/internal/message/event" ) @@ -29,6 +32,7 @@ type MessageHandler struct { conversationService conversation.Accessor messageService messagepkg.Service messageEvents messageevent.Subscriber + mediaService *media.Service botService *bots.Service accountService *accounts.Service channelIdentitySvc *identities.Service @@ -53,6 +57,11 @@ func NewMessageHandler(log *slog.Logger, runner flow.Runner, conversationService } } +// SetMediaService sets the optional media service for asset serving. +func (h *MessageHandler) SetMediaService(svc *media.Service) { + h.mediaService = svc +} + // Register registers all conversation routes. func (h *MessageHandler) Register(e *echo.Echo) { // Bot-scoped message container (single shared history per bot). 
@@ -62,6 +71,7 @@ func (h *MessageHandler) Register(e *echo.Echo) { botGroup.GET("/messages", h.ListMessages) botGroup.GET("/messages/events", h.StreamMessageEvents) botGroup.DELETE("/messages", h.DeleteMessages) + botGroup.GET("/media/:asset_id", h.ServeMedia) } // --- Messages --- @@ -87,8 +97,8 @@ func (h *MessageHandler) SendMessage(c echo.Context) error { if err := c.Bind(&req); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - if req.Query == "" { - return echo.NewHTTPError(http.StatusBadRequest, "query is required") + if strings.TrimSpace(req.Query) == "" && len(req.Attachments) == 0 { + return echo.NewHTTPError(http.StatusBadRequest, "query or attachments is required") } req.BotID = botID req.ChatID = botID @@ -105,6 +115,9 @@ func (h *MessageHandler) SendMessage(c echo.Context) error { req.Channels = []string{req.CurrentChannel} } channelIdentityID = h.resolveWebChannelIdentity(c.Request().Context(), channelIdentityID, &req) + if req.Attachments, err = h.ingestInlineAttachments(c.Request().Context(), botID, req.Attachments); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } if h.runner == nil { return echo.NewHTTPError(http.StatusInternalServerError, "conversation runner not configured") @@ -137,8 +150,8 @@ func (h *MessageHandler) StreamMessage(c echo.Context) error { if err := c.Bind(&req); err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - if req.Query == "" { - return echo.NewHTTPError(http.StatusBadRequest, "query is required") + if strings.TrimSpace(req.Query) == "" && len(req.Attachments) == 0 { + return echo.NewHTTPError(http.StatusBadRequest, "query or attachments is required") } req.BotID = botID req.ChatID = botID @@ -155,6 +168,9 @@ func (h *MessageHandler) StreamMessage(c echo.Context) error { req.Channels = []string{req.CurrentChannel} } channelIdentityID = h.resolveWebChannelIdentity(c.Request().Context(), channelIdentityID, &req) + if req.Attachments, err = 
h.ingestInlineAttachments(c.Request().Context(), botID, req.Attachments); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } if h.runner == nil { return echo.NewHTTPError(http.StatusInternalServerError, "conversation runner not configured") @@ -244,6 +260,92 @@ func writeSSEJSON(writer *bufio.Writer, flusher http.Flusher, payload any) error return writeSSEData(writer, flusher, string(data)) } +func (h *MessageHandler) ingestInlineAttachments(ctx context.Context, botID string, attachments []conversation.ChatAttachment) ([]conversation.ChatAttachment, error) { + if len(attachments) == 0 || h.mediaService == nil { + return attachments, nil + } + result := make([]conversation.ChatAttachment, 0, len(attachments)) + for _, att := range attachments { + item := att + if strings.TrimSpace(item.AssetID) != "" || strings.TrimSpace(item.Base64) == "" { + result = append(result, item) + continue + } + mediaType := mapAttachmentMediaType(item.Type) + maxBytes := media.MaxAssetBytes + raw, err := decodeAttachmentBase64(item.Base64, maxBytes) + if err != nil { + return nil, fmt.Errorf("invalid attachment base64: %w", err) + } + asset, err := h.mediaService.Ingest(ctx, media.IngestInput{ + BotID: botID, + MediaType: mediaType, + Mime: strings.TrimSpace(item.Mime), + OriginalName: strings.TrimSpace(item.Name), + Metadata: item.Metadata, + Reader: raw, + MaxBytes: maxBytes, + }) + if err != nil { + return nil, fmt.Errorf("ingest attachment failed: %w", err) + } + item.AssetID = asset.ID + item.Path = h.mediaService.AccessPath(asset) + mime := strings.TrimSpace(item.Mime) + if mime == "" { + mime = strings.TrimSpace(asset.Mime) + } + item.Base64 = normalizeBase64DataURL(item.Base64, mime) + if strings.TrimSpace(item.Mime) == "" { + item.Mime = asset.Mime + } + result = append(result, item) + } + return result, nil +} + +func decodeAttachmentBase64(input string, maxBytes int64) (io.Reader, error) { + value := strings.TrimSpace(input) + if value == "" { + 
return nil, fmt.Errorf("base64 payload is empty") + } + if strings.HasPrefix(strings.ToLower(value), "data:") { + if idx := strings.Index(value, ","); idx >= 0 { + value = value[idx+1:] + } + } + decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(value)) + return io.LimitReader(decoder, maxBytes+1), nil +} + +func normalizeBase64DataURL(input, mime string) string { + value := strings.TrimSpace(input) + if value == "" { + return "" + } + if strings.HasPrefix(strings.ToLower(value), "data:") { + return value + } + mime = strings.TrimSpace(mime) + if mime == "" { + mime = "application/octet-stream" + } + return "data:" + mime + ";base64," + value +} + +func mapAttachmentMediaType(t string) media.MediaType { + switch strings.ToLower(strings.TrimSpace(t)) { + case "image", "gif": + return media.MediaTypeImage + case "audio", "voice": + return media.MediaTypeAudio + case "video": + return media.MediaTypeVideo + default: + return media.MediaTypeFile + } +} + func parseSinceParam(raw string) (time.Time, bool, error) { trimmed := strings.TrimSpace(raw) if trimmed == "" { @@ -558,3 +660,54 @@ func (h *MessageHandler) requireReadable(ctx context.Context, conversationID, ch } return nil } + +// ServeMedia streams a media asset by bot_id + asset_id with read-access authorization. 
+func (h *MessageHandler) ServeMedia(c echo.Context) error { + channelIdentityID, err := h.requireChannelIdentityID(c) + if err != nil { + return err + } + botID := strings.TrimSpace(c.Param("bot_id")) + if botID == "" { + return echo.NewHTTPError(http.StatusBadRequest, "bot id is required") + } + assetID := strings.TrimSpace(c.Param("asset_id")) + if assetID == "" { + return echo.NewHTTPError(http.StatusBadRequest, "asset id is required") + } + if _, err := h.authorizeBotAccess(c.Request().Context(), channelIdentityID, botID); err != nil { + return err + } + if err := h.requireReadable(c.Request().Context(), botID, channelIdentityID); err != nil { + return err + } + if h.mediaService == nil { + return echo.NewHTTPError(http.StatusInternalServerError, "media service not configured") + } + reader, asset, err := h.mediaService.Open(c.Request().Context(), assetID) + if err != nil { + if errors.Is(err, media.ErrAssetNotFound) { + return echo.NewHTTPError(http.StatusNotFound, "asset not found") + } + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + defer reader.Close() + // Verify asset belongs to the authorized bot. 
+ if strings.TrimSpace(asset.BotID) != botID { + return echo.NewHTTPError(http.StatusForbidden, "asset does not belong to bot") + } + contentType := asset.Mime + if contentType == "" { + contentType = "application/octet-stream" + } + c.Response().Header().Set("Content-Type", contentType) + c.Response().Header().Set("Cache-Control", "private, max-age=86400") + if asset.OriginalName != "" { + c.Response().Header().Set("Content-Disposition", fmt.Sprintf("inline; filename=%q", asset.OriginalName)) + } + c.Response().WriteHeader(http.StatusOK) + if _, err := io.Copy(c.Response().Writer, reader); err != nil { + h.logger.Warn("serve media stream failed", slog.Any("error", err)) + } + return nil +} diff --git a/internal/handlers/message_test.go b/internal/handlers/message_test.go new file mode 100644 index 00000000..da40d056 --- /dev/null +++ b/internal/handlers/message_test.go @@ -0,0 +1,57 @@ +package handlers + +import ( + "encoding/base64" + "io" + "strings" + "testing" +) + +func TestDecodeAttachmentBase64(t *testing.T) { + t.Parallel() + + data := []byte("hello") + encoded := base64.StdEncoding.EncodeToString(data) + decoded, err := decodeAttachmentBase64(encoded, 16) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got, err := io.ReadAll(decoded) + if err != nil { + t.Fatalf("read decoded failed: %v", err) + } + if string(got) != "hello" { + t.Fatalf("unexpected decoded value: %q", string(got)) + } +} + +func TestDecodeAttachmentBase64DataURL(t *testing.T) { + t.Parallel() + + encoded := "data:text/plain;base64," + base64.StdEncoding.EncodeToString([]byte("payload")) + decoded, err := decodeAttachmentBase64(encoded, 32) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + got, err := io.ReadAll(decoded) + if err != nil { + t.Fatalf("read decoded failed: %v", err) + } + if string(got) != "payload" { + t.Fatalf("unexpected decoded value: %q", string(got)) + } +} + +func TestNormalizeBase64DataURL(t *testing.T) { + t.Parallel() + + raw := 
base64.StdEncoding.EncodeToString([]byte(strings.Repeat("a", 4))) + got := normalizeBase64DataURL(raw, "image/png") + if !strings.HasPrefix(got, "data:image/png;base64,") { + t.Fatalf("expected data url prefix, got %q", got) + } + existing := "data:text/plain;base64,AAA=" + if normalizeBase64DataURL(existing, "image/png") != existing { + t.Fatalf("expected existing data url unchanged") + } +} diff --git a/internal/handlers/users.go b/internal/handlers/users.go index c258e37f..637980c3 100644 --- a/internal/handlers/users.go +++ b/internal/handlers/users.go @@ -25,7 +25,8 @@ type UsersHandler struct { channelIdentityService *identities.Service botService *bots.Service routeService route.Service - channelService *channel.Service + channelStore *channel.Store + channelLifecycle *channel.Lifecycle channelManager *channel.Manager registry *channel.Registry logger *slog.Logger @@ -37,7 +38,7 @@ type listMyIdentitiesResponse struct { } // NewUsersHandler creates a UsersHandler with channel identity support. 
-func NewUsersHandler(log *slog.Logger, service *accounts.Service, channelIdentityService *identities.Service, botService *bots.Service, routeService route.Service, channelService *channel.Service, channelManager *channel.Manager, registry *channel.Registry) *UsersHandler { +func NewUsersHandler(log *slog.Logger, service *accounts.Service, channelIdentityService *identities.Service, botService *bots.Service, routeService route.Service, channelStore *channel.Store, channelLifecycle *channel.Lifecycle, channelManager *channel.Manager, registry *channel.Registry) *UsersHandler { if log == nil { log = slog.Default() } @@ -46,7 +47,8 @@ func NewUsersHandler(log *slog.Logger, service *accounts.Service, channelIdentit channelIdentityService: channelIdentityService, botService: botService, routeService: routeService, - channelService: channelService, + channelStore: channelStore, + channelLifecycle: channelLifecycle, channelManager: channelManager, registry: registry, logger: log.With(slog.String("handler", "users")), @@ -70,8 +72,6 @@ func (h *UsersHandler) Register(e *echo.Echo) { botGroup.GET("", h.ListBots) botGroup.GET("/:id", h.GetBot) botGroup.GET("/:id/checks", h.ListBotChecks) - botGroup.GET("/:id/checks/keys", h.ListBotCheckKeys) - botGroup.GET("/:id/checks/run/:key", h.RunBotCheck) botGroup.PUT("/:id", h.UpdateBot) botGroup.PUT("/:id/owner", h.TransferBotOwner) botGroup.DELETE("/:id", h.DeleteBot) @@ -80,6 +80,8 @@ func (h *UsersHandler) Register(e *echo.Echo) { botGroup.DELETE("/:id/members/:user_id", h.DeleteBotMember) botGroup.GET("/:id/channel/:platform", h.GetBotChannelConfig) botGroup.PUT("/:id/channel/:platform", h.UpsertBotChannelConfig) + botGroup.PATCH("/:id/channel/:platform/status", h.UpdateBotChannelStatus) + botGroup.DELETE("/:id/channel/:platform", h.DeleteBotChannelConfig) botGroup.POST("/:id/channel/:platform/send", h.SendBotMessage) botGroup.POST("/:id/channel/:platform/send_chat", h.SendBotMessageSession) } @@ -544,63 +546,6 @@ func (h 
*UsersHandler) ListBotChecks(c echo.Context) error { return c.JSON(http.StatusOK, bots.ListChecksResponse{Items: items}) } -// ListBotCheckKeys godoc -// @Summary List available check keys -// @Description Returns all check keys available for a bot (builtin + MCP connections) -// @Tags bots -// @Param id path string true "Bot ID" -// @Success 200 {object} bots.ListCheckKeysResponse -// @Router /bots/{id}/checks/keys [get] -func (h *UsersHandler) ListBotCheckKeys(c echo.Context) error { - channelIdentityID, err := h.requireChannelIdentityID(c) - if err != nil { - return err - } - botID := strings.TrimSpace(c.Param("id")) - if botID == "" { - return echo.NewHTTPError(http.StatusBadRequest, "bot id is required") - } - if _, err := h.authorizeBotAccess(c.Request().Context(), channelIdentityID, botID); err != nil { - return err - } - keys, err := h.botService.ListCheckKeys(c.Request().Context(), botID) - if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, bots.ListCheckKeysResponse{Keys: keys}) -} - -// RunBotCheck godoc -// @Summary Run a single bot check -// @Description Evaluate one check key for a bot -// @Tags bots -// @Param id path string true "Bot ID" -// @Param key path string true "Check key" -// @Success 200 {object} bots.BotCheck -// @Router /bots/{id}/checks/run/{key} [get] -func (h *UsersHandler) RunBotCheck(c echo.Context) error { - channelIdentityID, err := h.requireChannelIdentityID(c) - if err != nil { - return err - } - botID := strings.TrimSpace(c.Param("id")) - if botID == "" { - return echo.NewHTTPError(http.StatusBadRequest, "bot id is required") - } - if _, err := h.authorizeBotAccess(c.Request().Context(), channelIdentityID, botID); err != nil { - return err - } - key := strings.TrimSpace(c.Param("key")) - if key == "" { - return echo.NewHTTPError(http.StatusBadRequest, "check key is required") - } - result, err := h.botService.RunCheck(c.Request().Context(), botID, key) - if 
err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) - } - return c.JSON(http.StatusOK, result) -} - // UpdateBot godoc // @Summary Update bot details // @Description Update bot profile (owner/admin only) @@ -847,7 +792,10 @@ func (h *UsersHandler) GetBotChannelConfig(c echo.Context) error { if err != nil { return echo.NewHTTPError(http.StatusBadRequest, err.Error()) } - resp, err := h.channelService.ResolveEffectiveConfig(c.Request().Context(), botID, channelType) + if h.channelStore == nil { + return echo.NewHTTPError(http.StatusInternalServerError, "channel store not configured") + } + resp, err := h.channelStore.ResolveEffectiveConfig(c.Request().Context(), botID, channelType) if err != nil { if strings.Contains(err.Error(), "not found") { return echo.NewHTTPError(http.StatusNotFound, err.Error()) @@ -893,13 +841,106 @@ func (h *UsersHandler) UpsertBotChannelConfig(c echo.Context) error { if req.Credentials == nil { req.Credentials = map[string]any{} } - resp, err := h.channelService.UpsertConfig(c.Request().Context(), botID, channelType, req) + if h.channelLifecycle == nil { + return echo.NewHTTPError(http.StatusInternalServerError, "channel lifecycle not configured") + } + resp, err := h.channelLifecycle.UpsertBotChannelConfig(c.Request().Context(), botID, channelType, req) if err != nil { - return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + status := http.StatusInternalServerError + if errors.Is(err, channel.ErrEnableChannelFailed) { + status = http.StatusBadRequest + } + return echo.NewHTTPError(status, err.Error()) } return c.JSON(http.StatusOK, resp) } +// UpdateBotChannelStatus godoc +// @Summary Update bot channel status +// @Description Update bot channel enabled/disabled status +// @Tags bots +// @Param id path string true "Bot ID" +// @Param platform path string true "Channel platform" +// @Param payload body channel.UpdateChannelStatusRequest true "Channel status payload" +// @Success 200 {object} 
channel.ChannelConfig +// @Failure 400 {object} ErrorResponse +// @Failure 403 {object} ErrorResponse +// @Failure 404 {object} ErrorResponse +// @Failure 500 {object} ErrorResponse +// @Router /bots/{id}/channel/{platform}/status [patch] +func (h *UsersHandler) UpdateBotChannelStatus(c echo.Context) error { + channelIdentityID, err := h.requireChannelIdentityID(c) + if err != nil { + return err + } + botID := strings.TrimSpace(c.Param("id")) + if botID == "" { + return echo.NewHTTPError(http.StatusBadRequest, "bot id is required") + } + if _, err := h.authorizeBotAccess(c.Request().Context(), channelIdentityID, botID); err != nil { + return err + } + channelType, err := h.registry.ParseChannelType(c.Param("platform")) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + var req channel.UpdateChannelStatusRequest + if err := c.Bind(&req); err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + if h.channelLifecycle == nil { + return echo.NewHTTPError(http.StatusInternalServerError, "channel lifecycle not configured") + } + resp, err := h.channelLifecycle.SetBotChannelStatus(c.Request().Context(), botID, channelType, req.Disabled) + if err != nil { + if errors.Is(err, channel.ErrChannelConfigNotFound) { + return echo.NewHTTPError(http.StatusNotFound, err.Error()) + } + status := http.StatusInternalServerError + if errors.Is(err, channel.ErrEnableChannelFailed) { + status = http.StatusBadRequest + } + return echo.NewHTTPError(status, err.Error()) + } + return c.JSON(http.StatusOK, resp) +} + +// DeleteBotChannelConfig godoc +// @Summary Delete bot channel config +// @Description Remove bot channel configuration +// @Tags bots +// @Param id path string true "Bot ID" +// @Param platform path string true "Channel platform" +// @Success 204 "No Content" +// @Failure 400 {object} ErrorResponse +// @Failure 403 {object} ErrorResponse +// @Failure 500 {object} ErrorResponse +// @Router /bots/{id}/channel/{platform} 
[delete] +func (h *UsersHandler) DeleteBotChannelConfig(c echo.Context) error { + channelIdentityID, err := h.requireChannelIdentityID(c) + if err != nil { + return err + } + botID := strings.TrimSpace(c.Param("id")) + if botID == "" { + return echo.NewHTTPError(http.StatusBadRequest, "bot id is required") + } + if _, err := h.authorizeBotAccess(c.Request().Context(), channelIdentityID, botID); err != nil { + return err + } + channelType, err := h.registry.ParseChannelType(c.Param("platform")) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, err.Error()) + } + if h.channelLifecycle == nil { + return echo.NewHTTPError(http.StatusInternalServerError, "channel lifecycle not configured") + } + if err := h.channelLifecycle.DeleteBotChannelConfig(c.Request().Context(), botID, channelType); err != nil { + return echo.NewHTTPError(http.StatusInternalServerError, err.Error()) + } + return c.NoContent(http.StatusNoContent) +} + // SendBotMessage godoc // @Summary Send message via bot channel // @Description Send a message using bot channel configuration diff --git a/internal/healthcheck/adapter.go b/internal/healthcheck/adapter.go new file mode 100644 index 00000000..ed969989 --- /dev/null +++ b/internal/healthcheck/adapter.go @@ -0,0 +1,39 @@ +package healthcheck + +import ( + "context" + + "github.com/memohai/memoh/internal/bots" +) + +// RuntimeCheckerAdapter bridges Checker to bots.RuntimeChecker. +type RuntimeCheckerAdapter struct { + checker Checker +} + +// NewRuntimeCheckerAdapter creates a runtime checker bridge. +func NewRuntimeCheckerAdapter(checker Checker) *RuntimeCheckerAdapter { + return &RuntimeCheckerAdapter{checker: checker} +} + +// ListChecks evaluates checks and maps healthcheck results to bots check shape. 
+func (a *RuntimeCheckerAdapter) ListChecks(ctx context.Context, botID string) []bots.BotCheck { + if a == nil || a.checker == nil { + return []bots.BotCheck{} + } + items := a.checker.ListChecks(ctx, botID) + result := make([]bots.BotCheck, 0, len(items)) + for _, item := range items { + result = append(result, bots.BotCheck{ + ID: item.ID, + Type: item.Type, + TitleKey: item.TitleKey, + Subtitle: item.Subtitle, + Status: item.Status, + Summary: item.Summary, + Detail: item.Detail, + Metadata: item.Metadata, + }) + } + return result +} diff --git a/internal/healthcheck/adapter_test.go b/internal/healthcheck/adapter_test.go new file mode 100644 index 00000000..489dd66b --- /dev/null +++ b/internal/healthcheck/adapter_test.go @@ -0,0 +1,57 @@ +package healthcheck + +import ( + "context" + "testing" +) + +type testChecker struct { + items []CheckResult +} + +func (c *testChecker) ListChecks(ctx context.Context, botID string) []CheckResult { + return c.items +} + +func TestRuntimeCheckerAdapterListChecks(t *testing.T) { + t.Parallel() + + adapter := NewRuntimeCheckerAdapter(&testChecker{ + items: []CheckResult{ + { + ID: "mcp.connection.conn-1", + Type: "mcp.connection", + TitleKey: "bots.checks.titles.mcpConnection", + Subtitle: "demo", + Status: "ok", + Summary: "healthy", + Detail: "", + Metadata: map[string]any{"tool_count": 2}, + }, + }, + }) + + items := adapter.ListChecks(context.Background(), "bot-1") + if len(items) != 1 { + t.Fatalf("expected 1 item, got %d", len(items)) + } + if items[0].ID != "mcp.connection.conn-1" { + t.Fatalf("unexpected id: %s", items[0].ID) + } + if items[0].TitleKey != "bots.checks.titles.mcpConnection" { + t.Fatalf("unexpected title key: %s", items[0].TitleKey) + } + if items[0].Status != "ok" { + t.Fatalf("unexpected status: %s", items[0].Status) + } +} + +func TestRuntimeCheckerAdapterNilChecker(t *testing.T) { + t.Parallel() + + var adapter *RuntimeCheckerAdapter + items := adapter.ListChecks(context.Background(), "bot-1") + if 
len(items) != 0 { + t.Fatalf("expected empty items, got %d", len(items)) + } +} diff --git a/internal/healthcheck/checker.go b/internal/healthcheck/checker.go new file mode 100644 index 00000000..b7547149 --- /dev/null +++ b/internal/healthcheck/checker.go @@ -0,0 +1,31 @@ +package healthcheck + +import "context" + +const ( + // StatusOK indicates check passed. + StatusOK = "ok" + // StatusWarn indicates check completed with warning. + StatusWarn = "warn" + // StatusError indicates check failed. + StatusError = "error" + // StatusUnknown indicates check result is not yet known. + StatusUnknown = "unknown" +) + +// CheckResult is one runtime check item produced by a checker. +type CheckResult struct { + ID string + Type string + TitleKey string + Subtitle string + Status string + Summary string + Detail string + Metadata map[string]any +} + +// Checker evaluates one or more runtime checks for a bot. +type Checker interface { + ListChecks(ctx context.Context, botID string) []CheckResult +} diff --git a/internal/healthcheck/checkers/channel/checker.go b/internal/healthcheck/checkers/channel/checker.go new file mode 100644 index 00000000..3200ce32 --- /dev/null +++ b/internal/healthcheck/checkers/channel/checker.go @@ -0,0 +1,137 @@ +package channelchecker + +import ( + "context" + "fmt" + "log/slog" + "sort" + "strings" + + "github.com/memohai/memoh/internal/channel" + "github.com/memohai/memoh/internal/healthcheck" +) + +const ( + checkTypeChannelConnection = "channel.connection" + titleKeyChannelConnection = "bots.checks.titles.channelConnection" +) + +// ConnectionObserver reads runtime channel connection statuses. +type ConnectionObserver interface { + ConnectionStatusesByBot(botID string) []channel.ConnectionStatus +} + +// Checker evaluates channel connection health checks. +type Checker struct { + logger *slog.Logger + observer ConnectionObserver +} + +// NewChecker creates a channel health checker. 
+func NewChecker(log *slog.Logger, observer ConnectionObserver) *Checker { + if log == nil { + log = slog.Default() + } + return &Checker{ + logger: log.With(slog.String("checker", "healthcheck_channel")), + observer: observer, + } +} + +// ListChecks evaluates channel connection statuses for a bot. +func (c *Checker) ListChecks(ctx context.Context, botID string) []healthcheck.CheckResult { + if ctx == nil { + ctx = context.Background() + } + // Connection observer is context-free; best effort early cancellation guard. + if err := ctx.Err(); err != nil { + return []healthcheck.CheckResult{} + } + botID = strings.TrimSpace(botID) + if botID == "" { + return []healthcheck.CheckResult{} + } + if c.observer == nil { + if c.logger != nil { + c.logger.Warn( + "channel healthcheck dependency is unavailable", + slog.String("bot_id", botID), + ) + } + return []healthcheck.CheckResult{ + { + ID: checkTypeChannelConnection + ".service", + Type: checkTypeChannelConnection, + TitleKey: titleKeyChannelConnection, + Status: healthcheck.StatusWarn, + Summary: "Channel checker service is not available.", + Detail: "connection observer is nil", + }, + } + } + + statuses := c.observer.ConnectionStatusesByBot(botID) + if len(statuses) == 0 { + return []healthcheck.CheckResult{} + } + sort.Slice(statuses, func(i, j int) bool { + if statuses[i].ChannelType == statuses[j].ChannelType { + return statuses[i].ConfigID < statuses[j].ConfigID + } + return statuses[i].ChannelType < statuses[j].ChannelType + }) + + checks := make([]healthcheck.CheckResult, 0, len(statuses)) + for idx, status := range statuses { + channelType := strings.TrimSpace(status.ChannelType.String()) + if channelType == "" { + channelType = "unknown" + } + checkID := buildCheckID(status.ConfigID, idx) + subtitle := buildSubtitle(channelType, status.ConfigID) + item := healthcheck.CheckResult{ + ID: checkID, + Type: checkTypeChannelConnection, + TitleKey: titleKeyChannelConnection, + Subtitle: subtitle, + Status: 
healthcheck.StatusError, + Summary: fmt.Sprintf("Channel %s connection is down.", channelType), + Metadata: map[string]any{ + "config_id": status.ConfigID, + "channel_type": channelType, + "running": status.Running, + }, + } + if status.UpdatedAt.Unix() > 0 { + item.Metadata["updated_at"] = status.UpdatedAt.UTC().Format("2006-01-02T15:04:05Z") + } + if status.Running { + item.Status = healthcheck.StatusOK + item.Summary = fmt.Sprintf("Channel %s is connected.", channelType) + } else if strings.TrimSpace(status.LastError) != "" { + item.Summary = fmt.Sprintf("Channel %s connection failed.", channelType) + item.Detail = strings.TrimSpace(status.LastError) + } + checks = append(checks, item) + } + return checks +} + +func buildCheckID(configID string, idx int) string { + configID = strings.TrimSpace(configID) + if configID != "" { + return checkTypeChannelConnection + "." + configID + } + return fmt.Sprintf("%s.unknown_%d", checkTypeChannelConnection, idx+1) +} + +func buildSubtitle(channelType, configID string) string { + configID = strings.TrimSpace(configID) + if configID == "" { + return channelType + } + if len(configID) > 8 { + configID = configID[:8] + } + return channelType + " (" + configID + ")" +} diff --git a/internal/healthcheck/checkers/channel/checker_test.go b/internal/healthcheck/checkers/channel/checker_test.go new file mode 100644 index 00000000..ebe5ca3e --- /dev/null +++ b/internal/healthcheck/checkers/channel/checker_test.go @@ -0,0 +1,89 @@ +package channelchecker + +import ( + "context" + "io" + "log/slog" + "testing" + "time" + + "github.com/memohai/memoh/internal/channel" +) + +type fakeConnectionObserver struct { + items []channel.ConnectionStatus +} + +func (f *fakeConnectionObserver) ConnectionStatusesByBot(botID string) []channel.ConnectionStatus { + return f.items +} + +func newTestLogger() *slog.Logger { + return slog.New(slog.NewTextHandler(io.Discard, nil)) +} + +func TestCheckerListChecks(t *testing.T) { + t.Parallel() + + now := 
time.Now().UTC() + checker := NewChecker(newTestLogger(), &fakeConnectionObserver{ + items: []channel.ConnectionStatus{ + { + ConfigID: "cfg-1", + BotID: "bot-1", + ChannelType: channel.ChannelType("telegram"), + Running: true, + UpdatedAt: now, + }, + { + ConfigID: "cfg-2", + BotID: "bot-1", + ChannelType: channel.ChannelType("feishu"), + Running: false, + LastError: "connect timeout", + UpdatedAt: now, + }, + }, + }) + + items := checker.ListChecks(context.Background(), "bot-1") + if len(items) != 2 { + t.Fatalf("expected 2 checks, got %d", len(items)) + } + + var okFound bool + var errFound bool + for _, item := range items { + if item.ID == "channel.connection.cfg-1" { + okFound = true + if item.Status != "ok" { + t.Fatalf("expected ok for cfg-1, got %s", item.Status) + } + } + if item.ID == "channel.connection.cfg-2" { + errFound = true + if item.Status != "error" { + t.Fatalf("expected error for cfg-2, got %s", item.Status) + } + if item.Detail != "connect timeout" { + t.Fatalf("unexpected detail: %s", item.Detail) + } + } + } + if !okFound || !errFound { + t.Fatalf("expected checks for both configs") + } +} + +func TestCheckerNilObserver(t *testing.T) { + t.Parallel() + + checker := NewChecker(newTestLogger(), nil) + items := checker.ListChecks(context.Background(), "bot-1") + if len(items) != 1 { + t.Fatalf("expected service warning check, got %d", len(items)) + } + if items[0].Status != "warn" { + t.Fatalf("expected warn status, got %s", items[0].Status) + } +} diff --git a/internal/healthcheck/checkers/mcp/checker.go b/internal/healthcheck/checkers/mcp/checker.go new file mode 100644 index 00000000..0145bc13 --- /dev/null +++ b/internal/healthcheck/checkers/mcp/checker.go @@ -0,0 +1,222 @@ +package mcpchecker + +import ( + "context" + "fmt" + "log/slog" + "sort" + "strconv" + "strings" + "time" + + "github.com/memohai/memoh/internal/healthcheck" + "github.com/memohai/memoh/internal/mcp" +) + +const ( + checkTypeMCPConnection = "mcp.connection" + 
titleKeyMCPConnection = "bots.checks.titles.mcpConnection" + defaultCheckTimeout = 8 * time.Second + fallbackConnectionLabel = "MCP" +) + +// ConnectionLister lists active MCP connections for a bot. +type ConnectionLister interface { + ListActiveByBot(ctx context.Context, botID string) ([]mcp.Connection, error) +} + +// ToolLister lists tools for a bot session. +type ToolLister interface { + ListTools(ctx context.Context, session mcp.ToolSessionContext) ([]mcp.ToolDescriptor, error) +} + +// Checker evaluates MCP connection health checks. +type Checker struct { + logger *slog.Logger + connections ConnectionLister + tools ToolLister + timeout time.Duration +} + +// NewChecker creates an MCP health checker. +func NewChecker(log *slog.Logger, connections ConnectionLister, tools ToolLister) *Checker { + if log == nil { + log = slog.Default() + } + return &Checker{ + logger: log.With(slog.String("checker", "healthcheck_mcp")), + connections: connections, + tools: tools, + timeout: defaultCheckTimeout, + } +} + +// ListChecks evaluates all active MCP connections for a bot. 
+func (c *Checker) ListChecks(ctx context.Context, botID string) []healthcheck.CheckResult { + if ctx == nil { + ctx = context.Background() + } + botID = strings.TrimSpace(botID) + if botID == "" { + return []healthcheck.CheckResult{} + } + if c.connections == nil || c.tools == nil { + if c.logger != nil { + c.logger.Warn( + "mcp healthcheck dependencies are unavailable", + slog.String("bot_id", botID), + slog.Bool("has_connection_lister", c.connections != nil), + slog.Bool("has_tool_lister", c.tools != nil), + ) + } + return []healthcheck.CheckResult{ + { + ID: checkTypeMCPConnection + ".service", + Type: checkTypeMCPConnection, + TitleKey: titleKeyMCPConnection, + Status: healthcheck.StatusWarn, + Summary: "MCP checker service is not available.", + Detail: "connection lister or tool lister is nil", + }, + } + } + + items, err := c.connections.ListActiveByBot(ctx, botID) + if err != nil { + if c.logger != nil { + c.logger.Warn( + "mcp healthcheck list connections failed", + slog.String("bot_id", botID), + slog.Any("error", err), + ) + } + return []healthcheck.CheckResult{ + { + ID: checkTypeMCPConnection + ".list", + Type: checkTypeMCPConnection, + TitleKey: titleKeyMCPConnection, + Status: healthcheck.StatusError, + Summary: "Failed to list MCP connections.", + Detail: err.Error(), + }, + } + } + if len(items) == 0 { + return []healthcheck.CheckResult{} + } + + sort.Slice(items, func(i, j int) bool { + leftName := strings.TrimSpace(items[i].Name) + rightName := strings.TrimSpace(items[j].Name) + if leftName == rightName { + return strings.TrimSpace(items[i].ID) < strings.TrimSpace(items[j].ID) + } + return leftName < rightName + }) + + probeCtx, cancel := context.WithTimeout(ctx, c.timeout) + defer cancel() + + tools, err := c.tools.ListTools(probeCtx, mcp.ToolSessionContext{BotID: botID}) + if err != nil { + if c.logger != nil { + c.logger.Warn( + "mcp healthcheck list tools failed", + slog.String("bot_id", botID), + slog.Any("error", err), + ) + } + checks := 
make([]healthcheck.CheckResult, 0, len(items)) + for idx, conn := range items { + checks = append(checks, healthcheck.CheckResult{ + ID: buildCheckID(conn, idx), + Type: checkTypeMCPConnection, + TitleKey: titleKeyMCPConnection, + Subtitle: displayConnectionName(conn.Name), + Status: healthcheck.StatusError, + Summary: fmt.Sprintf("MCP server %q is not reachable.", displayConnectionName(conn.Name)), + Detail: err.Error(), + Metadata: map[string]any{ + "connection_id": strings.TrimSpace(conn.ID), + "name": strings.TrimSpace(conn.Name), + "type": strings.TrimSpace(conn.Type), + }, + }) + } + return checks + } + + results := make([]healthcheck.CheckResult, 0, len(items)) + for idx, conn := range items { + connName := displayConnectionName(conn.Name) + prefix := sanitizeToolPrefix(conn.Name) + toolCount := 0 + if prefix != "" { + toolPrefix := prefix + "." + for _, tool := range tools { + if strings.HasPrefix(strings.TrimSpace(tool.Name), toolPrefix) { + toolCount++ + } + } + } + + item := healthcheck.CheckResult{ + ID: buildCheckID(conn, idx), + Type: checkTypeMCPConnection, + TitleKey: titleKeyMCPConnection, + Subtitle: connName, + Status: healthcheck.StatusWarn, + Summary: fmt.Sprintf("MCP server %q is reachable but no tools found.", connName), + Detail: "The server responded but exposed no tools for this connection.", + Metadata: map[string]any{ + "connection_id": strings.TrimSpace(conn.ID), + "name": strings.TrimSpace(conn.Name), + "type": strings.TrimSpace(conn.Type), + "tool_count": toolCount, + }, + } + if toolCount > 0 { + item.Status = healthcheck.StatusOK + item.Summary = fmt.Sprintf("MCP server %q is healthy (%d tools).", connName, toolCount) + item.Detail = "" + } + results = append(results, item) + } + return results +} + +func buildCheckID(conn mcp.Connection, idx int) string { + connectionID := strings.TrimSpace(conn.ID) + if connectionID != "" { + return checkTypeMCPConnection + "." 
+ connectionID + } + return checkTypeMCPConnection + ".unknown_" + strconv.Itoa(idx+1) +} + +func displayConnectionName(raw string) string { + name := strings.TrimSpace(raw) + if name == "" { + return fallbackConnectionLabel + } + return name +} + +func sanitizeToolPrefix(raw string) string { + raw = strings.TrimSpace(strings.ToLower(raw)) + if raw == "" { + return "mcp" + } + builder := strings.Builder{} + for _, ch := range raw { + if (ch >= 'a' && ch <= 'z') || (ch >= '0' && ch <= '9') || ch == '_' || ch == '-' { + builder.WriteRune(ch) + continue + } + builder.WriteRune('_') + } + normalized := strings.Trim(builder.String(), "._-") + if normalized == "" { + return "mcp" + } + return normalized +} diff --git a/internal/healthcheck/checkers/mcp/checker_test.go b/internal/healthcheck/checkers/mcp/checker_test.go new file mode 100644 index 00000000..068d3a0b --- /dev/null +++ b/internal/healthcheck/checkers/mcp/checker_test.go @@ -0,0 +1,112 @@ +package mcpchecker + +import ( + "context" + "errors" + "io" + "log/slog" + "testing" + + "github.com/memohai/memoh/internal/mcp" +) + +type fakeConnectionLister struct { + items []mcp.Connection + err error +} + +func (f *fakeConnectionLister) ListActiveByBot(ctx context.Context, botID string) ([]mcp.Connection, error) { + if f.err != nil { + return nil, f.err + } + return f.items, nil +} + +type fakeToolLister struct { + items []mcp.ToolDescriptor + err error +} + +func (f *fakeToolLister) ListTools(ctx context.Context, session mcp.ToolSessionContext) ([]mcp.ToolDescriptor, error) { + if f.err != nil { + return nil, f.err + } + return f.items, nil +} + +func newTestLogger() *slog.Logger { + return slog.New(slog.NewTextHandler(io.Discard, nil)) +} + +func TestCheckerListChecks(t *testing.T) { + t.Parallel() + + checker := NewChecker( + newTestLogger(), + &fakeConnectionLister{ + items: []mcp.Connection{ + {ID: "conn-1", Name: "Hello World", Type: "http"}, + {ID: "conn-2", Name: "NoTools", Type: "sse"}, + }, + }, + 
&fakeToolLister{ + items: []mcp.ToolDescriptor{ + {Name: "hello_world.ping"}, + {Name: "hello_world.echo"}, + }, + }, + ) + + items := checker.ListChecks(context.Background(), "bot-1") + if len(items) != 2 { + t.Fatalf("expected 2 checks, got %d", len(items)) + } + if items[0].ID != "mcp.connection.conn-1" && items[1].ID != "mcp.connection.conn-1" { + t.Fatalf("expected check id for conn-1") + } + + var healthyFound bool + var noToolsFound bool + for _, item := range items { + if item.Subtitle == "Hello World" { + healthyFound = true + if item.Status != "ok" { + t.Fatalf("expected ok status for Hello World, got %s", item.Status) + } + } + if item.Subtitle == "NoTools" { + noToolsFound = true + if item.Status != "warn" { + t.Fatalf("expected warn status for NoTools, got %s", item.Status) + } + } + } + if !healthyFound || !noToolsFound { + t.Fatalf("expected both connection checks") + } +} + +func TestCheckerListChecksToolListError(t *testing.T) { + t.Parallel() + + checker := NewChecker( + newTestLogger(), + &fakeConnectionLister{ + items: []mcp.Connection{ + {ID: "conn-1", Name: "ErrConn", Type: "http"}, + }, + }, + &fakeToolLister{err: errors.New("gateway down")}, + ) + + items := checker.ListChecks(context.Background(), "bot-1") + if len(items) != 1 { + t.Fatalf("expected 1 check, got %d", len(items)) + } + if items[0].Status != "error" { + t.Fatalf("expected error status, got %s", items[0].Status) + } + if items[0].Detail == "" { + t.Fatalf("expected non-empty detail") + } +} diff --git a/internal/mcp/checker.go b/internal/mcp/checker.go deleted file mode 100644 index 8ccb529c..00000000 --- a/internal/mcp/checker.go +++ /dev/null @@ -1,140 +0,0 @@ -package mcp - -import ( - "context" - "fmt" - "log/slog" - "strings" - "time" - - "github.com/memohai/memoh/internal/bots" -) - -const mcpCheckTimeout = 8 * time.Second - -// ConnectionChecker implements bots.RuntimeChecker for MCP connections. 
-type ConnectionChecker struct { - logger *slog.Logger - connections *ConnectionService - gateway *ToolGatewayService -} - -// NewConnectionChecker creates an MCP runtime checker. -func NewConnectionChecker(log *slog.Logger, connections *ConnectionService, gateway *ToolGatewayService) *ConnectionChecker { - if log == nil { - log = slog.Default() - } - return &ConnectionChecker{ - logger: log.With(slog.String("checker", "mcp")), - connections: connections, - gateway: gateway, - } -} - -// CheckKeys returns check keys for each active MCP connection of a bot. -func (c *ConnectionChecker) CheckKeys(ctx context.Context, botID string) []string { - if c.connections == nil { - return nil - } - items, err := c.connections.ListActiveByBot(ctx, botID) - if err != nil { - c.logger.Warn("mcp checker: list connections failed", - slog.String("bot_id", botID), slog.Any("error", err)) - return nil - } - keys := make([]string, 0, len(items)) - for _, conn := range items { - keys = append(keys, "mcp."+sanitizeCheckKey(conn.Name)) - } - return keys -} - -// RunCheck probes a single MCP connection identified by check key. 
-func (c *ConnectionChecker) RunCheck(ctx context.Context, botID, key string) bots.BotCheck { - connName := strings.TrimPrefix(key, "mcp.") - check := bots.BotCheck{ - CheckKey: key, - Status: bots.BotCheckStatusUnknown, - Summary: fmt.Sprintf("MCP server %q is being checked.", connName), - } - - if c.connections == nil || c.gateway == nil { - check.Status = bots.BotCheckStatusWarn - check.Summary = fmt.Sprintf("MCP server %q cannot be checked.", connName) - check.Detail = "service not available" - return check - } - - conn, err := c.findConnectionByKey(ctx, botID, connName) - if err != nil { - check.Status = bots.BotCheckStatusError - check.Summary = fmt.Sprintf("MCP server %q not found.", connName) - check.Detail = err.Error() - return check - } - check.Metadata = map[string]any{ - "connection_id": conn.ID, - "name": conn.Name, - "type": conn.Type, - } - - probeCtx, cancel := context.WithTimeout(ctx, mcpCheckTimeout) - defer cancel() - - session := ToolSessionContext{BotID: botID} - tools, err := c.gateway.ListTools(probeCtx, session) - if err != nil { - check.Status = bots.BotCheckStatusError - check.Summary = fmt.Sprintf("MCP server %q is not reachable.", connName) - check.Detail = err.Error() - return check - } - - prefix := sanitizeCheckKey(conn.Name) + "." - toolCount := 0 - for _, t := range tools { - if strings.HasPrefix(t.Name, prefix) { - toolCount++ - } - } - - if toolCount > 0 { - check.Status = bots.BotCheckStatusOK - check.Summary = fmt.Sprintf("MCP server %q is healthy (%d tools).", connName, toolCount) - check.Metadata["tool_count"] = toolCount - } else { - check.Status = bots.BotCheckStatusWarn - check.Summary = fmt.Sprintf("MCP server %q is reachable but no tools found.", connName) - check.Detail = "The server responded but exposed no tools for this connection." 
- } - return check -} - -func (c *ConnectionChecker) findConnectionByKey(ctx context.Context, botID, sanitizedName string) (Connection, error) { - items, err := c.connections.ListActiveByBot(ctx, botID) - if err != nil { - return Connection{}, err - } - for _, conn := range items { - if sanitizeCheckKey(conn.Name) == sanitizedName { - return conn, nil - } - } - return Connection{}, fmt.Errorf("connection %q not found", sanitizedName) -} - -func sanitizeCheckKey(raw string) string { - raw = strings.TrimSpace(strings.ToLower(raw)) - if raw == "" { - return "unknown" - } - b := strings.Builder{} - for _, ch := range raw { - if (ch >= 'a' && ch <= 'z') || (ch >= '0' && ch <= '9') || ch == '_' || ch == '-' { - b.WriteRune(ch) - } else { - b.WriteRune('_') - } - } - return strings.Trim(b.String(), "_-") -} diff --git a/internal/media/errors.go b/internal/media/errors.go new file mode 100644 index 00000000..c00ced9b --- /dev/null +++ b/internal/media/errors.go @@ -0,0 +1,14 @@ +package media + +import "errors" + +var ( + // ErrAssetNotFound indicates the requested media asset does not exist. + ErrAssetNotFound = errors.New("media asset not found") + // ErrProviderUnavailable indicates the storage provider is not configured or reachable. + ErrProviderUnavailable = errors.New("storage provider unavailable") + // ErrAssetTooLarge indicates the payload exceeds the configured max asset size. + ErrAssetTooLarge = errors.New("media asset too large") + // ErrPathTraversal indicates a storage key attempted directory traversal. + ErrPathTraversal = errors.New("path traversal is forbidden") +) diff --git a/internal/media/limits.go b/internal/media/limits.go new file mode 100644 index 00000000..35d128b3 --- /dev/null +++ b/internal/media/limits.go @@ -0,0 +1,33 @@ +package media + +import ( + "fmt" + "io" +) + +const ( + // MaxAssetBytes is the global max accepted payload size. 
+ MaxAssetBytes int64 = 200 * 1024 * 1024 +) + +// ReadAllWithLimit reads from reader and rejects payloads larger than maxBytes. +func ReadAllWithLimit(reader io.Reader, maxBytes int64) ([]byte, error) { + if reader == nil { + return nil, fmt.Errorf("reader is required") + } + if maxBytes <= 0 { + return nil, fmt.Errorf("max bytes must be greater than 0") + } + limited := &io.LimitedReader{ + R: reader, + N: maxBytes + 1, + } + data, err := io.ReadAll(limited) + if err != nil { + return nil, err + } + if int64(len(data)) > maxBytes { + return nil, fmt.Errorf("%w: max %d bytes", ErrAssetTooLarge, maxBytes) + } + return data, nil +} diff --git a/internal/media/limits_test.go b/internal/media/limits_test.go new file mode 100644 index 00000000..305f2d66 --- /dev/null +++ b/internal/media/limits_test.go @@ -0,0 +1,60 @@ +package media + +import ( + "bytes" + "errors" + "testing" +) + +func TestReadAllWithLimit(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + payload []byte + maxBytes int64 + wantErr bool + errTooBig bool + }{ + { + name: "within limit", + payload: []byte("hello"), + maxBytes: 8, + }, + { + name: "over limit", + payload: []byte("0123456789"), + maxBytes: 5, + wantErr: true, + errTooBig: true, + }, + { + name: "exact limit", + payload: []byte("12345"), + maxBytes: 5, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, err := ReadAllWithLimit(bytes.NewReader(tt.payload), tt.maxBytes) + if tt.wantErr { + if err == nil { + t.Fatalf("expected error") + } + if tt.errTooBig && !errors.Is(err, ErrAssetTooLarge) { + t.Fatalf("expected ErrAssetTooLarge, got %v", err) + } + return + } + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if string(got) != string(tt.payload) { + t.Fatalf("unexpected payload: %q", string(got)) + } + }) + } +} diff --git a/internal/media/providers/containerfs/provider.go b/internal/media/providers/containerfs/provider.go new file mode 100644 
index 00000000..aac2ced2 --- /dev/null +++ b/internal/media/providers/containerfs/provider.go @@ -0,0 +1,112 @@ +// Package containerfs implements media.StorageProvider for bot containers +// backed by host-side bind mounts. Writing to /bots//media/ +// on the host makes the file available at /data/media/ inside the container. +package containerfs + +import ( + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +const containerMediaRoot = "/data/media" + +// Provider stores media assets via the host-side bind mount path +// that maps to /data inside bot containers. +type Provider struct { + dataRoot string +} + +// New creates a container-based storage provider. +// dataRoot is the host directory that contains per-bot data (e.g. "data"). +func New(dataRoot string) (*Provider, error) { + abs, err := filepath.Abs(dataRoot) + if err != nil { + return nil, fmt.Errorf("resolve data root: %w", err) + } + return &Provider{dataRoot: abs}, nil +} + +// Put writes data to the host bind mount path for the bot container. +func (p *Provider) Put(_ context.Context, key string, reader io.Reader) error { + dest, err := p.hostPath(key) + if err != nil { + return err + } + if err := os.MkdirAll(filepath.Dir(dest), 0o755); err != nil { + return fmt.Errorf("create parent dir: %w", err) + } + f, err := os.Create(dest) + if err != nil { + return fmt.Errorf("create file: %w", err) + } + defer f.Close() + if _, err := io.Copy(f, reader); err != nil { + return fmt.Errorf("write file: %w", err) + } + return nil +} + +// Open reads a file from the host bind mount path. +func (p *Provider) Open(_ context.Context, key string) (io.ReadCloser, error) { + dest, err := p.hostPath(key) + if err != nil { + return nil, err + } + f, err := os.Open(dest) + if err != nil { + return nil, fmt.Errorf("open file: %w", err) + } + return f, nil +} + +// Delete removes a file from the host bind mount path. 
+func (p *Provider) Delete(_ context.Context, key string) error { + dest, err := p.hostPath(key) + if err != nil { + return err + } + if err := os.Remove(dest); err != nil && !os.IsNotExist(err) { + return fmt.Errorf("delete file: %w", err) + } + return nil +} + +// AccessPath returns the container-internal path for a storage key. +// Key format: "/" → "/data/media/". +func (p *Provider) AccessPath(key string) string { + sub := key + if idx := strings.IndexByte(sub, '/'); idx >= 0 { + sub = sub[idx+1:] + } + return containerMediaRoot + "/" + sub +} + +// hostPath converts a storage key into the host-side file path. +// Key format: "/" → "/bots//media/". +func (p *Provider) hostPath(key string) (string, error) { + clean := filepath.Clean(key) + if filepath.IsAbs(clean) { + return "", fmt.Errorf("absolute key is forbidden: %s", key) + } + if strings.HasPrefix(clean, ".."+string(filepath.Separator)) || clean == ".." { + return "", fmt.Errorf("path traversal is forbidden: %s", key) + } + idx := strings.IndexByte(clean, filepath.Separator) + if idx <= 0 { + return "", fmt.Errorf("storage key must contain bot_id prefix: %s", key) + } + botID := clean[:idx] + subPath := clean[idx+1:] + if strings.TrimSpace(botID) == "" || strings.TrimSpace(subPath) == "" { + return "", fmt.Errorf("invalid storage key: %s", key) + } + joined := filepath.Join(p.dataRoot, "bots", botID, "media", subPath) + if !strings.HasPrefix(joined, p.dataRoot+string(filepath.Separator)) { + return "", fmt.Errorf("path escapes data root: %s", key) + } + return joined, nil +} diff --git a/internal/media/providers/containerfs/provider_test.go b/internal/media/providers/containerfs/provider_test.go new file mode 100644 index 00000000..346ded1c --- /dev/null +++ b/internal/media/providers/containerfs/provider_test.go @@ -0,0 +1,116 @@ +package containerfs + +import ( + "bytes" + "context" + "io" + "os" + "path/filepath" + "testing" +) + +func TestProvider_HostPath(t *testing.T) { + t.Parallel() + p := 
&Provider{dataRoot: "/srv/data"} + + tests := []struct { + key string + want string + wantErr bool + }{ + {key: "bot-1/image/ab12/ab12cd.png", want: "/srv/data/bots/bot-1/media/image/ab12/ab12cd.png"}, + {key: "/absolute/path", wantErr: true}, + {key: "../escape", wantErr: true}, + {key: "nosubpath", wantErr: true}, + {key: "", wantErr: true}, + } + for _, tt := range tests { + got, err := p.hostPath(tt.key) + if tt.wantErr { + if err == nil { + t.Errorf("hostPath(%q) expected error", tt.key) + } + continue + } + if err != nil { + t.Errorf("hostPath(%q) unexpected error: %v", tt.key, err) + continue + } + if got != tt.want { + t.Errorf("hostPath(%q) = %q, want %q", tt.key, got, tt.want) + } + } +} + +func TestProvider_AccessPath(t *testing.T) { + t.Parallel() + p := &Provider{dataRoot: "/srv/data"} + + tests := []struct { + key string + want string + }{ + {key: "bot-1/image/ab12/ab12cd.png", want: "/data/media/image/ab12/ab12cd.png"}, + {key: "bot-1/file/xx/doc.pdf", want: "/data/media/file/xx/doc.pdf"}, + } + for _, tt := range tests { + got := p.AccessPath(tt.key) + if got != tt.want { + t.Errorf("AccessPath(%q) = %q, want %q", tt.key, got, tt.want) + } + } +} + +func TestProvider_PutOpenDelete(t *testing.T) { + t.Parallel() + tmpDir := t.TempDir() + p, err := New(tmpDir) + if err != nil { + t.Fatalf("New failed: %v", err) + } + + key := "bot-1/image/ab/test.png" + data := []byte("hello media content") + + if err := p.Put(context.Background(), key, bytes.NewReader(data)); err != nil { + t.Fatalf("Put failed: %v", err) + } + + hostFile := filepath.Join(tmpDir, "bots", "bot-1", "media", "image", "ab", "test.png") + if _, err := os.Stat(hostFile); err != nil { + t.Fatalf("file not found on host: %v", err) + } + + reader, err := p.Open(context.Background(), key) + if err != nil { + t.Fatalf("Open failed: %v", err) + } + got, _ := io.ReadAll(reader) + reader.Close() + if !bytes.Equal(got, data) { + t.Errorf("Open returned %q, want %q", got, data) + } + + if err := 
p.Delete(context.Background(), key); err != nil { + t.Fatalf("Delete failed: %v", err) + } + if _, err := os.Stat(hostFile); !os.IsNotExist(err) { + t.Fatalf("file should be deleted: %v", err) + } +} + +func TestProvider_PathTraversal(t *testing.T) { + t.Parallel() + p := &Provider{dataRoot: "/srv/data"} + + bad := []string{ + "../etc/passwd", + "/absolute/key", + "bot-1/../../escape", + } + for _, key := range bad { + if _, err := p.hostPath(key); err == nil { + t.Errorf("hostPath(%q) should reject traversal", key) + } + } +} diff --git a/internal/media/service.go b/internal/media/service.go new file mode 100644 index 00000000..30d911c1 --- /dev/null +++ b/internal/media/service.go @@ -0,0 +1,356 @@ +package media + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "log/slog" + "os" + "path" + "strings" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" + + dbpkg "github.com/memohai/memoh/internal/db" + "github.com/memohai/memoh/internal/db/sqlc" +) + +// Service provides media asset persistence operations. +type Service struct { + queries *sqlc.Queries + provider StorageProvider + logger *slog.Logger +} + +// NewService creates a media service with the given storage provider. +func NewService(log *slog.Logger, queries *sqlc.Queries, provider StorageProvider) *Service { + if log == nil { + log = slog.Default() + } + return &Service{ + queries: queries, + provider: provider, + logger: log.With(slog.String("service", "media")), + } +} + +// Ingest persists a new media asset. It hashes the content, deduplicates by +// (bot_id, content_hash), stores the bytes via the provider, and writes the +// DB record. Returns the asset (existing or newly created). 
+func (s *Service) Ingest(ctx context.Context, input IngestInput) (Asset, error) { + if s.provider == nil { + return Asset{}, ErrProviderUnavailable + } + if strings.TrimSpace(input.BotID) == "" { + return Asset{}, fmt.Errorf("bot id is required") + } + if input.Reader == nil { + return Asset{}, fmt.Errorf("reader is required") + } + + maxBytes := input.MaxBytes + if maxBytes <= 0 { + maxBytes = MaxAssetBytes + } + contentHash, sizeBytes, tempPath, err := spoolAndHashWithLimit(input.Reader, maxBytes) + if err != nil { + return Asset{}, fmt.Errorf("read input: %w", err) + } + defer func() { + _ = os.Remove(tempPath) + }() + + pgBotID, err := dbpkg.ParseUUID(input.BotID) + if err != nil { + return Asset{}, fmt.Errorf("invalid bot id: %w", err) + } + + // Dedup: only create when hash truly not found; propagate other DB errors. + existing, err := s.queries.GetMediaAssetByHash(ctx, sqlc.GetMediaAssetByHashParams{ + BotID: pgBotID, + ContentHash: contentHash, + }) + if err == nil { + return convertAsset(existing), nil + } + if !errors.Is(err, pgx.ErrNoRows) { + return Asset{}, fmt.Errorf("check existing asset: %w", err) + } + + ext := extensionFromMime(input.Mime) + storageKey := path.Join( + input.BotID, + string(input.MediaType), + contentHash[:4], + contentHash+ext, + ) + + tempFile, err := os.Open(tempPath) + if err != nil { + return Asset{}, fmt.Errorf("open temp file: %w", err) + } + defer func() { + _ = tempFile.Close() + }() + if err := s.provider.Put(ctx, storageKey, tempFile); err != nil { + return Asset{}, fmt.Errorf("store media: %w", err) + } + + metaBytes, err := json.Marshal(nonNilMap(input.Metadata)) + if err != nil { + metaBytes = []byte("{}") + } + + row, err := s.queries.CreateMediaAsset(ctx, sqlc.CreateMediaAssetParams{ + BotID: pgBotID, + ContentHash: contentHash, + MediaType: string(input.MediaType), + Mime: coalesce(input.Mime, "application/octet-stream"), + SizeBytes: sizeBytes, + StorageKey: storageKey, + OriginalName: pgtype.Text{ + String: 
input.OriginalName, + Valid: strings.TrimSpace(input.OriginalName) != "", + }, + Width: toPgInt4(input.Width), + Height: toPgInt4(input.Height), + DurationMs: toPgInt8(input.DurationMs), + Metadata: metaBytes, + }) + if err != nil { + return Asset{}, fmt.Errorf("create asset record: %w", err) + } + return convertAsset(row), nil +} + +// Open returns a reader for the media asset identified by ID. +func (s *Service) Open(ctx context.Context, assetID string) (io.ReadCloser, Asset, error) { + if s.provider == nil { + return nil, Asset{}, ErrProviderUnavailable + } + pgID, err := dbpkg.ParseUUID(assetID) + if err != nil { + return nil, Asset{}, fmt.Errorf("invalid asset id: %w", err) + } + row, err := s.queries.GetMediaAssetByID(ctx, pgID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return nil, Asset{}, ErrAssetNotFound + } + return nil, Asset{}, fmt.Errorf("get asset: %w", err) + } + asset := convertAsset(row) + reader, err := s.provider.Open(ctx, asset.StorageKey) + if err != nil { + return nil, Asset{}, fmt.Errorf("open storage: %w", err) + } + return reader, asset, nil +} + +// GetByID returns an asset by its ID. +func (s *Service) GetByID(ctx context.Context, assetID string) (Asset, error) { + pgID, err := dbpkg.ParseUUID(assetID) + if err != nil { + return Asset{}, fmt.Errorf("invalid asset id: %w", err) + } + row, err := s.queries.GetMediaAssetByID(ctx, pgID) + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + return Asset{}, ErrAssetNotFound + } + return Asset{}, fmt.Errorf("get asset: %w", err) + } + return convertAsset(row), nil +} + +// LinkToMessage creates a message-asset relationship. 
+func (s *Service) LinkToMessage(ctx context.Context, messageID, assetID, role string, ordinal int) error { + pgMsgID, err := dbpkg.ParseUUID(messageID) + if err != nil { + return fmt.Errorf("invalid message id: %w", err) + } + pgAssetID, err := dbpkg.ParseUUID(assetID) + if err != nil { + return fmt.Errorf("invalid asset id: %w", err) + } + if strings.TrimSpace(role) == "" { + role = "attachment" + } + _, err = s.queries.CreateMessageAsset(ctx, sqlc.CreateMessageAssetParams{ + MessageID: pgMsgID, + AssetID: pgAssetID, + Role: role, + Ordinal: int32(ordinal), + }) + return err +} + +// ListMessageAssets returns all assets linked to a message. +func (s *Service) ListMessageAssets(ctx context.Context, messageID string) ([]Asset, error) { + pgMsgID, err := dbpkg.ParseUUID(messageID) + if err != nil { + return nil, fmt.Errorf("invalid message id: %w", err) + } + rows, err := s.queries.ListMessageAssets(ctx, pgMsgID) + if err != nil { + return nil, err + } + assets := make([]Asset, 0, len(rows)) + for _, row := range rows { + assets = append(assets, Asset{ + ID: row.AssetID.String(), + MediaType: MediaType(row.MediaType), + Mime: row.Mime, + SizeBytes: row.SizeBytes, + StorageKey: row.StorageKey, + OriginalName: dbpkg.TextToString(row.OriginalName), + Width: int(row.Width.Int32), + Height: int(row.Height.Int32), + DurationMs: row.DurationMs.Int64, + }) + } + return assets, nil +} + +// AccessPath returns a consumer-accessible reference for a persisted asset. +// Delegates to the storage provider to compute the format-appropriate path. 
+func (s *Service) AccessPath(asset Asset) string { + if s.provider == nil { + return "" + } + return s.provider.AccessPath(asset.StorageKey) +} + +// --- helpers --- + +func convertAsset(row sqlc.MediaAsset) Asset { + a := Asset{ + ID: row.ID.String(), + BotID: row.BotID.String(), + ContentHash: row.ContentHash, + MediaType: MediaType(row.MediaType), + Mime: row.Mime, + SizeBytes: row.SizeBytes, + StorageKey: row.StorageKey, + CreatedAt: row.CreatedAt.Time, + } + if row.StorageProviderID.Valid { + a.StorageProviderID = row.StorageProviderID.String() + } + if row.OriginalName.Valid { + a.OriginalName = row.OriginalName.String + } + if row.Width.Valid { + a.Width = int(row.Width.Int32) + } + if row.Height.Valid { + a.Height = int(row.Height.Int32) + } + if row.DurationMs.Valid { + a.DurationMs = row.DurationMs.Int64 + } + var meta map[string]any + if len(row.Metadata) > 0 { + _ = json.Unmarshal(row.Metadata, &meta) + } + a.Metadata = meta + return a +} + +func extensionFromMime(mime string) string { + switch strings.ToLower(strings.TrimSpace(mime)) { + case "image/png": + return ".png" + case "image/jpeg", "image/jpg": + return ".jpg" + case "image/gif": + return ".gif" + case "image/webp": + return ".webp" + case "audio/mpeg", "audio/mp3": + return ".mp3" + case "audio/wav": + return ".wav" + case "audio/ogg": + return ".ogg" + case "video/mp4": + return ".mp4" + case "video/webm": + return ".webm" + case "application/pdf": + return ".pdf" + default: + return ".bin" + } +} + +func nonNilMap(m map[string]any) map[string]any { + if m == nil { + return map[string]any{} + } + return m +} + +func coalesce(values ...string) string { + for _, v := range values { + if strings.TrimSpace(v) != "" { + return v + } + } + return "" +} + +func toPgInt4(v int) pgtype.Int4 { + if v == 0 { + return pgtype.Int4{} + } + return pgtype.Int4{Int32: int32(v), Valid: true} +} + +func toPgInt8(v int64) pgtype.Int8 { + if v == 0 { + return pgtype.Int8{} + } + return pgtype.Int8{Int64: v, 
Valid: true} +} + +func spoolAndHashWithLimit(reader io.Reader, maxBytes int64) (string, int64, string, error) { + if reader == nil { + return "", 0, "", fmt.Errorf("reader is required") + } + if maxBytes <= 0 { + return "", 0, "", fmt.Errorf("max bytes must be greater than 0") + } + tempFile, err := os.CreateTemp("", "memoh-media-*") + if err != nil { + return "", 0, "", fmt.Errorf("create temp file: %w", err) + } + tempPath := tempFile.Name() + keepFile := false + defer func() { + _ = tempFile.Close() + if !keepFile { + _ = os.Remove(tempPath) + } + }() + + hasher := sha256.New() + limited := &io.LimitedReader{R: reader, N: maxBytes + 1} + written, err := io.Copy(io.MultiWriter(tempFile, hasher), limited) + if err != nil { + return "", 0, "", fmt.Errorf("copy to temp file: %w", err) + } + if written > maxBytes { + return "", 0, "", fmt.Errorf("%w: max %d bytes", ErrAssetTooLarge, maxBytes) + } + if written == 0 { + return "", 0, "", fmt.Errorf("asset payload is empty") + } + keepFile = true + return hex.EncodeToString(hasher.Sum(nil)), written, tempPath, nil +} diff --git a/internal/media/types.go b/internal/media/types.go new file mode 100644 index 00000000..767486fb --- /dev/null +++ b/internal/media/types.go @@ -0,0 +1,71 @@ +package media + +import ( + "context" + "io" + "time" +) + +// MediaType classifies the kind of media asset. +type MediaType string + +const ( + MediaTypeImage MediaType = "image" + MediaTypeAudio MediaType = "audio" + MediaTypeVideo MediaType = "video" + MediaTypeFile MediaType = "file" +) + +// Asset is the domain representation of a persisted media object. 
+type Asset struct { + ID string `json:"id"` + BotID string `json:"bot_id"` + StorageProviderID string `json:"storage_provider_id,omitempty"` + ContentHash string `json:"content_hash"` + MediaType MediaType `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName string `json:"original_name,omitempty"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` + DurationMs int64 `json:"duration_ms,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` + CreatedAt time.Time `json:"created_at"` +} + +// IngestInput carries the data needed to persist a new media asset. +type IngestInput struct { + BotID string + MediaType MediaType + Mime string + OriginalName string + Width int + Height int + DurationMs int64 + Metadata map[string]any + // Reader provides the raw bytes; caller is responsible for closing. + Reader io.Reader + // MaxBytes optionally overrides the media-type default size limit. + MaxBytes int64 +} + +// MessageAssetLink represents the relationship between a message and an asset. +type MessageAssetLink struct { + AssetID string `json:"asset_id"` + Role string `json:"role"` + Ordinal int `json:"ordinal"` +} + +// StorageProvider abstracts object storage operations. +type StorageProvider interface { + // Put writes data to storage under the given key. + Put(ctx context.Context, key string, reader io.Reader) error + // Open returns a reader for the given storage key. + Open(ctx context.Context, key string) (io.ReadCloser, error) + // Delete removes the object at key. + Delete(ctx context.Context, key string) error + // AccessPath returns a consumer-accessible reference for a storage key. + // The format depends on the backend (e.g. container path, signed URL). 
+ AccessPath(key string) string +} diff --git a/internal/message/service.go b/internal/message/service.go index 5760dd70..849cf9c8 100644 --- a/internal/message/service.go +++ b/internal/message/service.go @@ -85,6 +85,36 @@ func (s *DBService) Persist(ctx context.Context, input PersistInput) (Message, e } result := toMessageFromCreate(row) + + // Persist asset links if provided. + for _, ref := range input.Assets { + pgMsgID := row.ID + pgAssetID, assetErr := dbpkg.ParseUUID(ref.AssetID) + if assetErr != nil { + s.logger.Warn("skip invalid asset ref", slog.String("asset_id", ref.AssetID), slog.Any("error", assetErr)) + continue + } + role := ref.Role + if strings.TrimSpace(role) == "" { + role = "attachment" + } + if _, assetErr := s.queries.CreateMessageAsset(ctx, sqlc.CreateMessageAssetParams{ + MessageID: pgMsgID, + AssetID: pgAssetID, + Role: role, + Ordinal: int32(ref.Ordinal), + }); assetErr != nil { + s.logger.Warn("create message asset link failed", slog.String("message_id", result.ID), slog.Any("error", assetErr)) + } + } + + // Enrich assets before publishing so SSE consumers see them immediately. + if len(input.Assets) > 0 { + enriched := []Message{result} + s.enrichAssets(ctx, enriched) + result = enriched[0] + } + s.publishMessageCreated(result) return result, nil } @@ -99,7 +129,9 @@ func (s *DBService) List(ctx context.Context, botID string) ([]Message, error) { if err != nil { return nil, err } - return toMessagesFromList(rows), nil + msgs := toMessagesFromList(rows) + s.enrichAssets(ctx, msgs) + return msgs, nil } // ListSince returns bot messages since a given time. @@ -115,7 +147,9 @@ func (s *DBService) ListSince(ctx context.Context, botID string, since time.Time if err != nil { return nil, err } - return toMessagesFromSince(rows), nil + msgs := toMessagesFromSince(rows) + s.enrichAssets(ctx, msgs) + return msgs, nil } // ListLatest returns the latest N bot messages (newest first in DB; caller may reverse for ASC). 
@@ -131,7 +165,9 @@ func (s *DBService) ListLatest(ctx context.Context, botID string, limit int32) ( if err != nil { return nil, err } - return toMessagesFromLatest(rows), nil + msgs := toMessagesFromLatest(rows) + s.enrichAssets(ctx, msgs) + return msgs, nil } // ListBefore returns up to limit messages older than before (created_at < before), ordered oldest-first. @@ -148,7 +184,9 @@ func (s *DBService) ListBefore(ctx context.Context, botID string, before time.Ti if err != nil { return nil, err } - return toMessagesFromBefore(rows), nil + msgs := toMessagesFromBefore(rows) + s.enrichAssets(ctx, msgs) + return msgs, nil } // DeleteByBot deletes all messages for a bot. @@ -372,3 +410,48 @@ func (s *DBService) publishMessageCreated(message Message) { Data: payload, }) } + +// enrichAssets batch-loads asset links for a list of messages. +func (s *DBService) enrichAssets(ctx context.Context, messages []Message) { + if len(messages) == 0 { + return + } + ids := make([]pgtype.UUID, 0, len(messages)) + for _, m := range messages { + pgID, err := dbpkg.ParseUUID(m.ID) + if err != nil { + continue + } + ids = append(ids, pgID) + } + if len(ids) == 0 { + return + } + rows, err := s.queries.ListMessageAssetsBatch(ctx, ids) + if err != nil { + s.logger.Warn("enrich assets failed", slog.Any("error", err)) + return + } + assetMap := map[string][]MessageAsset{} + for _, row := range rows { + msgID := row.MessageID.String() + assetMap[msgID] = append(assetMap[msgID], MessageAsset{ + AssetID: row.AssetID.String(), + Role: row.Role, + Ordinal: int(row.Ordinal), + MediaType: row.MediaType, + Mime: row.Mime, + SizeBytes: row.SizeBytes, + StorageKey: row.StorageKey, + OriginalName: dbpkg.TextToString(row.OriginalName), + Width: int(row.Width.Int32), + Height: int(row.Height.Int32), + DurationMs: row.DurationMs.Int64, + }) + } + for i := range messages { + if assets, ok := assetMap[messages[i].ID]; ok { + messages[i].Assets = assets + } + } +} diff --git a/internal/message/types.go 
b/internal/message/types.go index 28225238..5e0be8ed 100644 --- a/internal/message/types.go +++ b/internal/message/types.go @@ -6,6 +6,21 @@ import ( "time" ) +// MessageAsset carries media asset metadata attached to a message. +type MessageAsset struct { + AssetID string `json:"asset_id"` + Role string `json:"role"` + Ordinal int `json:"ordinal"` + MediaType string `json:"media_type"` + Mime string `json:"mime"` + SizeBytes int64 `json:"size_bytes"` + StorageKey string `json:"storage_key"` + OriginalName string `json:"original_name,omitempty"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` + DurationMs int64 `json:"duration_ms,omitempty"` +} + // Message represents a single persisted bot message. type Message struct { ID string `json:"id"` @@ -21,9 +36,17 @@ type Message struct { Role string `json:"role"` Content json.RawMessage `json:"content"` Metadata map[string]any `json:"metadata,omitempty"` + Assets []MessageAsset `json:"assets,omitempty"` CreatedAt time.Time `json:"created_at"` } +// AssetRef links a media asset to a persisted message. +type AssetRef struct { + AssetID string `json:"asset_id"` + Role string `json:"role"` + Ordinal int `json:"ordinal"` +} + // PersistInput is the input for persisting a message. type PersistInput struct { BotID string @@ -36,6 +59,7 @@ type PersistInput struct { Role string Content json.RawMessage Metadata map[string]any + Assets []AssetRef } // Writer defines write behavior needed by the inbound router. 
diff --git a/internal/models/models.go b/internal/models/models.go index 18b7fdbd..623c0705 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -39,11 +39,15 @@ func (s *Service) Create(ctx context.Context, req AddRequest) (AddResponse, erro return AddResponse{}, fmt.Errorf("invalid llm provider ID: %w", err) } + inputMod := []string{} + if model.Type == ModelTypeChat { + inputMod = normalizeModalities(model.InputModalities, []string{ModelInputText}) + } params := sqlc.CreateModelParams{ - ModelID: model.ModelID, - LlmProviderID: llmProviderID, - IsMultimodal: model.IsMultimodal, - Type: string(model.Type), + ModelID: model.ModelID, + LlmProviderID: llmProviderID, + InputModalities: inputMod, + Type: string(model.Type), } // Handle optional name field @@ -194,10 +198,14 @@ func (s *Service) UpdateByID(ctx context.Context, id string, req UpdateRequest) return GetResponse{}, fmt.Errorf("validation failed: %w", err) } + inputMod := []string{} + if model.Type == ModelTypeChat { + inputMod = normalizeModalities(model.InputModalities, []string{ModelInputText}) + } params := sqlc.UpdateModelParams{ - ID: uuid, - IsMultimodal: model.IsMultimodal, - Type: string(model.Type), + ID: uuid, + InputModalities: inputMod, + Type: string(model.Type), } llmProviderID, err := db.ParseUUID(model.LlmProviderID) @@ -233,11 +241,15 @@ func (s *Service) UpdateByModelID(ctx context.Context, modelID string, req Updat return GetResponse{}, fmt.Errorf("validation failed: %w", err) } + inputMod := []string{} + if model.Type == ModelTypeChat { + inputMod = normalizeModalities(model.InputModalities, []string{ModelInputText}) + } params := sqlc.UpdateModelByModelIDParams{ - ModelID: modelID, - NewModelID: model.ModelID, - IsMultimodal: model.IsMultimodal, - Type: string(model.Type), + ModelID: modelID, + NewModelID: model.ModelID, + InputModalities: inputMod, + Type: string(model.Type), } llmProviderID, err := db.ParseUUID(model.LlmProviderID) @@ -317,12 +329,13 @@ func 
convertToGetResponse(dbModel sqlc.Model) GetResponse { resp := GetResponse{ ModelID: dbModel.ModelID, Model: Model{ - ModelID: dbModel.ModelID, - IsMultimodal: dbModel.IsMultimodal, - Input: modelInputFromMultimodal(dbModel.IsMultimodal), - Type: ModelType(dbModel.Type), + ModelID: dbModel.ModelID, + Type: ModelType(dbModel.Type), }, } + if resp.Model.Type == ModelTypeChat { + resp.Model.InputModalities = normalizeModalities(dbModel.InputModalities, []string{ModelInputText}) + } if dbModel.LlmProviderID.Valid { resp.Model.LlmProviderID = dbModel.LlmProviderID.String() @@ -347,12 +360,12 @@ func convertToGetResponseList(dbModels []sqlc.Model) []GetResponse { return responses } -// modelInputFromMultimodal builds the input list based on multimodal support. -func modelInputFromMultimodal(isMultimodal bool) []string { - if isMultimodal { - return []string{ModelInputText, ModelInputImage} +// normalizeModalities returns modalities if non-empty, otherwise the provided fallback. +func normalizeModalities(modalities []string, fallback []string) []string { + if len(modalities) == 0 { + return fallback } - return []string{ModelInputText} + return modalities } func isValidClientType(clientType ClientType) bool { diff --git a/internal/models/models_test.go b/internal/models/models_test.go index 34d45b5b..762ad822 100644 --- a/internal/models/models_test.go +++ b/internal/models/models_test.go @@ -114,6 +114,17 @@ func TestModel_Validate(t *testing.T) { }, wantErr: false, }, + { + name: "valid chat model with modalities", + model: models.Model{ + ModelID: "gpt-4o", + Name: "GPT-4o", + LlmProviderID: "11111111-1111-1111-1111-111111111111", + InputModalities: []string{"text", "image", "audio"}, + Type: models.ModelTypeChat, + }, + wantErr: false, + }, { name: "valid embedding model", model: models.Model{ @@ -169,6 +180,16 @@ func TestModel_Validate(t *testing.T) { }, wantErr: true, }, + { + name: "invalid input modality", + model: models.Model{ + ModelID: "gpt-4", + 
LlmProviderID: "11111111-1111-1111-1111-111111111111", + Type: models.ModelTypeChat, + InputModalities: []string{"text", "smell"}, + }, + wantErr: true, + }, } for _, tt := range tests { @@ -183,6 +204,57 @@ func TestModel_Validate(t *testing.T) { } } +func TestModel_IsMultimodal(t *testing.T) { + tests := []struct { + name string + model models.Model + expected bool + }{ + { + name: "text only", + model: models.Model{ + InputModalities: []string{"text"}, + }, + expected: false, + }, + { + name: "text and image", + model: models.Model{ + InputModalities: []string{"text", "image"}, + }, + expected: true, + }, + { + name: "text image audio video", + model: models.Model{ + InputModalities: []string{"text", "image", "audio", "video"}, + }, + expected: true, + }, + { + name: "empty modalities", + model: models.Model{}, + expected: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, tt.model.IsMultimodal()) + }) + } +} + +func TestModel_HasInputModality(t *testing.T) { + m := models.Model{ + InputModalities: []string{"text", "image", "audio"}, + } + assert.True(t, m.HasInputModality("text")) + assert.True(t, m.HasInputModality("image")) + assert.True(t, m.HasInputModality("audio")) + assert.False(t, m.HasInputModality("video")) + assert.False(t, m.HasInputModality("file")) +} + func TestModelTypes(t *testing.T) { t.Run("ModelType constants", func(t *testing.T) { assert.Equal(t, models.ModelType("chat"), models.ModelTypeChat) diff --git a/internal/models/types.go b/internal/models/types.go index a1419dd5..afa024ca 100644 --- a/internal/models/types.go +++ b/internal/models/types.go @@ -2,6 +2,7 @@ package models import ( "errors" + "fmt" "github.com/google/uuid" ) @@ -16,6 +17,9 @@ const ( const ( ModelInputText = "text" ModelInputImage = "image" + ModelInputAudio = "audio" + ModelInputVideo = "video" + ModelInputFile = "file" ) type ClientType string @@ -34,13 +38,18 @@ const ( ) type Model struct { - ModelID 
// Model describes an LLM model registered with a provider.
// For chat models, InputModalities lists the accepted input kinds
// ("text", "image", "audio", "video", "file"); embedding models carry
// a vector size in Dimensions instead.
type Model struct {
	ModelID         string    `json:"model_id"`
	Name            string    `json:"name"`
	LlmProviderID   string    `json:"llm_provider_id"`
	InputModalities []string  `json:"input_modalities,omitempty"`
	Type            ModelType `json:"type"`
	Dimensions      int       `json:"dimensions"`
}

// validInputModalities is the set of recognised input modality tokens.
var validInputModalities = map[string]struct{}{
	ModelInputText: {}, ModelInputImage: {}, ModelInputAudio: {},
	ModelInputVideo: {}, ModelInputFile: {},
}

// HasInputModality checks whether the model supports a given input modality.
// Linear scan is deliberate: the modality list is expected to stay tiny.
func (m *Model) HasInputModality(mod string) bool {
	for _, v := range m.InputModalities {
		if v == mod {
			return true
		}
	}
	return false
}
// IsMultimodal returns true if the model supports any input modality beyond text.
func (m *Model) IsMultimodal() bool {
	for _, v := range m.InputModalities {
		// Any single non-text modality is enough.
		if v != ModelInputText {
			return true
		}
	}
	return false
}

// AddRequest is the payload for registering a new model; it mirrors Model.
type AddRequest Model

package schedule_test

import (
	"context"
	"encoding/json"
	"log/slog"
	"os"
	"strings"
	"testing"

	"github.com/jackc/pgx/v5/pgtype"
	"github.com/jackc/pgx/v5/pgxpool"

	"github.com/memohai/memoh/internal/boot"
	"github.com/memohai/memoh/internal/db"
	"github.com/memohai/memoh/internal/db/sqlc"
	"github.com/memohai/memoh/internal/schedule"
)

// setupScheduleIntegrationTest wires a schedule.Service against a real
// Postgres instance addressed by TEST_POSTGRES_DSN. The test is skipped
// (not failed) when the DSN is unset or the database is unreachable.
// The returned func closes the connection pool.
func setupScheduleIntegrationTest(t *testing.T) (*schedule.Service, *sqlc.Queries, *pgxpool.Pool, *mockTriggerer, func()) {
	t.Helper()

	dsn := os.Getenv("TEST_POSTGRES_DSN")
	if dsn == "" {
		t.Skip("skip integration test: TEST_POSTGRES_DSN is not set")
	}

	ctx := context.Background()
	pool, err := pgxpool.New(ctx, dsn)
	if err != nil {
		t.Skipf("skip integration test: cannot connect to database: %v", err)
	}
	if err := pool.Ping(ctx); err != nil {
		pool.Close()
		t.Skipf("skip integration test: database ping failed: %v", err)
	}

	queries := sqlc.New(pool)
	mock := &mockTriggerer{}
	logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug}))
	cfg := &boot.RuntimeConfig{JwtSecret: "integration-test-jwt-secret"}
	svc := schedule.NewService(logger, queries, mock, cfg)

	return svc, queries, pool, mock, func() { pool.Close() }
}

// mockTriggerer records the last TriggerSchedule call so tests can assert
// on the payload without reaching a real agent endpoint.
type mockTriggerer struct {
	called  bool
	botID   string
	payload schedule.TriggerPayload
	token   string
}

// TriggerSchedule captures its arguments and always reports success.
func (m *mockTriggerer) TriggerSchedule(_ context.Context, botID string, payload schedule.TriggerPayload, token string) error {
	m.called = true
	m.botID = botID
	m.payload = payload
	m.token = token
	return nil
}

// createUserBotAndSchedule inserts a user, a bot owned by that user, and an
// enabled daily schedule for the bot, returning all three IDs for later
// assertions. Any insert failure aborts the test immediately.
func createUserBotAndSchedule(ctx context.Context, t *testing.T, queries *sqlc.Queries) (ownerUserID, botID, scheduleID string) {
	t.Helper()

	userRow, err := queries.CreateUser(ctx, sqlc.CreateUserParams{
		IsActive: true,
		Metadata: []byte("{}"),
	})
	if err != nil {
		t.Fatalf("create user: %v", err)
	}
	ownerUserID = userRow.ID.String()

	// IDs round-trip through strings because the service API is string-based.
	pgOwnerID, err := db.ParseUUID(ownerUserID)
	if err != nil {
		t.Fatalf("parse owner uuid: %v", err)
	}
	meta, _ := json.Marshal(map[string]any{"source": "schedule-integration-test"})
	botRow, err := queries.CreateBot(ctx, sqlc.CreateBotParams{
		OwnerUserID: pgOwnerID,
		Type:        "personal",
		DisplayName: pgtype.Text{String: "schedule-test-bot", Valid: true},
		AvatarUrl:   pgtype.Text{},
		IsActive:    true,
		Metadata:    meta,
		Status:      "ready",
	})
	if err != nil {
		t.Fatalf("create bot: %v", err)
	}
	botID = botRow.ID.String()

	pgBotID, err := db.ParseUUID(botID)
	if err != nil {
		t.Fatalf("parse bot uuid: %v", err)
	}
	schedRow, err := queries.CreateSchedule(ctx, sqlc.CreateScheduleParams{
		Name:        "integration-daily",
		Description: "daily job for integration test",
		Pattern:     "0 0 * * *",
		MaxCalls:    pgtype.Int4{Valid: false}, // NULL max_calls: unlimited invocations
		Enabled:     true,
		Command:     "run daily report",
		BotID:       pgBotID,
	})
	if err != nil {
		t.Fatalf("create schedule: %v", err)
	}
	scheduleID = schedRow.ID.String()
	return ownerUserID, botID, scheduleID
}

// cleanupScheduleTestData best-effort deletes the rows created by
// createUserBotAndSchedule, in dependency order (schedule -> bot -> user).
// Errors are deliberately ignored: cleanup must not fail the test.
func cleanupScheduleTestData(ctx context.Context, t *testing.T, queries *sqlc.Queries, pool *pgxpool.Pool, ownerUserID, botID, scheduleID string) {
	t.Helper()
	schedID, _ := db.ParseUUID(scheduleID)
	_ = queries.DeleteSchedule(ctx, schedID)
	botUUID, _ := db.ParseUUID(botID)
	_ = queries.DeleteBotByID(ctx, botUUID)
	userUUID, _ := db.ParseUUID(ownerUserID)
	// No generated delete-user query exists; raw SQL is used instead.
	_, _ = pool.Exec(ctx, "DELETE FROM users WHERE id = $1", userUUID)
}
queries, pool, mock, cleanup := setupScheduleIntegrationTest(t) + defer cleanup() + + ctx := context.Background() + ownerUserID, botID, scheduleID := createUserBotAndSchedule(ctx, t, queries) + defer cleanupScheduleTestData(ctx, t, queries, pool, ownerUserID, botID, scheduleID) + + err := svc.Trigger(ctx, scheduleID) + if err != nil { + t.Fatalf("Trigger failed: %v", err) + } + + if !mock.called { + t.Fatal("triggerer was not called") + } + if mock.botID != botID { + t.Errorf("triggerer botID = %s, want %s", mock.botID, botID) + } + if mock.payload.ID != scheduleID { + t.Errorf("payload.ID = %s, want %s", mock.payload.ID, scheduleID) + } + if mock.payload.Name != "integration-daily" { + t.Errorf("payload.Name = %s, want integration-daily", mock.payload.Name) + } + if mock.payload.Command != "run daily report" { + t.Errorf("payload.Command = %s, want run daily report", mock.payload.Command) + } + if mock.payload.OwnerUserID != ownerUserID { + t.Errorf("payload.OwnerUserID = %s, want %s", mock.payload.OwnerUserID, ownerUserID) + } + if !strings.HasPrefix(mock.token, "Bearer ") { + t.Errorf("token should have Bearer prefix, got: %s", mock.token) + } +} diff --git a/packages/cli/src/cli/channel.ts b/packages/cli/src/cli/channel.ts index 5fe95076..2b7bbd0f 100644 --- a/packages/cli/src/cli/channel.ts +++ b/packages/cli/src/cli/channel.ts @@ -4,38 +4,77 @@ import inquirer from 'inquirer' import ora from 'ora' import { table } from 'table' -import { - getChannels, - getChannelsByPlatform, - getBotsByIdChannelByPlatform, - putBotsByIdChannelByPlatform, - getUsersMeChannelsByPlatform, - putUsersMeChannelsByPlatform, - type HandlersChannelMeta, -} from '@memoh/sdk' +import { apiRequest } from '../core/api' import { ensureAuth, getErrorMessage, resolveBotId } from './shared' -const renderChannelsTable = (items: HandlersChannelMeta[]) => { +type ChannelFieldSchema = { + type: 'string' | 'secret' | 'bool' | 'number' | 'enum' + required: boolean + title?: string + description?: 
string + enum?: string[] + example?: unknown +} + +type ChannelConfigSchema = { + version: number + fields: Record +} + +type ChannelMeta = { + type: string + display_name: string + configless: boolean + capabilities: Record + config_schema: ChannelConfigSchema + user_config_schema: ChannelConfigSchema +} + +type ChannelUserBinding = { + id: string + channel_type: string + user_id: string + config: Record + created_at: string + updated_at: string +} + +type ChannelConfig = { + id: string + bot_id: string + channel_type: string + credentials: Record + external_identity: string + self_identity: Record + routing: Record + capabilities: Record + disabled: boolean + verified_at: string + created_at: string + updated_at: string +} + +const renderChannelsTable = (items: ChannelMeta[]) => { const rows: string[][] = [['Type', 'Name', 'Configless']] for (const item of items) { - rows.push([item.type ?? '', item.display_name ?? '', item.configless ? 'yes' : 'no']) + rows.push([item.type, item.display_name, item.configless ? 'yes' : 'no']) } return table(rows) } -const fetchChannelList = async () => { - const { data } = await getChannels({ throwOnError: true }) - return data as HandlersChannelMeta[] +const fetchChannels = async (token: ReturnType) => { + return apiRequest('/channels', {}, token) } const resolveChannelType = async ( + token: ReturnType, preset?: string, options?: { allowConfigless?: boolean } ) => { if (preset && preset.trim()) { return preset.trim() } - const channels = await fetchChannelList() + const channels = await fetchChannels(token) const allowConfigless = options?.allowConfigless ?? 
false const candidates = channels.filter(item => allowConfigless || !item.configless) if (candidates.length === 0) { @@ -128,8 +167,8 @@ export const registerChannelCommands = (program: Command) => { .command('list') .description('List available channels') .action(async () => { - ensureAuth() - const channels = await fetchChannelList() + const token = ensureAuth() + const channels = await fetchChannels(token) if (!channels.length) { console.log(chalk.yellow('No channels available.')) return @@ -142,13 +181,10 @@ export const registerChannelCommands = (program: Command) => { .description('Show channel meta and schema') .argument('[type]') .action(async (type) => { - ensureAuth() - const channelType = await resolveChannelType(type, { allowConfigless: true }) - const { data } = await getChannelsByPlatform({ - path: { platform: channelType }, - throwOnError: true, - }) - console.log(JSON.stringify(data, null, 2)) + const token = ensureAuth() + const channelType = await resolveChannelType(token, type, { allowConfigless: true }) + const meta = await apiRequest(`/channels/${encodeURIComponent(channelType)}`, {}, token) + console.log(JSON.stringify(meta, null, 2)) }) const config = channel.command('config').description('Bot channel configuration') @@ -159,14 +195,11 @@ export const registerChannelCommands = (program: Command) => { .argument('[bot_id]') .option('--type ', 'Channel type') .action(async (botId, opts) => { - ensureAuth() - const resolvedBotId = await resolveBotId(botId) - const channelType = await resolveChannelType(opts.type) - const { data } = await getBotsByIdChannelByPlatform({ - path: { id: resolvedBotId, platform: channelType }, - throwOnError: true, - }) - console.log(JSON.stringify(data, null, 2)) + const token = ensureAuth() + const resolvedBotId = await resolveBotId(token, botId) + const channelType = await resolveChannelType(token, opts.type) + const resp = await 
apiRequest(`/bots/${encodeURIComponent(resolvedBotId)}/channel/${encodeURIComponent(channelType)}`, {}, token) + console.log(JSON.stringify(resp, null, 2)) }) config @@ -179,9 +212,9 @@ export const registerChannelCommands = (program: Command) => { .option('--encrypt_key ') .option('--verification_token ') .action(async (botId, opts) => { - ensureAuth() - const resolvedBotId = await resolveBotId(botId) - const channelType = await resolveChannelType(opts.type) + const token = ensureAuth() + const resolvedBotId = await resolveBotId(token, botId) + const channelType = await resolveChannelType(token, opts.type) if (channelType !== 'feishu') { console.log(chalk.red(`Channel type ${channelType} is not supported by this command.`)) process.exit(1) @@ -189,11 +222,10 @@ export const registerChannelCommands = (program: Command) => { const credentials = await collectFeishuCredentials(opts) const spinner = ora('Updating channel config...').start() try { - await putBotsByIdChannelByPlatform({ - path: { id: resolvedBotId, platform: channelType }, - body: { credentials }, - throwOnError: true, - }) + await apiRequest(`/bots/${encodeURIComponent(resolvedBotId)}/channel/${encodeURIComponent(channelType)}`, { + method: 'PUT', + body: JSON.stringify({ credentials }), + }, token) spinner.succeed('Channel config updated') } catch (err: unknown) { spinner.fail(getErrorMessage(err) || 'Failed to update channel config') @@ -208,13 +240,10 @@ export const registerChannelCommands = (program: Command) => { .description('Get current user channel binding') .option('--type ', 'Channel type') .action(async (opts) => { - ensureAuth() - const channelType = await resolveChannelType(opts.type) - const { data } = await getUsersMeChannelsByPlatform({ - path: { platform: channelType }, - throwOnError: true, - }) - console.log(JSON.stringify(data, null, 2)) + const token = ensureAuth() + const channelType = await resolveChannelType(token, opts.type) + const resp = await 
apiRequest(`/users/me/channels/${encodeURIComponent(channelType)}`, {}, token) + console.log(JSON.stringify(resp, null, 2)) }) binding @@ -224,8 +253,8 @@ export const registerChannelCommands = (program: Command) => { .option('--open_id ') .option('--user_id ') .action(async (opts) => { - ensureAuth() - const channelType = await resolveChannelType(opts.type) + const token = ensureAuth() + const channelType = await resolveChannelType(token, opts.type) if (channelType !== 'feishu') { console.log(chalk.red(`Channel type ${channelType} is not supported by this command.`)) process.exit(1) @@ -233,11 +262,10 @@ export const registerChannelCommands = (program: Command) => { const configPayload = await collectFeishuUserConfig(opts) const spinner = ora('Updating user binding...').start() try { - await putUsersMeChannelsByPlatform({ - path: { platform: channelType }, - body: { config: configPayload }, - throwOnError: true, - }) + await apiRequest(`/users/me/channels/${encodeURIComponent(channelType)}`, { + method: 'PUT', + body: JSON.stringify({ config: configPayload }), + }, token) spinner.succeed('User binding updated') } catch (err: unknown) { spinner.fail(getErrorMessage(err) || 'Failed to update user binding') @@ -245,3 +273,4 @@ export const registerChannelCommands = (program: Command) => { } }) } + diff --git a/packages/cli/src/cli/index.ts b/packages/cli/src/cli/index.ts index 3e45cb3e..c79cb8ab 100755 --- a/packages/cli/src/cli/index.ts +++ b/packages/cli/src/cli/index.ts @@ -62,7 +62,7 @@ registerChannelCommands(program) const getModelId = (item: ModelsGetResponse) => item.model_id ?? '' const getProviderId = (item: ModelsGetResponse) => item.llm_provider_id ?? '' const getModelType = (item: ModelsGetResponse) => item.type ?? 'chat' -const getModelMultimodal = (item: ModelsGetResponse) => item.is_multimodal ?? false +const getModelInputModalities = (item: ModelsGetResponse) => item.input_modalities ?? 
// Render models as a table, resolving each provider ID to its display name
// where known (falling back to the raw ID) and listing the model's declared
// input modalities as a comma-separated string.
const renderModelsTable = (models: ModelsGetResponse[], providers: ProvidersGetResponse[]) => {
  const providerMap = new Map(providers.map(p => [p.id, p.name]))
  const rows: string[][] = [['Model ID', 'Type', 'Provider', 'Input Modalities']]
  for (const item of models) {
    rows.push([
      getModelId(item),
      getModelType(item),
      // Fall back to the provider ID when the name lookup misses.
      providerMap.get(getProviderId(item)) ?? getProviderId(item),
      getModelInputModalities(item).join(', '),
    ])
  }
  return table(rows)
}
modelId, llm_provider_id: provider.id, - is_multimodal: isMultimodal, + input_modalities: inputModalities, type: modelType, dimensions, }, diff --git a/packages/sdk/src/@pinia/colada.gen.ts b/packages/sdk/src/@pinia/colada.gen.ts index c40da7c8..f0e9912e 100644 --- a/packages/sdk/src/@pinia/colada.gen.ts +++ b/packages/sdk/src/@pinia/colada.gen.ts @@ -4,8 +4,8 @@ import { type _JSONValue, defineQueryOptions, type UseMutationOptions } from '@p import { serializeQueryKeyValue } from '../client'; import { client } from '../client.gen'; -import { deleteBotsByBotIdContainer, deleteBotsByBotIdContainerSkills, deleteBotsByBotIdMcpById, deleteBotsByBotIdMemory, deleteBotsByBotIdMemoryById, deleteBotsByBotIdScheduleById, deleteBotsByBotIdSettings, deleteBotsByBotIdSubagentsById, deleteBotsById, deleteBotsByIdMembersByUserId, deleteModelsById, deleteModelsModelByModelId, deleteProvidersById, deleteSearchProvidersById, getBots, getBotsByBotIdContainer, getBotsByBotIdContainerSkills, getBotsByBotIdContainerSnapshots, getBotsByBotIdMcp, getBotsByBotIdMcpById, getBotsByBotIdMcpExport, getBotsByBotIdMemory, getBotsByBotIdMemoryUsage, getBotsByBotIdMessages, getBotsByBotIdSchedule, getBotsByBotIdScheduleById, getBotsByBotIdSettings, getBotsByBotIdSubagents, getBotsByBotIdSubagentsById, getBotsByBotIdSubagentsByIdContext, getBotsByBotIdSubagentsByIdSkills, getBotsById, getBotsByIdChannelByPlatform, getBotsByIdChecks, getBotsByIdChecksKeys, getBotsByIdChecksRunByKey, getBotsByIdMembers, getChannels, getChannelsByPlatform, getModels, getModelsById, getModelsCount, getModelsModelByModelId, getProviders, getProvidersById, getProvidersByIdModels, getProvidersCount, getProvidersNameByName, getSearchProviders, getSearchProvidersById, getSearchProvidersMeta, getUsers, getUsersById, getUsersMe, getUsersMeChannelsByPlatform, getUsersMeIdentities, type Options, postAuthLogin, postBots, postBotsByBotIdContainer, postBotsByBotIdContainerSkills, postBotsByBotIdContainerSnapshots, 
postBotsByBotIdContainerStart, postBotsByBotIdContainerStop, postBotsByBotIdMcp, postBotsByBotIdMcpOpsBatchDelete, postBotsByBotIdMcpStdio, postBotsByBotIdMcpStdioByConnectionId, postBotsByBotIdMemory, postBotsByBotIdMemoryCompact, postBotsByBotIdMemoryRebuild, postBotsByBotIdMemorySearch, postBotsByBotIdSchedule, postBotsByBotIdSettings, postBotsByBotIdSubagents, postBotsByBotIdSubagentsByIdSkills, postBotsByBotIdTools, postBotsByIdChannelByPlatformSend, postBotsByIdChannelByPlatformSendChat, postEmbeddings, postModels, postProviders, postSearchProviders, postUsers, putBotsByBotIdMcpById, putBotsByBotIdMcpImport, putBotsByBotIdScheduleById, putBotsByBotIdSettings, putBotsByBotIdSubagentsById, putBotsByBotIdSubagentsByIdContext, putBotsByBotIdSubagentsByIdSkills, putBotsById, putBotsByIdChannelByPlatform, putBotsByIdMembers, putBotsByIdOwner, putModelsById, putModelsModelByModelId, putProvidersById, putSearchProvidersById, putUsersById, putUsersByIdPassword, putUsersMe, putUsersMeChannelsByPlatform, putUsersMePassword } from '../sdk.gen'; -import type { DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerError, DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsError, DeleteBotsByBotIdContainerSkillsResponse, DeleteBotsByBotIdMcpByIdData, DeleteBotsByBotIdMcpByIdError, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdError, DeleteBotsByBotIdMemoryByIdResponse, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryError, DeleteBotsByBotIdMemoryResponse, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdError, DeleteBotsByBotIdSettingsData, DeleteBotsByBotIdSettingsError, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdError, DeleteBotsByIdData, DeleteBotsByIdError, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdError, DeleteBotsByIdResponse, DeleteModelsByIdData, DeleteModelsByIdError, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdError, DeleteProvidersByIdData, 
DeleteProvidersByIdError, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdError, GetBotsByBotIdContainerData, GetBotsByBotIdContainerSkillsData, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpData, GetBotsByBotIdMcpExportData, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMessagesData, GetBotsByBotIdScheduleByIdData, GetBotsByBotIdScheduleData, GetBotsByBotIdSettingsData, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsData, GetBotsByIdChannelByPlatformData, GetBotsByIdChecksData, GetBotsByIdChecksKeysData, GetBotsByIdChecksRunByKeyData, GetBotsByIdData, GetBotsByIdMembersData, GetBotsData, GetChannelsByPlatformData, GetChannelsData, GetModelsByIdData, GetModelsCountData, GetModelsData, GetModelsModelByModelIdData, GetProvidersByIdData, GetProvidersByIdModelsData, GetProvidersCountData, GetProvidersData, GetProvidersNameByNameData, GetSearchProvidersByIdData, GetSearchProvidersData, GetSearchProvidersMetaData, GetUsersByIdData, GetUsersData, GetUsersMeChannelsByPlatformData, GetUsersMeData, GetUsersMeIdentitiesData, PostAuthLoginData, PostAuthLoginError, PostAuthLoginResponse, PostBotsByBotIdContainerData, PostBotsByBotIdContainerError, PostBotsByBotIdContainerResponse, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsError, PostBotsByBotIdContainerSkillsResponse, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsError, PostBotsByBotIdContainerSnapshotsResponse, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartError, PostBotsByBotIdContainerStartResponse, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopError, PostBotsByBotIdContainerStopResponse, PostBotsByBotIdMcpData, PostBotsByBotIdMcpError, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteError, PostBotsByBotIdMcpResponse, PostBotsByBotIdMcpStdioByConnectionIdData, 
PostBotsByBotIdMcpStdioByConnectionIdError, PostBotsByBotIdMcpStdioByConnectionIdResponse, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioError, PostBotsByBotIdMcpStdioResponse, PostBotsByBotIdMemoryCompactData, PostBotsByBotIdMemoryCompactError, PostBotsByBotIdMemoryCompactResponse, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryError, PostBotsByBotIdMemoryRebuildData, PostBotsByBotIdMemoryRebuildError, PostBotsByBotIdMemoryRebuildResponse, PostBotsByBotIdMemoryResponse, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchError, PostBotsByBotIdMemorySearchResponse, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleError, PostBotsByBotIdScheduleResponse, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsError, PostBotsByBotIdSettingsResponse, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsError, PostBotsByBotIdSubagentsByIdSkillsResponse, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsError, PostBotsByBotIdSubagentsResponse, PostBotsByBotIdToolsData, PostBotsByBotIdToolsError, PostBotsByBotIdToolsResponse, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatError, PostBotsByIdChannelByPlatformSendChatResponse, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendError, PostBotsByIdChannelByPlatformSendResponse, PostBotsData, PostBotsError, PostBotsResponse, PostEmbeddingsData, PostEmbeddingsError, PostEmbeddingsResponse, PostModelsData, PostModelsError, PostModelsResponse, PostProvidersData, PostProvidersError, PostProvidersResponse, PostSearchProvidersData, PostSearchProvidersError, PostSearchProvidersResponse, PostUsersData, PostUsersError, PostUsersResponse, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdError, PutBotsByBotIdMcpByIdResponse, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportError, PutBotsByBotIdMcpImportResponse, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdError, PutBotsByBotIdScheduleByIdResponse, PutBotsByBotIdSettingsData, 
PutBotsByBotIdSettingsError, PutBotsByBotIdSettingsResponse, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextError, PutBotsByBotIdSubagentsByIdContextResponse, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdError, PutBotsByBotIdSubagentsByIdResponse, PutBotsByBotIdSubagentsByIdSkillsData, PutBotsByBotIdSubagentsByIdSkillsError, PutBotsByBotIdSubagentsByIdSkillsResponse, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformError, PutBotsByIdChannelByPlatformResponse, PutBotsByIdData, PutBotsByIdError, PutBotsByIdMembersData, PutBotsByIdMembersError, PutBotsByIdMembersResponse, PutBotsByIdOwnerData, PutBotsByIdOwnerError, PutBotsByIdOwnerResponse, PutBotsByIdResponse, PutModelsByIdData, PutModelsByIdError, PutModelsByIdResponse, PutModelsModelByModelIdData, PutModelsModelByModelIdError, PutModelsModelByModelIdResponse, PutProvidersByIdData, PutProvidersByIdError, PutProvidersByIdResponse, PutSearchProvidersByIdData, PutSearchProvidersByIdError, PutSearchProvidersByIdResponse, PutUsersByIdData, PutUsersByIdError, PutUsersByIdPasswordData, PutUsersByIdPasswordError, PutUsersByIdResponse, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformError, PutUsersMeChannelsByPlatformResponse, PutUsersMeData, PutUsersMeError, PutUsersMePasswordData, PutUsersMePasswordError, PutUsersMeResponse } from '../types.gen'; +import { deleteBotsByBotIdContainer, deleteBotsByBotIdContainerSkills, deleteBotsByBotIdMcpById, deleteBotsByBotIdMemory, deleteBotsByBotIdMemoryById, deleteBotsByBotIdScheduleById, deleteBotsByBotIdSettings, deleteBotsByBotIdSubagentsById, deleteBotsById, deleteBotsByIdChannelByPlatform, deleteBotsByIdMembersByUserId, deleteModelsById, deleteModelsModelByModelId, deleteProvidersById, deleteSearchProvidersById, getBots, getBotsByBotIdContainer, getBotsByBotIdContainerSkills, getBotsByBotIdContainerSnapshots, getBotsByBotIdMcp, getBotsByBotIdMcpById, getBotsByBotIdMcpExport, getBotsByBotIdMemory, 
getBotsByBotIdMemoryUsage, getBotsByBotIdMessages, getBotsByBotIdSchedule, getBotsByBotIdScheduleById, getBotsByBotIdSettings, getBotsByBotIdSubagents, getBotsByBotIdSubagentsById, getBotsByBotIdSubagentsByIdContext, getBotsByBotIdSubagentsByIdSkills, getBotsById, getBotsByIdChannelByPlatform, getBotsByIdChecks, getBotsByIdMembers, getChannels, getChannelsByPlatform, getModels, getModelsById, getModelsCount, getModelsModelByModelId, getProviders, getProvidersById, getProvidersByIdModels, getProvidersCount, getProvidersNameByName, getSearchProviders, getSearchProvidersById, getSearchProvidersMeta, getUsers, getUsersById, getUsersMe, getUsersMeChannelsByPlatform, getUsersMeIdentities, type Options, patchBotsByIdChannelByPlatformStatus, postAuthLogin, postBots, postBotsByBotIdContainer, postBotsByBotIdContainerSkills, postBotsByBotIdContainerSnapshots, postBotsByBotIdContainerStart, postBotsByBotIdContainerStop, postBotsByBotIdMcp, postBotsByBotIdMcpOpsBatchDelete, postBotsByBotIdMcpStdio, postBotsByBotIdMcpStdioByConnectionId, postBotsByBotIdMemory, postBotsByBotIdMemoryCompact, postBotsByBotIdMemoryRebuild, postBotsByBotIdMemorySearch, postBotsByBotIdSchedule, postBotsByBotIdSettings, postBotsByBotIdSubagents, postBotsByBotIdSubagentsByIdSkills, postBotsByBotIdTools, postBotsByIdChannelByPlatformSend, postBotsByIdChannelByPlatformSendChat, postEmbeddings, postModels, postProviders, postSearchProviders, postUsers, putBotsByBotIdMcpById, putBotsByBotIdMcpImport, putBotsByBotIdScheduleById, putBotsByBotIdSettings, putBotsByBotIdSubagentsById, putBotsByBotIdSubagentsByIdContext, putBotsByBotIdSubagentsByIdSkills, putBotsById, putBotsByIdChannelByPlatform, putBotsByIdMembers, putBotsByIdOwner, putModelsById, putModelsModelByModelId, putProvidersById, putSearchProvidersById, putUsersById, putUsersByIdPassword, putUsersMe, putUsersMeChannelsByPlatform, putUsersMePassword } from '../sdk.gen'; +import type { DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerError, 
DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsError, DeleteBotsByBotIdContainerSkillsResponse, DeleteBotsByBotIdMcpByIdData, DeleteBotsByBotIdMcpByIdError, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdError, DeleteBotsByBotIdMemoryByIdResponse, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryError, DeleteBotsByBotIdMemoryResponse, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdError, DeleteBotsByBotIdSettingsData, DeleteBotsByBotIdSettingsError, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdError, DeleteBotsByIdChannelByPlatformData, DeleteBotsByIdChannelByPlatformError, DeleteBotsByIdData, DeleteBotsByIdError, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdError, DeleteBotsByIdResponse, DeleteModelsByIdData, DeleteModelsByIdError, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdError, DeleteProvidersByIdData, DeleteProvidersByIdError, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdError, GetBotsByBotIdContainerData, GetBotsByBotIdContainerSkillsData, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpData, GetBotsByBotIdMcpExportData, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMessagesData, GetBotsByBotIdScheduleByIdData, GetBotsByBotIdScheduleData, GetBotsByBotIdSettingsData, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsData, GetBotsByIdChannelByPlatformData, GetBotsByIdChecksData, GetBotsByIdData, GetBotsByIdMembersData, GetBotsData, GetChannelsByPlatformData, GetChannelsData, GetModelsByIdData, GetModelsCountData, GetModelsData, GetModelsModelByModelIdData, GetProvidersByIdData, GetProvidersByIdModelsData, GetProvidersCountData, GetProvidersData, GetProvidersNameByNameData, GetSearchProvidersByIdData, GetSearchProvidersData, GetSearchProvidersMetaData, GetUsersByIdData, GetUsersData, 
GetUsersMeChannelsByPlatformData, GetUsersMeData, GetUsersMeIdentitiesData, PatchBotsByIdChannelByPlatformStatusData, PatchBotsByIdChannelByPlatformStatusError, PatchBotsByIdChannelByPlatformStatusResponse, PostAuthLoginData, PostAuthLoginError, PostAuthLoginResponse, PostBotsByBotIdContainerData, PostBotsByBotIdContainerError, PostBotsByBotIdContainerResponse, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsError, PostBotsByBotIdContainerSkillsResponse, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsError, PostBotsByBotIdContainerSnapshotsResponse, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartError, PostBotsByBotIdContainerStartResponse, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopError, PostBotsByBotIdContainerStopResponse, PostBotsByBotIdMcpData, PostBotsByBotIdMcpError, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteError, PostBotsByBotIdMcpResponse, PostBotsByBotIdMcpStdioByConnectionIdData, PostBotsByBotIdMcpStdioByConnectionIdError, PostBotsByBotIdMcpStdioByConnectionIdResponse, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioError, PostBotsByBotIdMcpStdioResponse, PostBotsByBotIdMemoryCompactData, PostBotsByBotIdMemoryCompactError, PostBotsByBotIdMemoryCompactResponse, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryError, PostBotsByBotIdMemoryRebuildData, PostBotsByBotIdMemoryRebuildError, PostBotsByBotIdMemoryRebuildResponse, PostBotsByBotIdMemoryResponse, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchError, PostBotsByBotIdMemorySearchResponse, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleError, PostBotsByBotIdScheduleResponse, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsError, PostBotsByBotIdSettingsResponse, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsError, PostBotsByBotIdSubagentsByIdSkillsResponse, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsError, 
PostBotsByBotIdSubagentsResponse, PostBotsByBotIdToolsData, PostBotsByBotIdToolsError, PostBotsByBotIdToolsResponse, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatError, PostBotsByIdChannelByPlatformSendChatResponse, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendError, PostBotsByIdChannelByPlatformSendResponse, PostBotsData, PostBotsError, PostBotsResponse, PostEmbeddingsData, PostEmbeddingsError, PostEmbeddingsResponse, PostModelsData, PostModelsError, PostModelsResponse, PostProvidersData, PostProvidersError, PostProvidersResponse, PostSearchProvidersData, PostSearchProvidersError, PostSearchProvidersResponse, PostUsersData, PostUsersError, PostUsersResponse, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdError, PutBotsByBotIdMcpByIdResponse, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportError, PutBotsByBotIdMcpImportResponse, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdError, PutBotsByBotIdScheduleByIdResponse, PutBotsByBotIdSettingsData, PutBotsByBotIdSettingsError, PutBotsByBotIdSettingsResponse, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextError, PutBotsByBotIdSubagentsByIdContextResponse, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdError, PutBotsByBotIdSubagentsByIdResponse, PutBotsByBotIdSubagentsByIdSkillsData, PutBotsByBotIdSubagentsByIdSkillsError, PutBotsByBotIdSubagentsByIdSkillsResponse, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformError, PutBotsByIdChannelByPlatformResponse, PutBotsByIdData, PutBotsByIdError, PutBotsByIdMembersData, PutBotsByIdMembersError, PutBotsByIdMembersResponse, PutBotsByIdOwnerData, PutBotsByIdOwnerError, PutBotsByIdOwnerResponse, PutBotsByIdResponse, PutModelsByIdData, PutModelsByIdError, PutModelsByIdResponse, PutModelsModelByModelIdData, PutModelsModelByModelIdError, PutModelsModelByModelIdResponse, PutProvidersByIdData, PutProvidersByIdError, PutProvidersByIdResponse, 
PutSearchProvidersByIdData, PutSearchProvidersByIdError, PutSearchProvidersByIdResponse, PutUsersByIdData, PutUsersByIdError, PutUsersByIdPasswordData, PutUsersByIdPasswordError, PutUsersByIdResponse, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformError, PutUsersMeChannelsByPlatformResponse, PutUsersMeData, PutUsersMeError, PutUsersMePasswordData, PutUsersMePasswordError, PutUsersMeResponse } from '../types.gen'; /** * Login @@ -963,6 +963,22 @@ export const putBotsByIdMutation = (options?: Partial>) } }); +/** + * Delete bot channel config + * + * Remove bot channel configuration + */ +export const deleteBotsByIdChannelByPlatformMutation = (options?: Partial>): UseMutationOptions, DeleteBotsByIdChannelByPlatformError> => ({ + mutation: async (vars) => { + const { data } = await deleteBotsByIdChannelByPlatform({ + ...options, + ...vars, + throwOnError: true + }); + return data; + } +}); + export const getBotsByIdChannelByPlatformQueryKey = (options: Options) => createQueryKey('getBotsByIdChannelByPlatform', options); /** @@ -1030,6 +1046,22 @@ export const postBotsByIdChannelByPlatformSendChatMutation = (options?: Partial< } }); +/** + * Update bot channel status + * + * Update bot channel enabled/disabled status + */ +export const patchBotsByIdChannelByPlatformStatusMutation = (options?: Partial>): UseMutationOptions, PatchBotsByIdChannelByPlatformStatusError> => ({ + mutation: async (vars) => { + const { data } = await patchBotsByIdChannelByPlatformStatus({ + ...options, + ...vars, + throwOnError: true + }); + return data; + } +}); + export const getBotsByIdChecksQueryKey = (options: Options) => createQueryKey('getBotsByIdChecks', options); /** @@ -1049,44 +1081,6 @@ export const getBotsByIdChecksQuery = defineQueryOptions((options: Options) => createQueryKey('getBotsByIdChecksKeys', options); - -/** - * List available check keys - * - * Returns all check keys available for a bot (builtin + MCP connections) - */ -export const 
getBotsByIdChecksKeysQuery = defineQueryOptions((options: Options) => ({ - key: getBotsByIdChecksKeysQueryKey(options), - query: async (context) => { - const { data } = await getBotsByIdChecksKeys({ - ...options, - ...context, - throwOnError: true - }); - return data; - } -})); - -export const getBotsByIdChecksRunByKeyQueryKey = (options: Options) => createQueryKey('getBotsByIdChecksRunByKey', options); - -/** - * Run a single bot check - * - * Evaluate one check key for a bot - */ -export const getBotsByIdChecksRunByKeyQuery = defineQueryOptions((options: Options) => ({ - key: getBotsByIdChecksRunByKeyQueryKey(options), - query: async (context) => { - const { data } = await getBotsByIdChecksRunByKey({ - ...options, - ...context, - throwOnError: true - }); - return data; - } -})); - export const getBotsByIdMembersQueryKey = (options: Options) => createQueryKey('getBotsByIdMembers', options); /** diff --git a/packages/sdk/src/index.ts b/packages/sdk/src/index.ts index 8cd533fb..9b608be4 100644 --- a/packages/sdk/src/index.ts +++ b/packages/sdk/src/index.ts @@ -1,4 +1,4 @@ // This file is auto-generated by @hey-api/openapi-ts -export { deleteBotsByBotIdContainer, deleteBotsByBotIdContainerSkills, deleteBotsByBotIdMcpById, deleteBotsByBotIdMemory, deleteBotsByBotIdMemoryById, deleteBotsByBotIdScheduleById, deleteBotsByBotIdSettings, deleteBotsByBotIdSubagentsById, deleteBotsById, deleteBotsByIdMembersByUserId, deleteModelsById, deleteModelsModelByModelId, deleteProvidersById, deleteSearchProvidersById, getBots, getBotsByBotIdContainer, getBotsByBotIdContainerSkills, getBotsByBotIdContainerSnapshots, getBotsByBotIdMcp, getBotsByBotIdMcpById, getBotsByBotIdMcpExport, getBotsByBotIdMemory, getBotsByBotIdMemoryUsage, getBotsByBotIdMessages, getBotsByBotIdSchedule, getBotsByBotIdScheduleById, getBotsByBotIdSettings, getBotsByBotIdSubagents, getBotsByBotIdSubagentsById, getBotsByBotIdSubagentsByIdContext, getBotsByBotIdSubagentsByIdSkills, getBotsById, 
getBotsByIdChannelByPlatform, getBotsByIdChecks, getBotsByIdChecksKeys, getBotsByIdChecksRunByKey, getBotsByIdMembers, getChannels, getChannelsByPlatform, getModels, getModelsById, getModelsCount, getModelsModelByModelId, getProviders, getProvidersById, getProvidersByIdModels, getProvidersCount, getProvidersNameByName, getSearchProviders, getSearchProvidersById, getSearchProvidersMeta, getUsers, getUsersById, getUsersMe, getUsersMeChannelsByPlatform, getUsersMeIdentities, type Options, postAuthLogin, postBots, postBotsByBotIdContainer, postBotsByBotIdContainerSkills, postBotsByBotIdContainerSnapshots, postBotsByBotIdContainerStart, postBotsByBotIdContainerStop, postBotsByBotIdMcp, postBotsByBotIdMcpOpsBatchDelete, postBotsByBotIdMcpStdio, postBotsByBotIdMcpStdioByConnectionId, postBotsByBotIdMemory, postBotsByBotIdMemoryCompact, postBotsByBotIdMemoryRebuild, postBotsByBotIdMemorySearch, postBotsByBotIdSchedule, postBotsByBotIdSettings, postBotsByBotIdSubagents, postBotsByBotIdSubagentsByIdSkills, postBotsByBotIdTools, postBotsByIdChannelByPlatformSend, postBotsByIdChannelByPlatformSendChat, postEmbeddings, postModels, postProviders, postSearchProviders, postUsers, putBotsByBotIdMcpById, putBotsByBotIdMcpImport, putBotsByBotIdScheduleById, putBotsByBotIdSettings, putBotsByBotIdSubagentsById, putBotsByBotIdSubagentsByIdContext, putBotsByBotIdSubagentsByIdSkills, putBotsById, putBotsByIdChannelByPlatform, putBotsByIdMembers, putBotsByIdOwner, putModelsById, putModelsModelByModelId, putProvidersById, putSearchProvidersById, putUsersById, putUsersByIdPassword, putUsersMe, putUsersMeChannelsByPlatform, putUsersMePassword } from './sdk.gen'; -export type { AccountsAccount, AccountsCreateAccountRequest, AccountsListAccountsResponse, AccountsResetPasswordRequest, AccountsUpdateAccountRequest, AccountsUpdatePasswordRequest, AccountsUpdateProfileRequest, BotsBot, BotsBotCheck, BotsBotMember, BotsCreateBotRequest, BotsListBotsResponse, BotsListCheckKeysResponse, 
BotsListChecksResponse, BotsListMembersResponse, BotsTransferBotRequest, BotsUpdateBotRequest, BotsUpsertMemberRequest, ChannelAction, ChannelAttachment, ChannelAttachmentType, ChannelChannelCapabilities, ChannelChannelConfig, ChannelChannelIdentityBinding, ChannelConfigSchema, ChannelFieldSchema, ChannelFieldType, ChannelMessage, ChannelMessageFormat, ChannelMessagePart, ChannelMessagePartType, ChannelMessageTextStyle, ChannelReplyRef, ChannelSendRequest, ChannelTargetHint, ChannelTargetSpec, ChannelThreadRef, ChannelUpsertChannelIdentityConfigRequest, ChannelUpsertConfigRequest, ClientOptions, DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerError, DeleteBotsByBotIdContainerErrors, DeleteBotsByBotIdContainerResponses, DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsError, DeleteBotsByBotIdContainerSkillsErrors, DeleteBotsByBotIdContainerSkillsResponse, DeleteBotsByBotIdContainerSkillsResponses, DeleteBotsByBotIdMcpByIdData, DeleteBotsByBotIdMcpByIdError, DeleteBotsByBotIdMcpByIdErrors, DeleteBotsByBotIdMcpByIdResponses, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdError, DeleteBotsByBotIdMemoryByIdErrors, DeleteBotsByBotIdMemoryByIdResponse, DeleteBotsByBotIdMemoryByIdResponses, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryError, DeleteBotsByBotIdMemoryErrors, DeleteBotsByBotIdMemoryResponse, DeleteBotsByBotIdMemoryResponses, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdError, DeleteBotsByBotIdScheduleByIdErrors, DeleteBotsByBotIdScheduleByIdResponses, DeleteBotsByBotIdSettingsData, DeleteBotsByBotIdSettingsError, DeleteBotsByBotIdSettingsErrors, DeleteBotsByBotIdSettingsResponses, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdError, DeleteBotsByBotIdSubagentsByIdErrors, DeleteBotsByBotIdSubagentsByIdResponses, DeleteBotsByIdData, DeleteBotsByIdError, DeleteBotsByIdErrors, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdError, 
DeleteBotsByIdMembersByUserIdErrors, DeleteBotsByIdMembersByUserIdResponses, DeleteBotsByIdResponse, DeleteBotsByIdResponses, DeleteModelsByIdData, DeleteModelsByIdError, DeleteModelsByIdErrors, DeleteModelsByIdResponses, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdError, DeleteModelsModelByModelIdErrors, DeleteModelsModelByModelIdResponses, DeleteProvidersByIdData, DeleteProvidersByIdError, DeleteProvidersByIdErrors, DeleteProvidersByIdResponses, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdError, DeleteSearchProvidersByIdErrors, DeleteSearchProvidersByIdResponses, GetBotsByBotIdContainerData, GetBotsByBotIdContainerError, GetBotsByBotIdContainerErrors, GetBotsByBotIdContainerResponse, GetBotsByBotIdContainerResponses, GetBotsByBotIdContainerSkillsData, GetBotsByBotIdContainerSkillsError, GetBotsByBotIdContainerSkillsErrors, GetBotsByBotIdContainerSkillsResponse, GetBotsByBotIdContainerSkillsResponses, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdContainerSnapshotsResponse, GetBotsByBotIdContainerSnapshotsResponses, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpByIdError, GetBotsByBotIdMcpByIdErrors, GetBotsByBotIdMcpByIdResponse, GetBotsByBotIdMcpByIdResponses, GetBotsByBotIdMcpData, GetBotsByBotIdMcpError, GetBotsByBotIdMcpErrors, GetBotsByBotIdMcpExportData, GetBotsByBotIdMcpExportError, GetBotsByBotIdMcpExportErrors, GetBotsByBotIdMcpExportResponse, GetBotsByBotIdMcpExportResponses, GetBotsByBotIdMcpResponse, GetBotsByBotIdMcpResponses, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryError, GetBotsByBotIdMemoryErrors, GetBotsByBotIdMemoryResponse, GetBotsByBotIdMemoryResponses, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMemoryUsageError, GetBotsByBotIdMemoryUsageErrors, GetBotsByBotIdMemoryUsageResponse, GetBotsByBotIdMemoryUsageResponses, GetBotsByBotIdMessagesData, GetBotsByBotIdMessagesError, GetBotsByBotIdMessagesErrors, GetBotsByBotIdMessagesResponse, GetBotsByBotIdMessagesResponses, GetBotsByBotIdScheduleByIdData, 
GetBotsByBotIdScheduleByIdError, GetBotsByBotIdScheduleByIdErrors, GetBotsByBotIdScheduleByIdResponse, GetBotsByBotIdScheduleByIdResponses, GetBotsByBotIdScheduleData, GetBotsByBotIdScheduleError, GetBotsByBotIdScheduleErrors, GetBotsByBotIdScheduleResponse, GetBotsByBotIdScheduleResponses, GetBotsByBotIdSettingsData, GetBotsByBotIdSettingsError, GetBotsByBotIdSettingsErrors, GetBotsByBotIdSettingsResponse, GetBotsByBotIdSettingsResponses, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdContextError, GetBotsByBotIdSubagentsByIdContextErrors, GetBotsByBotIdSubagentsByIdContextResponse, GetBotsByBotIdSubagentsByIdContextResponses, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdError, GetBotsByBotIdSubagentsByIdErrors, GetBotsByBotIdSubagentsByIdResponse, GetBotsByBotIdSubagentsByIdResponses, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsByIdSkillsError, GetBotsByBotIdSubagentsByIdSkillsErrors, GetBotsByBotIdSubagentsByIdSkillsResponse, GetBotsByBotIdSubagentsByIdSkillsResponses, GetBotsByBotIdSubagentsData, GetBotsByBotIdSubagentsError, GetBotsByBotIdSubagentsErrors, GetBotsByBotIdSubagentsResponse, GetBotsByBotIdSubagentsResponses, GetBotsByIdChannelByPlatformData, GetBotsByIdChannelByPlatformError, GetBotsByIdChannelByPlatformErrors, GetBotsByIdChannelByPlatformResponse, GetBotsByIdChannelByPlatformResponses, GetBotsByIdChecksData, GetBotsByIdChecksError, GetBotsByIdChecksErrors, GetBotsByIdChecksKeysData, GetBotsByIdChecksKeysResponse, GetBotsByIdChecksKeysResponses, GetBotsByIdChecksResponse, GetBotsByIdChecksResponses, GetBotsByIdChecksRunByKeyData, GetBotsByIdChecksRunByKeyResponse, GetBotsByIdChecksRunByKeyResponses, GetBotsByIdData, GetBotsByIdError, GetBotsByIdErrors, GetBotsByIdMembersData, GetBotsByIdMembersError, GetBotsByIdMembersErrors, GetBotsByIdMembersResponse, GetBotsByIdMembersResponses, GetBotsByIdResponse, GetBotsByIdResponses, GetBotsData, GetBotsError, GetBotsErrors, GetBotsResponse, 
GetBotsResponses, GetChannelsByPlatformData, GetChannelsByPlatformError, GetChannelsByPlatformErrors, GetChannelsByPlatformResponse, GetChannelsByPlatformResponses, GetChannelsData, GetChannelsError, GetChannelsErrors, GetChannelsResponse, GetChannelsResponses, GetModelsByIdData, GetModelsByIdError, GetModelsByIdErrors, GetModelsByIdResponse, GetModelsByIdResponses, GetModelsCountData, GetModelsCountError, GetModelsCountErrors, GetModelsCountResponse, GetModelsCountResponses, GetModelsData, GetModelsError, GetModelsErrors, GetModelsModelByModelIdData, GetModelsModelByModelIdError, GetModelsModelByModelIdErrors, GetModelsModelByModelIdResponse, GetModelsModelByModelIdResponses, GetModelsResponse, GetModelsResponses, GetProvidersByIdData, GetProvidersByIdError, GetProvidersByIdErrors, GetProvidersByIdModelsData, GetProvidersByIdModelsError, GetProvidersByIdModelsErrors, GetProvidersByIdModelsResponse, GetProvidersByIdModelsResponses, GetProvidersByIdResponse, GetProvidersByIdResponses, GetProvidersCountData, GetProvidersCountError, GetProvidersCountErrors, GetProvidersCountResponse, GetProvidersCountResponses, GetProvidersData, GetProvidersError, GetProvidersErrors, GetProvidersNameByNameData, GetProvidersNameByNameError, GetProvidersNameByNameErrors, GetProvidersNameByNameResponse, GetProvidersNameByNameResponses, GetProvidersResponse, GetProvidersResponses, GetSearchProvidersByIdData, GetSearchProvidersByIdError, GetSearchProvidersByIdErrors, GetSearchProvidersByIdResponse, GetSearchProvidersByIdResponses, GetSearchProvidersData, GetSearchProvidersError, GetSearchProvidersErrors, GetSearchProvidersMetaData, GetSearchProvidersMetaResponse, GetSearchProvidersMetaResponses, GetSearchProvidersResponse, GetSearchProvidersResponses, GetUsersByIdData, GetUsersByIdError, GetUsersByIdErrors, GetUsersByIdResponse, GetUsersByIdResponses, GetUsersData, GetUsersError, GetUsersErrors, GetUsersMeChannelsByPlatformData, GetUsersMeChannelsByPlatformError, 
GetUsersMeChannelsByPlatformErrors, GetUsersMeChannelsByPlatformResponse, GetUsersMeChannelsByPlatformResponses, GetUsersMeData, GetUsersMeError, GetUsersMeErrors, GetUsersMeIdentitiesData, GetUsersMeIdentitiesError, GetUsersMeIdentitiesErrors, GetUsersMeIdentitiesResponse, GetUsersMeIdentitiesResponses, GetUsersMeResponse, GetUsersMeResponses, GetUsersResponse, GetUsersResponses, GithubComMemohaiMemohInternalMcpConnection, HandlersBatchDeleteRequest, HandlersChannelMeta, HandlersCreateContainerRequest, HandlersCreateContainerResponse, HandlersCreateSnapshotRequest, HandlersCreateSnapshotResponse, HandlersEmbeddingsInput, HandlersEmbeddingsRequest, HandlersEmbeddingsResponse, HandlersEmbeddingsUsage, HandlersErrorResponse, HandlersGetContainerResponse, HandlersListMyIdentitiesResponse, HandlersListSnapshotsResponse, HandlersLoginRequest, HandlersLoginResponse, HandlersMcpStdioRequest, HandlersMcpStdioResponse, HandlersMemoryAddPayload, HandlersMemoryCompactPayload, HandlersMemoryDeletePayload, HandlersMemorySearchPayload, HandlersSkillItem, HandlersSkillsDeleteRequest, HandlersSkillsOpResponse, HandlersSkillsResponse, HandlersSkillsUpsertRequest, HandlersSnapshotInfo, IdentitiesChannelIdentity, McpExportResponse, McpImportRequest, McpListResponse, McpMcpServerEntry, McpUpsertRequest, MemoryCdfPoint, MemoryCompactResult, MemoryDeleteResponse, MemoryMemoryItem, MemoryMessage, MemoryRebuildResult, MemorySearchResponse, MemoryTopKBucket, MemoryUsageResponse, MessageMessage, ModelsAddRequest, ModelsAddResponse, ModelsCountResponse, ModelsGetResponse, ModelsModelType, ModelsUpdateRequest, PostAuthLoginData, PostAuthLoginError, PostAuthLoginErrors, PostAuthLoginResponse, PostAuthLoginResponses, PostBotsByBotIdContainerData, PostBotsByBotIdContainerError, PostBotsByBotIdContainerErrors, PostBotsByBotIdContainerResponse, PostBotsByBotIdContainerResponses, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsError, PostBotsByBotIdContainerSkillsErrors, 
PostBotsByBotIdContainerSkillsResponse, PostBotsByBotIdContainerSkillsResponses, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsError, PostBotsByBotIdContainerSnapshotsErrors, PostBotsByBotIdContainerSnapshotsResponse, PostBotsByBotIdContainerSnapshotsResponses, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartError, PostBotsByBotIdContainerStartErrors, PostBotsByBotIdContainerStartResponse, PostBotsByBotIdContainerStartResponses, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopError, PostBotsByBotIdContainerStopErrors, PostBotsByBotIdContainerStopResponse, PostBotsByBotIdContainerStopResponses, PostBotsByBotIdMcpData, PostBotsByBotIdMcpError, PostBotsByBotIdMcpErrors, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteError, PostBotsByBotIdMcpOpsBatchDeleteErrors, PostBotsByBotIdMcpOpsBatchDeleteResponses, PostBotsByBotIdMcpResponse, PostBotsByBotIdMcpResponses, PostBotsByBotIdMcpStdioByConnectionIdData, PostBotsByBotIdMcpStdioByConnectionIdError, PostBotsByBotIdMcpStdioByConnectionIdErrors, PostBotsByBotIdMcpStdioByConnectionIdResponse, PostBotsByBotIdMcpStdioByConnectionIdResponses, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioError, PostBotsByBotIdMcpStdioErrors, PostBotsByBotIdMcpStdioResponse, PostBotsByBotIdMcpStdioResponses, PostBotsByBotIdMemoryCompactData, PostBotsByBotIdMemoryCompactError, PostBotsByBotIdMemoryCompactErrors, PostBotsByBotIdMemoryCompactResponse, PostBotsByBotIdMemoryCompactResponses, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryError, PostBotsByBotIdMemoryErrors, PostBotsByBotIdMemoryRebuildData, PostBotsByBotIdMemoryRebuildError, PostBotsByBotIdMemoryRebuildErrors, PostBotsByBotIdMemoryRebuildResponse, PostBotsByBotIdMemoryRebuildResponses, PostBotsByBotIdMemoryResponse, PostBotsByBotIdMemoryResponses, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchError, PostBotsByBotIdMemorySearchErrors, PostBotsByBotIdMemorySearchResponse, 
PostBotsByBotIdMemorySearchResponses, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleError, PostBotsByBotIdScheduleErrors, PostBotsByBotIdScheduleResponse, PostBotsByBotIdScheduleResponses, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsError, PostBotsByBotIdSettingsErrors, PostBotsByBotIdSettingsResponse, PostBotsByBotIdSettingsResponses, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsError, PostBotsByBotIdSubagentsByIdSkillsErrors, PostBotsByBotIdSubagentsByIdSkillsResponse, PostBotsByBotIdSubagentsByIdSkillsResponses, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsError, PostBotsByBotIdSubagentsErrors, PostBotsByBotIdSubagentsResponse, PostBotsByBotIdSubagentsResponses, PostBotsByBotIdToolsData, PostBotsByBotIdToolsError, PostBotsByBotIdToolsErrors, PostBotsByBotIdToolsResponse, PostBotsByBotIdToolsResponses, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatError, PostBotsByIdChannelByPlatformSendChatErrors, PostBotsByIdChannelByPlatformSendChatResponse, PostBotsByIdChannelByPlatformSendChatResponses, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendError, PostBotsByIdChannelByPlatformSendErrors, PostBotsByIdChannelByPlatformSendResponse, PostBotsByIdChannelByPlatformSendResponses, PostBotsData, PostBotsError, PostBotsErrors, PostBotsResponse, PostBotsResponses, PostEmbeddingsData, PostEmbeddingsError, PostEmbeddingsErrors, PostEmbeddingsResponse, PostEmbeddingsResponses, PostModelsData, PostModelsError, PostModelsErrors, PostModelsResponse, PostModelsResponses, PostProvidersData, PostProvidersError, PostProvidersErrors, PostProvidersResponse, PostProvidersResponses, PostSearchProvidersData, PostSearchProvidersError, PostSearchProvidersErrors, PostSearchProvidersResponse, PostSearchProvidersResponses, PostUsersData, PostUsersError, PostUsersErrors, PostUsersResponse, PostUsersResponses, ProvidersClientType, ProvidersCountResponse, ProvidersCreateRequest, 
ProvidersGetResponse, ProvidersUpdateRequest, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdError, PutBotsByBotIdMcpByIdErrors, PutBotsByBotIdMcpByIdResponse, PutBotsByBotIdMcpByIdResponses, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportError, PutBotsByBotIdMcpImportErrors, PutBotsByBotIdMcpImportResponse, PutBotsByBotIdMcpImportResponses, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdError, PutBotsByBotIdScheduleByIdErrors, PutBotsByBotIdScheduleByIdResponse, PutBotsByBotIdScheduleByIdResponses, PutBotsByBotIdSettingsData, PutBotsByBotIdSettingsError, PutBotsByBotIdSettingsErrors, PutBotsByBotIdSettingsResponse, PutBotsByBotIdSettingsResponses, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextError, PutBotsByBotIdSubagentsByIdContextErrors, PutBotsByBotIdSubagentsByIdContextResponse, PutBotsByBotIdSubagentsByIdContextResponses, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdError, PutBotsByBotIdSubagentsByIdErrors, PutBotsByBotIdSubagentsByIdResponse, PutBotsByBotIdSubagentsByIdResponses, PutBotsByBotIdSubagentsByIdSkillsData, PutBotsByBotIdSubagentsByIdSkillsError, PutBotsByBotIdSubagentsByIdSkillsErrors, PutBotsByBotIdSubagentsByIdSkillsResponse, PutBotsByBotIdSubagentsByIdSkillsResponses, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformError, PutBotsByIdChannelByPlatformErrors, PutBotsByIdChannelByPlatformResponse, PutBotsByIdChannelByPlatformResponses, PutBotsByIdData, PutBotsByIdError, PutBotsByIdErrors, PutBotsByIdMembersData, PutBotsByIdMembersError, PutBotsByIdMembersErrors, PutBotsByIdMembersResponse, PutBotsByIdMembersResponses, PutBotsByIdOwnerData, PutBotsByIdOwnerError, PutBotsByIdOwnerErrors, PutBotsByIdOwnerResponse, PutBotsByIdOwnerResponses, PutBotsByIdResponse, PutBotsByIdResponses, PutModelsByIdData, PutModelsByIdError, PutModelsByIdErrors, PutModelsByIdResponse, PutModelsByIdResponses, PutModelsModelByModelIdData, PutModelsModelByModelIdError, PutModelsModelByModelIdErrors, 
PutModelsModelByModelIdResponse, PutModelsModelByModelIdResponses, PutProvidersByIdData, PutProvidersByIdError, PutProvidersByIdErrors, PutProvidersByIdResponse, PutProvidersByIdResponses, PutSearchProvidersByIdData, PutSearchProvidersByIdError, PutSearchProvidersByIdErrors, PutSearchProvidersByIdResponse, PutSearchProvidersByIdResponses, PutUsersByIdData, PutUsersByIdError, PutUsersByIdErrors, PutUsersByIdPasswordData, PutUsersByIdPasswordError, PutUsersByIdPasswordErrors, PutUsersByIdPasswordResponses, PutUsersByIdResponse, PutUsersByIdResponses, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformError, PutUsersMeChannelsByPlatformErrors, PutUsersMeChannelsByPlatformResponse, PutUsersMeChannelsByPlatformResponses, PutUsersMeData, PutUsersMeError, PutUsersMeErrors, PutUsersMePasswordData, PutUsersMePasswordError, PutUsersMePasswordErrors, PutUsersMePasswordResponses, PutUsersMeResponse, PutUsersMeResponses, ScheduleCreateRequest, ScheduleListResponse, ScheduleNullableInt, ScheduleSchedule, ScheduleUpdateRequest, SearchprovidersCreateRequest, SearchprovidersGetResponse, SearchprovidersProviderConfigSchema, SearchprovidersProviderFieldSchema, SearchprovidersProviderMeta, SearchprovidersProviderName, SearchprovidersUpdateRequest, SettingsSettings, SettingsUpsertRequest, SubagentAddSkillsRequest, SubagentContextResponse, SubagentCreateRequest, SubagentListResponse, SubagentSkillsResponse, SubagentSubagent, SubagentUpdateContextRequest, SubagentUpdateRequest, SubagentUpdateSkillsRequest } from './types.gen'; +export { deleteBotsByBotIdContainer, deleteBotsByBotIdContainerSkills, deleteBotsByBotIdMcpById, deleteBotsByBotIdMemory, deleteBotsByBotIdMemoryById, deleteBotsByBotIdScheduleById, deleteBotsByBotIdSettings, deleteBotsByBotIdSubagentsById, deleteBotsById, deleteBotsByIdChannelByPlatform, deleteBotsByIdMembersByUserId, deleteModelsById, deleteModelsModelByModelId, deleteProvidersById, deleteSearchProvidersById, getBots, getBotsByBotIdContainer, 
getBotsByBotIdContainerSkills, getBotsByBotIdContainerSnapshots, getBotsByBotIdMcp, getBotsByBotIdMcpById, getBotsByBotIdMcpExport, getBotsByBotIdMemory, getBotsByBotIdMemoryUsage, getBotsByBotIdMessages, getBotsByBotIdSchedule, getBotsByBotIdScheduleById, getBotsByBotIdSettings, getBotsByBotIdSubagents, getBotsByBotIdSubagentsById, getBotsByBotIdSubagentsByIdContext, getBotsByBotIdSubagentsByIdSkills, getBotsById, getBotsByIdChannelByPlatform, getBotsByIdChecks, getBotsByIdMembers, getChannels, getChannelsByPlatform, getModels, getModelsById, getModelsCount, getModelsModelByModelId, getProviders, getProvidersById, getProvidersByIdModels, getProvidersCount, getProvidersNameByName, getSearchProviders, getSearchProvidersById, getSearchProvidersMeta, getUsers, getUsersById, getUsersMe, getUsersMeChannelsByPlatform, getUsersMeIdentities, type Options, patchBotsByIdChannelByPlatformStatus, postAuthLogin, postBots, postBotsByBotIdContainer, postBotsByBotIdContainerSkills, postBotsByBotIdContainerSnapshots, postBotsByBotIdContainerStart, postBotsByBotIdContainerStop, postBotsByBotIdMcp, postBotsByBotIdMcpOpsBatchDelete, postBotsByBotIdMcpStdio, postBotsByBotIdMcpStdioByConnectionId, postBotsByBotIdMemory, postBotsByBotIdMemoryCompact, postBotsByBotIdMemoryRebuild, postBotsByBotIdMemorySearch, postBotsByBotIdSchedule, postBotsByBotIdSettings, postBotsByBotIdSubagents, postBotsByBotIdSubagentsByIdSkills, postBotsByBotIdTools, postBotsByIdChannelByPlatformSend, postBotsByIdChannelByPlatformSendChat, postEmbeddings, postModels, postProviders, postSearchProviders, postUsers, putBotsByBotIdMcpById, putBotsByBotIdMcpImport, putBotsByBotIdScheduleById, putBotsByBotIdSettings, putBotsByBotIdSubagentsById, putBotsByBotIdSubagentsByIdContext, putBotsByBotIdSubagentsByIdSkills, putBotsById, putBotsByIdChannelByPlatform, putBotsByIdMembers, putBotsByIdOwner, putModelsById, putModelsModelByModelId, putProvidersById, putSearchProvidersById, putUsersById, putUsersByIdPassword, 
putUsersMe, putUsersMeChannelsByPlatform, putUsersMePassword } from './sdk.gen'; +export type { AccountsAccount, AccountsCreateAccountRequest, AccountsListAccountsResponse, AccountsResetPasswordRequest, AccountsUpdateAccountRequest, AccountsUpdatePasswordRequest, AccountsUpdateProfileRequest, BotsBot, BotsBotCheck, BotsBotMember, BotsCreateBotRequest, BotsListBotsResponse, BotsListChecksResponse, BotsListMembersResponse, BotsTransferBotRequest, BotsUpdateBotRequest, BotsUpsertMemberRequest, ChannelAction, ChannelAttachment, ChannelAttachmentType, ChannelChannelCapabilities, ChannelChannelConfig, ChannelChannelIdentityBinding, ChannelConfigSchema, ChannelFieldSchema, ChannelFieldType, ChannelMessage, ChannelMessageFormat, ChannelMessagePart, ChannelMessagePartType, ChannelMessageTextStyle, ChannelReplyRef, ChannelSendRequest, ChannelTargetHint, ChannelTargetSpec, ChannelThreadRef, ChannelUpdateChannelStatusRequest, ChannelUpsertChannelIdentityConfigRequest, ChannelUpsertConfigRequest, ClientOptions, DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerError, DeleteBotsByBotIdContainerErrors, DeleteBotsByBotIdContainerResponses, DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsError, DeleteBotsByBotIdContainerSkillsErrors, DeleteBotsByBotIdContainerSkillsResponse, DeleteBotsByBotIdContainerSkillsResponses, DeleteBotsByBotIdMcpByIdData, DeleteBotsByBotIdMcpByIdError, DeleteBotsByBotIdMcpByIdErrors, DeleteBotsByBotIdMcpByIdResponses, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdError, DeleteBotsByBotIdMemoryByIdErrors, DeleteBotsByBotIdMemoryByIdResponse, DeleteBotsByBotIdMemoryByIdResponses, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryError, DeleteBotsByBotIdMemoryErrors, DeleteBotsByBotIdMemoryResponse, DeleteBotsByBotIdMemoryResponses, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdError, DeleteBotsByBotIdScheduleByIdErrors, DeleteBotsByBotIdScheduleByIdResponses, DeleteBotsByBotIdSettingsData, 
DeleteBotsByBotIdSettingsError, DeleteBotsByBotIdSettingsErrors, DeleteBotsByBotIdSettingsResponses, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdError, DeleteBotsByBotIdSubagentsByIdErrors, DeleteBotsByBotIdSubagentsByIdResponses, DeleteBotsByIdChannelByPlatformData, DeleteBotsByIdChannelByPlatformError, DeleteBotsByIdChannelByPlatformErrors, DeleteBotsByIdChannelByPlatformResponses, DeleteBotsByIdData, DeleteBotsByIdError, DeleteBotsByIdErrors, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdError, DeleteBotsByIdMembersByUserIdErrors, DeleteBotsByIdMembersByUserIdResponses, DeleteBotsByIdResponse, DeleteBotsByIdResponses, DeleteModelsByIdData, DeleteModelsByIdError, DeleteModelsByIdErrors, DeleteModelsByIdResponses, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdError, DeleteModelsModelByModelIdErrors, DeleteModelsModelByModelIdResponses, DeleteProvidersByIdData, DeleteProvidersByIdError, DeleteProvidersByIdErrors, DeleteProvidersByIdResponses, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdError, DeleteSearchProvidersByIdErrors, DeleteSearchProvidersByIdResponses, GetBotsByBotIdContainerData, GetBotsByBotIdContainerError, GetBotsByBotIdContainerErrors, GetBotsByBotIdContainerResponse, GetBotsByBotIdContainerResponses, GetBotsByBotIdContainerSkillsData, GetBotsByBotIdContainerSkillsError, GetBotsByBotIdContainerSkillsErrors, GetBotsByBotIdContainerSkillsResponse, GetBotsByBotIdContainerSkillsResponses, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdContainerSnapshotsResponse, GetBotsByBotIdContainerSnapshotsResponses, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpByIdError, GetBotsByBotIdMcpByIdErrors, GetBotsByBotIdMcpByIdResponse, GetBotsByBotIdMcpByIdResponses, GetBotsByBotIdMcpData, GetBotsByBotIdMcpError, GetBotsByBotIdMcpErrors, GetBotsByBotIdMcpExportData, GetBotsByBotIdMcpExportError, GetBotsByBotIdMcpExportErrors, GetBotsByBotIdMcpExportResponse, GetBotsByBotIdMcpExportResponses, 
GetBotsByBotIdMcpResponse, GetBotsByBotIdMcpResponses, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryError, GetBotsByBotIdMemoryErrors, GetBotsByBotIdMemoryResponse, GetBotsByBotIdMemoryResponses, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMemoryUsageError, GetBotsByBotIdMemoryUsageErrors, GetBotsByBotIdMemoryUsageResponse, GetBotsByBotIdMemoryUsageResponses, GetBotsByBotIdMessagesData, GetBotsByBotIdMessagesError, GetBotsByBotIdMessagesErrors, GetBotsByBotIdMessagesResponse, GetBotsByBotIdMessagesResponses, GetBotsByBotIdScheduleByIdData, GetBotsByBotIdScheduleByIdError, GetBotsByBotIdScheduleByIdErrors, GetBotsByBotIdScheduleByIdResponse, GetBotsByBotIdScheduleByIdResponses, GetBotsByBotIdScheduleData, GetBotsByBotIdScheduleError, GetBotsByBotIdScheduleErrors, GetBotsByBotIdScheduleResponse, GetBotsByBotIdScheduleResponses, GetBotsByBotIdSettingsData, GetBotsByBotIdSettingsError, GetBotsByBotIdSettingsErrors, GetBotsByBotIdSettingsResponse, GetBotsByBotIdSettingsResponses, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdContextError, GetBotsByBotIdSubagentsByIdContextErrors, GetBotsByBotIdSubagentsByIdContextResponse, GetBotsByBotIdSubagentsByIdContextResponses, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdError, GetBotsByBotIdSubagentsByIdErrors, GetBotsByBotIdSubagentsByIdResponse, GetBotsByBotIdSubagentsByIdResponses, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsByIdSkillsError, GetBotsByBotIdSubagentsByIdSkillsErrors, GetBotsByBotIdSubagentsByIdSkillsResponse, GetBotsByBotIdSubagentsByIdSkillsResponses, GetBotsByBotIdSubagentsData, GetBotsByBotIdSubagentsError, GetBotsByBotIdSubagentsErrors, GetBotsByBotIdSubagentsResponse, GetBotsByBotIdSubagentsResponses, GetBotsByIdChannelByPlatformData, GetBotsByIdChannelByPlatformError, GetBotsByIdChannelByPlatformErrors, GetBotsByIdChannelByPlatformResponse, GetBotsByIdChannelByPlatformResponses, GetBotsByIdChecksData, GetBotsByIdChecksError, GetBotsByIdChecksErrors, 
GetBotsByIdChecksResponse, GetBotsByIdChecksResponses, GetBotsByIdData, GetBotsByIdError, GetBotsByIdErrors, GetBotsByIdMembersData, GetBotsByIdMembersError, GetBotsByIdMembersErrors, GetBotsByIdMembersResponse, GetBotsByIdMembersResponses, GetBotsByIdResponse, GetBotsByIdResponses, GetBotsData, GetBotsError, GetBotsErrors, GetBotsResponse, GetBotsResponses, GetChannelsByPlatformData, GetChannelsByPlatformError, GetChannelsByPlatformErrors, GetChannelsByPlatformResponse, GetChannelsByPlatformResponses, GetChannelsData, GetChannelsError, GetChannelsErrors, GetChannelsResponse, GetChannelsResponses, GetModelsByIdData, GetModelsByIdError, GetModelsByIdErrors, GetModelsByIdResponse, GetModelsByIdResponses, GetModelsCountData, GetModelsCountError, GetModelsCountErrors, GetModelsCountResponse, GetModelsCountResponses, GetModelsData, GetModelsError, GetModelsErrors, GetModelsModelByModelIdData, GetModelsModelByModelIdError, GetModelsModelByModelIdErrors, GetModelsModelByModelIdResponse, GetModelsModelByModelIdResponses, GetModelsResponse, GetModelsResponses, GetProvidersByIdData, GetProvidersByIdError, GetProvidersByIdErrors, GetProvidersByIdModelsData, GetProvidersByIdModelsError, GetProvidersByIdModelsErrors, GetProvidersByIdModelsResponse, GetProvidersByIdModelsResponses, GetProvidersByIdResponse, GetProvidersByIdResponses, GetProvidersCountData, GetProvidersCountError, GetProvidersCountErrors, GetProvidersCountResponse, GetProvidersCountResponses, GetProvidersData, GetProvidersError, GetProvidersErrors, GetProvidersNameByNameData, GetProvidersNameByNameError, GetProvidersNameByNameErrors, GetProvidersNameByNameResponse, GetProvidersNameByNameResponses, GetProvidersResponse, GetProvidersResponses, GetSearchProvidersByIdData, GetSearchProvidersByIdError, GetSearchProvidersByIdErrors, GetSearchProvidersByIdResponse, GetSearchProvidersByIdResponses, GetSearchProvidersData, GetSearchProvidersError, GetSearchProvidersErrors, GetSearchProvidersMetaData, 
GetSearchProvidersMetaResponse, GetSearchProvidersMetaResponses, GetSearchProvidersResponse, GetSearchProvidersResponses, GetUsersByIdData, GetUsersByIdError, GetUsersByIdErrors, GetUsersByIdResponse, GetUsersByIdResponses, GetUsersData, GetUsersError, GetUsersErrors, GetUsersMeChannelsByPlatformData, GetUsersMeChannelsByPlatformError, GetUsersMeChannelsByPlatformErrors, GetUsersMeChannelsByPlatformResponse, GetUsersMeChannelsByPlatformResponses, GetUsersMeData, GetUsersMeError, GetUsersMeErrors, GetUsersMeIdentitiesData, GetUsersMeIdentitiesError, GetUsersMeIdentitiesErrors, GetUsersMeIdentitiesResponse, GetUsersMeIdentitiesResponses, GetUsersMeResponse, GetUsersMeResponses, GetUsersResponse, GetUsersResponses, GithubComMemohaiMemohInternalMcpConnection, HandlersBatchDeleteRequest, HandlersChannelMeta, HandlersCreateContainerRequest, HandlersCreateContainerResponse, HandlersCreateSnapshotRequest, HandlersCreateSnapshotResponse, HandlersEmbeddingsInput, HandlersEmbeddingsRequest, HandlersEmbeddingsResponse, HandlersEmbeddingsUsage, HandlersErrorResponse, HandlersGetContainerResponse, HandlersListMyIdentitiesResponse, HandlersListSnapshotsResponse, HandlersLoginRequest, HandlersLoginResponse, HandlersMcpStdioRequest, HandlersMcpStdioResponse, HandlersMemoryAddPayload, HandlersMemoryCompactPayload, HandlersMemoryDeletePayload, HandlersMemorySearchPayload, HandlersSkillItem, HandlersSkillsDeleteRequest, HandlersSkillsOpResponse, HandlersSkillsResponse, HandlersSkillsUpsertRequest, HandlersSnapshotInfo, IdentitiesChannelIdentity, McpExportResponse, McpImportRequest, McpListResponse, McpMcpServerEntry, McpUpsertRequest, MemoryCdfPoint, MemoryCompactResult, MemoryDeleteResponse, MemoryMemoryItem, MemoryMessage, MemoryRebuildResult, MemorySearchResponse, MemoryTopKBucket, MemoryUsageResponse, MessageMessage, MessageMessageAsset, ModelsAddRequest, ModelsAddResponse, ModelsCountResponse, ModelsGetResponse, ModelsModelType, ModelsUpdateRequest, 
PatchBotsByIdChannelByPlatformStatusData, PatchBotsByIdChannelByPlatformStatusError, PatchBotsByIdChannelByPlatformStatusErrors, PatchBotsByIdChannelByPlatformStatusResponse, PatchBotsByIdChannelByPlatformStatusResponses, PostAuthLoginData, PostAuthLoginError, PostAuthLoginErrors, PostAuthLoginResponse, PostAuthLoginResponses, PostBotsByBotIdContainerData, PostBotsByBotIdContainerError, PostBotsByBotIdContainerErrors, PostBotsByBotIdContainerResponse, PostBotsByBotIdContainerResponses, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsError, PostBotsByBotIdContainerSkillsErrors, PostBotsByBotIdContainerSkillsResponse, PostBotsByBotIdContainerSkillsResponses, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsError, PostBotsByBotIdContainerSnapshotsErrors, PostBotsByBotIdContainerSnapshotsResponse, PostBotsByBotIdContainerSnapshotsResponses, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartError, PostBotsByBotIdContainerStartErrors, PostBotsByBotIdContainerStartResponse, PostBotsByBotIdContainerStartResponses, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopError, PostBotsByBotIdContainerStopErrors, PostBotsByBotIdContainerStopResponse, PostBotsByBotIdContainerStopResponses, PostBotsByBotIdMcpData, PostBotsByBotIdMcpError, PostBotsByBotIdMcpErrors, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteError, PostBotsByBotIdMcpOpsBatchDeleteErrors, PostBotsByBotIdMcpOpsBatchDeleteResponses, PostBotsByBotIdMcpResponse, PostBotsByBotIdMcpResponses, PostBotsByBotIdMcpStdioByConnectionIdData, PostBotsByBotIdMcpStdioByConnectionIdError, PostBotsByBotIdMcpStdioByConnectionIdErrors, PostBotsByBotIdMcpStdioByConnectionIdResponse, PostBotsByBotIdMcpStdioByConnectionIdResponses, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioError, PostBotsByBotIdMcpStdioErrors, PostBotsByBotIdMcpStdioResponse, PostBotsByBotIdMcpStdioResponses, PostBotsByBotIdMemoryCompactData, 
PostBotsByBotIdMemoryCompactError, PostBotsByBotIdMemoryCompactErrors, PostBotsByBotIdMemoryCompactResponse, PostBotsByBotIdMemoryCompactResponses, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryError, PostBotsByBotIdMemoryErrors, PostBotsByBotIdMemoryRebuildData, PostBotsByBotIdMemoryRebuildError, PostBotsByBotIdMemoryRebuildErrors, PostBotsByBotIdMemoryRebuildResponse, PostBotsByBotIdMemoryRebuildResponses, PostBotsByBotIdMemoryResponse, PostBotsByBotIdMemoryResponses, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchError, PostBotsByBotIdMemorySearchErrors, PostBotsByBotIdMemorySearchResponse, PostBotsByBotIdMemorySearchResponses, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleError, PostBotsByBotIdScheduleErrors, PostBotsByBotIdScheduleResponse, PostBotsByBotIdScheduleResponses, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsError, PostBotsByBotIdSettingsErrors, PostBotsByBotIdSettingsResponse, PostBotsByBotIdSettingsResponses, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsError, PostBotsByBotIdSubagentsByIdSkillsErrors, PostBotsByBotIdSubagentsByIdSkillsResponse, PostBotsByBotIdSubagentsByIdSkillsResponses, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsError, PostBotsByBotIdSubagentsErrors, PostBotsByBotIdSubagentsResponse, PostBotsByBotIdSubagentsResponses, PostBotsByBotIdToolsData, PostBotsByBotIdToolsError, PostBotsByBotIdToolsErrors, PostBotsByBotIdToolsResponse, PostBotsByBotIdToolsResponses, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatError, PostBotsByIdChannelByPlatformSendChatErrors, PostBotsByIdChannelByPlatformSendChatResponse, PostBotsByIdChannelByPlatformSendChatResponses, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendError, PostBotsByIdChannelByPlatformSendErrors, PostBotsByIdChannelByPlatformSendResponse, PostBotsByIdChannelByPlatformSendResponses, PostBotsData, PostBotsError, PostBotsErrors, PostBotsResponse, PostBotsResponses, 
PostEmbeddingsData, PostEmbeddingsError, PostEmbeddingsErrors, PostEmbeddingsResponse, PostEmbeddingsResponses, PostModelsData, PostModelsError, PostModelsErrors, PostModelsResponse, PostModelsResponses, PostProvidersData, PostProvidersError, PostProvidersErrors, PostProvidersResponse, PostProvidersResponses, PostSearchProvidersData, PostSearchProvidersError, PostSearchProvidersErrors, PostSearchProvidersResponse, PostSearchProvidersResponses, PostUsersData, PostUsersError, PostUsersErrors, PostUsersResponse, PostUsersResponses, ProvidersClientType, ProvidersCountResponse, ProvidersCreateRequest, ProvidersGetResponse, ProvidersUpdateRequest, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdError, PutBotsByBotIdMcpByIdErrors, PutBotsByBotIdMcpByIdResponse, PutBotsByBotIdMcpByIdResponses, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportError, PutBotsByBotIdMcpImportErrors, PutBotsByBotIdMcpImportResponse, PutBotsByBotIdMcpImportResponses, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdError, PutBotsByBotIdScheduleByIdErrors, PutBotsByBotIdScheduleByIdResponse, PutBotsByBotIdScheduleByIdResponses, PutBotsByBotIdSettingsData, PutBotsByBotIdSettingsError, PutBotsByBotIdSettingsErrors, PutBotsByBotIdSettingsResponse, PutBotsByBotIdSettingsResponses, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextError, PutBotsByBotIdSubagentsByIdContextErrors, PutBotsByBotIdSubagentsByIdContextResponse, PutBotsByBotIdSubagentsByIdContextResponses, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdError, PutBotsByBotIdSubagentsByIdErrors, PutBotsByBotIdSubagentsByIdResponse, PutBotsByBotIdSubagentsByIdResponses, PutBotsByBotIdSubagentsByIdSkillsData, PutBotsByBotIdSubagentsByIdSkillsError, PutBotsByBotIdSubagentsByIdSkillsErrors, PutBotsByBotIdSubagentsByIdSkillsResponse, PutBotsByBotIdSubagentsByIdSkillsResponses, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformError, PutBotsByIdChannelByPlatformErrors, 
PutBotsByIdChannelByPlatformResponse, PutBotsByIdChannelByPlatformResponses, PutBotsByIdData, PutBotsByIdError, PutBotsByIdErrors, PutBotsByIdMembersData, PutBotsByIdMembersError, PutBotsByIdMembersErrors, PutBotsByIdMembersResponse, PutBotsByIdMembersResponses, PutBotsByIdOwnerData, PutBotsByIdOwnerError, PutBotsByIdOwnerErrors, PutBotsByIdOwnerResponse, PutBotsByIdOwnerResponses, PutBotsByIdResponse, PutBotsByIdResponses, PutModelsByIdData, PutModelsByIdError, PutModelsByIdErrors, PutModelsByIdResponse, PutModelsByIdResponses, PutModelsModelByModelIdData, PutModelsModelByModelIdError, PutModelsModelByModelIdErrors, PutModelsModelByModelIdResponse, PutModelsModelByModelIdResponses, PutProvidersByIdData, PutProvidersByIdError, PutProvidersByIdErrors, PutProvidersByIdResponse, PutProvidersByIdResponses, PutSearchProvidersByIdData, PutSearchProvidersByIdError, PutSearchProvidersByIdErrors, PutSearchProvidersByIdResponse, PutSearchProvidersByIdResponses, PutUsersByIdData, PutUsersByIdError, PutUsersByIdErrors, PutUsersByIdPasswordData, PutUsersByIdPasswordError, PutUsersByIdPasswordErrors, PutUsersByIdPasswordResponses, PutUsersByIdResponse, PutUsersByIdResponses, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformError, PutUsersMeChannelsByPlatformErrors, PutUsersMeChannelsByPlatformResponse, PutUsersMeChannelsByPlatformResponses, PutUsersMeData, PutUsersMeError, PutUsersMeErrors, PutUsersMePasswordData, PutUsersMePasswordError, PutUsersMePasswordErrors, PutUsersMePasswordResponses, PutUsersMeResponse, PutUsersMeResponses, ScheduleCreateRequest, ScheduleListResponse, ScheduleNullableInt, ScheduleSchedule, ScheduleUpdateRequest, SearchprovidersCreateRequest, SearchprovidersGetResponse, SearchprovidersProviderConfigSchema, SearchprovidersProviderFieldSchema, SearchprovidersProviderMeta, SearchprovidersProviderName, SearchprovidersUpdateRequest, SettingsSettings, SettingsUpsertRequest, SubagentAddSkillsRequest, SubagentContextResponse, SubagentCreateRequest, 
SubagentListResponse, SubagentSkillsResponse, SubagentSubagent, SubagentUpdateContextRequest, SubagentUpdateRequest, SubagentUpdateSkillsRequest } from './types.gen'; diff --git a/packages/sdk/src/sdk.gen.ts b/packages/sdk/src/sdk.gen.ts index 33c542a7..0eedf838 100644 --- a/packages/sdk/src/sdk.gen.ts +++ b/packages/sdk/src/sdk.gen.ts @@ -2,7 +2,7 @@ import type { Client, Options as Options2, TDataShape } from './client'; import { client } from './client.gen'; -import type { DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerErrors, DeleteBotsByBotIdContainerResponses, DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsErrors, DeleteBotsByBotIdContainerSkillsResponses, DeleteBotsByBotIdMcpByIdData, DeleteBotsByBotIdMcpByIdErrors, DeleteBotsByBotIdMcpByIdResponses, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdErrors, DeleteBotsByBotIdMemoryByIdResponses, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryErrors, DeleteBotsByBotIdMemoryResponses, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdErrors, DeleteBotsByBotIdScheduleByIdResponses, DeleteBotsByBotIdSettingsData, DeleteBotsByBotIdSettingsErrors, DeleteBotsByBotIdSettingsResponses, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdErrors, DeleteBotsByBotIdSubagentsByIdResponses, DeleteBotsByIdData, DeleteBotsByIdErrors, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdErrors, DeleteBotsByIdMembersByUserIdResponses, DeleteBotsByIdResponses, DeleteModelsByIdData, DeleteModelsByIdErrors, DeleteModelsByIdResponses, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdErrors, DeleteModelsModelByModelIdResponses, DeleteProvidersByIdData, DeleteProvidersByIdErrors, DeleteProvidersByIdResponses, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdErrors, DeleteSearchProvidersByIdResponses, GetBotsByBotIdContainerData, GetBotsByBotIdContainerErrors, GetBotsByBotIdContainerResponses, GetBotsByBotIdContainerSkillsData, 
GetBotsByBotIdContainerSkillsErrors, GetBotsByBotIdContainerSkillsResponses, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdContainerSnapshotsResponses, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpByIdErrors, GetBotsByBotIdMcpByIdResponses, GetBotsByBotIdMcpData, GetBotsByBotIdMcpErrors, GetBotsByBotIdMcpExportData, GetBotsByBotIdMcpExportErrors, GetBotsByBotIdMcpExportResponses, GetBotsByBotIdMcpResponses, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryErrors, GetBotsByBotIdMemoryResponses, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMemoryUsageErrors, GetBotsByBotIdMemoryUsageResponses, GetBotsByBotIdMessagesData, GetBotsByBotIdMessagesErrors, GetBotsByBotIdMessagesResponses, GetBotsByBotIdScheduleByIdData, GetBotsByBotIdScheduleByIdErrors, GetBotsByBotIdScheduleByIdResponses, GetBotsByBotIdScheduleData, GetBotsByBotIdScheduleErrors, GetBotsByBotIdScheduleResponses, GetBotsByBotIdSettingsData, GetBotsByBotIdSettingsErrors, GetBotsByBotIdSettingsResponses, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdContextErrors, GetBotsByBotIdSubagentsByIdContextResponses, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdErrors, GetBotsByBotIdSubagentsByIdResponses, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsByIdSkillsErrors, GetBotsByBotIdSubagentsByIdSkillsResponses, GetBotsByBotIdSubagentsData, GetBotsByBotIdSubagentsErrors, GetBotsByBotIdSubagentsResponses, GetBotsByIdChannelByPlatformData, GetBotsByIdChannelByPlatformErrors, GetBotsByIdChannelByPlatformResponses, GetBotsByIdChecksData, GetBotsByIdChecksErrors, GetBotsByIdChecksKeysData, GetBotsByIdChecksKeysResponses, GetBotsByIdChecksResponses, GetBotsByIdChecksRunByKeyData, GetBotsByIdChecksRunByKeyResponses, GetBotsByIdData, GetBotsByIdErrors, GetBotsByIdMembersData, GetBotsByIdMembersErrors, GetBotsByIdMembersResponses, GetBotsByIdResponses, GetBotsData, GetBotsErrors, GetBotsResponses, GetChannelsByPlatformData, GetChannelsByPlatformErrors, 
GetChannelsByPlatformResponses, GetChannelsData, GetChannelsErrors, GetChannelsResponses, GetModelsByIdData, GetModelsByIdErrors, GetModelsByIdResponses, GetModelsCountData, GetModelsCountErrors, GetModelsCountResponses, GetModelsData, GetModelsErrors, GetModelsModelByModelIdData, GetModelsModelByModelIdErrors, GetModelsModelByModelIdResponses, GetModelsResponses, GetProvidersByIdData, GetProvidersByIdErrors, GetProvidersByIdModelsData, GetProvidersByIdModelsErrors, GetProvidersByIdModelsResponses, GetProvidersByIdResponses, GetProvidersCountData, GetProvidersCountErrors, GetProvidersCountResponses, GetProvidersData, GetProvidersErrors, GetProvidersNameByNameData, GetProvidersNameByNameErrors, GetProvidersNameByNameResponses, GetProvidersResponses, GetSearchProvidersByIdData, GetSearchProvidersByIdErrors, GetSearchProvidersByIdResponses, GetSearchProvidersData, GetSearchProvidersErrors, GetSearchProvidersMetaData, GetSearchProvidersMetaResponses, GetSearchProvidersResponses, GetUsersByIdData, GetUsersByIdErrors, GetUsersByIdResponses, GetUsersData, GetUsersErrors, GetUsersMeChannelsByPlatformData, GetUsersMeChannelsByPlatformErrors, GetUsersMeChannelsByPlatformResponses, GetUsersMeData, GetUsersMeErrors, GetUsersMeIdentitiesData, GetUsersMeIdentitiesErrors, GetUsersMeIdentitiesResponses, GetUsersMeResponses, GetUsersResponses, PostAuthLoginData, PostAuthLoginErrors, PostAuthLoginResponses, PostBotsByBotIdContainerData, PostBotsByBotIdContainerErrors, PostBotsByBotIdContainerResponses, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsErrors, PostBotsByBotIdContainerSkillsResponses, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsErrors, PostBotsByBotIdContainerSnapshotsResponses, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartErrors, PostBotsByBotIdContainerStartResponses, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopErrors, PostBotsByBotIdContainerStopResponses, PostBotsByBotIdMcpData, 
PostBotsByBotIdMcpErrors, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteErrors, PostBotsByBotIdMcpOpsBatchDeleteResponses, PostBotsByBotIdMcpResponses, PostBotsByBotIdMcpStdioByConnectionIdData, PostBotsByBotIdMcpStdioByConnectionIdErrors, PostBotsByBotIdMcpStdioByConnectionIdResponses, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioErrors, PostBotsByBotIdMcpStdioResponses, PostBotsByBotIdMemoryCompactData, PostBotsByBotIdMemoryCompactErrors, PostBotsByBotIdMemoryCompactResponses, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryErrors, PostBotsByBotIdMemoryRebuildData, PostBotsByBotIdMemoryRebuildErrors, PostBotsByBotIdMemoryRebuildResponses, PostBotsByBotIdMemoryResponses, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchErrors, PostBotsByBotIdMemorySearchResponses, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleErrors, PostBotsByBotIdScheduleResponses, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsErrors, PostBotsByBotIdSettingsResponses, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsErrors, PostBotsByBotIdSubagentsByIdSkillsResponses, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsErrors, PostBotsByBotIdSubagentsResponses, PostBotsByBotIdToolsData, PostBotsByBotIdToolsErrors, PostBotsByBotIdToolsResponses, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatErrors, PostBotsByIdChannelByPlatformSendChatResponses, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendErrors, PostBotsByIdChannelByPlatformSendResponses, PostBotsData, PostBotsErrors, PostBotsResponses, PostEmbeddingsData, PostEmbeddingsErrors, PostEmbeddingsResponses, PostModelsData, PostModelsErrors, PostModelsResponses, PostProvidersData, PostProvidersErrors, PostProvidersResponses, PostSearchProvidersData, PostSearchProvidersErrors, PostSearchProvidersResponses, PostUsersData, PostUsersErrors, PostUsersResponses, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdErrors, 
PutBotsByBotIdMcpByIdResponses, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportErrors, PutBotsByBotIdMcpImportResponses, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdErrors, PutBotsByBotIdScheduleByIdResponses, PutBotsByBotIdSettingsData, PutBotsByBotIdSettingsErrors, PutBotsByBotIdSettingsResponses, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextErrors, PutBotsByBotIdSubagentsByIdContextResponses, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdErrors, PutBotsByBotIdSubagentsByIdResponses, PutBotsByBotIdSubagentsByIdSkillsData, PutBotsByBotIdSubagentsByIdSkillsErrors, PutBotsByBotIdSubagentsByIdSkillsResponses, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformErrors, PutBotsByIdChannelByPlatformResponses, PutBotsByIdData, PutBotsByIdErrors, PutBotsByIdMembersData, PutBotsByIdMembersErrors, PutBotsByIdMembersResponses, PutBotsByIdOwnerData, PutBotsByIdOwnerErrors, PutBotsByIdOwnerResponses, PutBotsByIdResponses, PutModelsByIdData, PutModelsByIdErrors, PutModelsByIdResponses, PutModelsModelByModelIdData, PutModelsModelByModelIdErrors, PutModelsModelByModelIdResponses, PutProvidersByIdData, PutProvidersByIdErrors, PutProvidersByIdResponses, PutSearchProvidersByIdData, PutSearchProvidersByIdErrors, PutSearchProvidersByIdResponses, PutUsersByIdData, PutUsersByIdErrors, PutUsersByIdPasswordData, PutUsersByIdPasswordErrors, PutUsersByIdPasswordResponses, PutUsersByIdResponses, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformErrors, PutUsersMeChannelsByPlatformResponses, PutUsersMeData, PutUsersMeErrors, PutUsersMePasswordData, PutUsersMePasswordErrors, PutUsersMePasswordResponses, PutUsersMeResponses } from './types.gen'; +import type { DeleteBotsByBotIdContainerData, DeleteBotsByBotIdContainerErrors, DeleteBotsByBotIdContainerResponses, DeleteBotsByBotIdContainerSkillsData, DeleteBotsByBotIdContainerSkillsErrors, DeleteBotsByBotIdContainerSkillsResponses, DeleteBotsByBotIdMcpByIdData, 
DeleteBotsByBotIdMcpByIdErrors, DeleteBotsByBotIdMcpByIdResponses, DeleteBotsByBotIdMemoryByIdData, DeleteBotsByBotIdMemoryByIdErrors, DeleteBotsByBotIdMemoryByIdResponses, DeleteBotsByBotIdMemoryData, DeleteBotsByBotIdMemoryErrors, DeleteBotsByBotIdMemoryResponses, DeleteBotsByBotIdScheduleByIdData, DeleteBotsByBotIdScheduleByIdErrors, DeleteBotsByBotIdScheduleByIdResponses, DeleteBotsByBotIdSettingsData, DeleteBotsByBotIdSettingsErrors, DeleteBotsByBotIdSettingsResponses, DeleteBotsByBotIdSubagentsByIdData, DeleteBotsByBotIdSubagentsByIdErrors, DeleteBotsByBotIdSubagentsByIdResponses, DeleteBotsByIdChannelByPlatformData, DeleteBotsByIdChannelByPlatformErrors, DeleteBotsByIdChannelByPlatformResponses, DeleteBotsByIdData, DeleteBotsByIdErrors, DeleteBotsByIdMembersByUserIdData, DeleteBotsByIdMembersByUserIdErrors, DeleteBotsByIdMembersByUserIdResponses, DeleteBotsByIdResponses, DeleteModelsByIdData, DeleteModelsByIdErrors, DeleteModelsByIdResponses, DeleteModelsModelByModelIdData, DeleteModelsModelByModelIdErrors, DeleteModelsModelByModelIdResponses, DeleteProvidersByIdData, DeleteProvidersByIdErrors, DeleteProvidersByIdResponses, DeleteSearchProvidersByIdData, DeleteSearchProvidersByIdErrors, DeleteSearchProvidersByIdResponses, GetBotsByBotIdContainerData, GetBotsByBotIdContainerErrors, GetBotsByBotIdContainerResponses, GetBotsByBotIdContainerSkillsData, GetBotsByBotIdContainerSkillsErrors, GetBotsByBotIdContainerSkillsResponses, GetBotsByBotIdContainerSnapshotsData, GetBotsByBotIdContainerSnapshotsResponses, GetBotsByBotIdMcpByIdData, GetBotsByBotIdMcpByIdErrors, GetBotsByBotIdMcpByIdResponses, GetBotsByBotIdMcpData, GetBotsByBotIdMcpErrors, GetBotsByBotIdMcpExportData, GetBotsByBotIdMcpExportErrors, GetBotsByBotIdMcpExportResponses, GetBotsByBotIdMcpResponses, GetBotsByBotIdMemoryData, GetBotsByBotIdMemoryErrors, GetBotsByBotIdMemoryResponses, GetBotsByBotIdMemoryUsageData, GetBotsByBotIdMemoryUsageErrors, GetBotsByBotIdMemoryUsageResponses, 
GetBotsByBotIdMessagesData, GetBotsByBotIdMessagesErrors, GetBotsByBotIdMessagesResponses, GetBotsByBotIdScheduleByIdData, GetBotsByBotIdScheduleByIdErrors, GetBotsByBotIdScheduleByIdResponses, GetBotsByBotIdScheduleData, GetBotsByBotIdScheduleErrors, GetBotsByBotIdScheduleResponses, GetBotsByBotIdSettingsData, GetBotsByBotIdSettingsErrors, GetBotsByBotIdSettingsResponses, GetBotsByBotIdSubagentsByIdContextData, GetBotsByBotIdSubagentsByIdContextErrors, GetBotsByBotIdSubagentsByIdContextResponses, GetBotsByBotIdSubagentsByIdData, GetBotsByBotIdSubagentsByIdErrors, GetBotsByBotIdSubagentsByIdResponses, GetBotsByBotIdSubagentsByIdSkillsData, GetBotsByBotIdSubagentsByIdSkillsErrors, GetBotsByBotIdSubagentsByIdSkillsResponses, GetBotsByBotIdSubagentsData, GetBotsByBotIdSubagentsErrors, GetBotsByBotIdSubagentsResponses, GetBotsByIdChannelByPlatformData, GetBotsByIdChannelByPlatformErrors, GetBotsByIdChannelByPlatformResponses, GetBotsByIdChecksData, GetBotsByIdChecksErrors, GetBotsByIdChecksResponses, GetBotsByIdData, GetBotsByIdErrors, GetBotsByIdMembersData, GetBotsByIdMembersErrors, GetBotsByIdMembersResponses, GetBotsByIdResponses, GetBotsData, GetBotsErrors, GetBotsResponses, GetChannelsByPlatformData, GetChannelsByPlatformErrors, GetChannelsByPlatformResponses, GetChannelsData, GetChannelsErrors, GetChannelsResponses, GetModelsByIdData, GetModelsByIdErrors, GetModelsByIdResponses, GetModelsCountData, GetModelsCountErrors, GetModelsCountResponses, GetModelsData, GetModelsErrors, GetModelsModelByModelIdData, GetModelsModelByModelIdErrors, GetModelsModelByModelIdResponses, GetModelsResponses, GetProvidersByIdData, GetProvidersByIdErrors, GetProvidersByIdModelsData, GetProvidersByIdModelsErrors, GetProvidersByIdModelsResponses, GetProvidersByIdResponses, GetProvidersCountData, GetProvidersCountErrors, GetProvidersCountResponses, GetProvidersData, GetProvidersErrors, GetProvidersNameByNameData, GetProvidersNameByNameErrors, GetProvidersNameByNameResponses, 
GetProvidersResponses, GetSearchProvidersByIdData, GetSearchProvidersByIdErrors, GetSearchProvidersByIdResponses, GetSearchProvidersData, GetSearchProvidersErrors, GetSearchProvidersMetaData, GetSearchProvidersMetaResponses, GetSearchProvidersResponses, GetUsersByIdData, GetUsersByIdErrors, GetUsersByIdResponses, GetUsersData, GetUsersErrors, GetUsersMeChannelsByPlatformData, GetUsersMeChannelsByPlatformErrors, GetUsersMeChannelsByPlatformResponses, GetUsersMeData, GetUsersMeErrors, GetUsersMeIdentitiesData, GetUsersMeIdentitiesErrors, GetUsersMeIdentitiesResponses, GetUsersMeResponses, GetUsersResponses, PatchBotsByIdChannelByPlatformStatusData, PatchBotsByIdChannelByPlatformStatusErrors, PatchBotsByIdChannelByPlatformStatusResponses, PostAuthLoginData, PostAuthLoginErrors, PostAuthLoginResponses, PostBotsByBotIdContainerData, PostBotsByBotIdContainerErrors, PostBotsByBotIdContainerResponses, PostBotsByBotIdContainerSkillsData, PostBotsByBotIdContainerSkillsErrors, PostBotsByBotIdContainerSkillsResponses, PostBotsByBotIdContainerSnapshotsData, PostBotsByBotIdContainerSnapshotsErrors, PostBotsByBotIdContainerSnapshotsResponses, PostBotsByBotIdContainerStartData, PostBotsByBotIdContainerStartErrors, PostBotsByBotIdContainerStartResponses, PostBotsByBotIdContainerStopData, PostBotsByBotIdContainerStopErrors, PostBotsByBotIdContainerStopResponses, PostBotsByBotIdMcpData, PostBotsByBotIdMcpErrors, PostBotsByBotIdMcpOpsBatchDeleteData, PostBotsByBotIdMcpOpsBatchDeleteErrors, PostBotsByBotIdMcpOpsBatchDeleteResponses, PostBotsByBotIdMcpResponses, PostBotsByBotIdMcpStdioByConnectionIdData, PostBotsByBotIdMcpStdioByConnectionIdErrors, PostBotsByBotIdMcpStdioByConnectionIdResponses, PostBotsByBotIdMcpStdioData, PostBotsByBotIdMcpStdioErrors, PostBotsByBotIdMcpStdioResponses, PostBotsByBotIdMemoryCompactData, PostBotsByBotIdMemoryCompactErrors, PostBotsByBotIdMemoryCompactResponses, PostBotsByBotIdMemoryData, PostBotsByBotIdMemoryErrors, PostBotsByBotIdMemoryRebuildData, 
PostBotsByBotIdMemoryRebuildErrors, PostBotsByBotIdMemoryRebuildResponses, PostBotsByBotIdMemoryResponses, PostBotsByBotIdMemorySearchData, PostBotsByBotIdMemorySearchErrors, PostBotsByBotIdMemorySearchResponses, PostBotsByBotIdScheduleData, PostBotsByBotIdScheduleErrors, PostBotsByBotIdScheduleResponses, PostBotsByBotIdSettingsData, PostBotsByBotIdSettingsErrors, PostBotsByBotIdSettingsResponses, PostBotsByBotIdSubagentsByIdSkillsData, PostBotsByBotIdSubagentsByIdSkillsErrors, PostBotsByBotIdSubagentsByIdSkillsResponses, PostBotsByBotIdSubagentsData, PostBotsByBotIdSubagentsErrors, PostBotsByBotIdSubagentsResponses, PostBotsByBotIdToolsData, PostBotsByBotIdToolsErrors, PostBotsByBotIdToolsResponses, PostBotsByIdChannelByPlatformSendChatData, PostBotsByIdChannelByPlatformSendChatErrors, PostBotsByIdChannelByPlatformSendChatResponses, PostBotsByIdChannelByPlatformSendData, PostBotsByIdChannelByPlatformSendErrors, PostBotsByIdChannelByPlatformSendResponses, PostBotsData, PostBotsErrors, PostBotsResponses, PostEmbeddingsData, PostEmbeddingsErrors, PostEmbeddingsResponses, PostModelsData, PostModelsErrors, PostModelsResponses, PostProvidersData, PostProvidersErrors, PostProvidersResponses, PostSearchProvidersData, PostSearchProvidersErrors, PostSearchProvidersResponses, PostUsersData, PostUsersErrors, PostUsersResponses, PutBotsByBotIdMcpByIdData, PutBotsByBotIdMcpByIdErrors, PutBotsByBotIdMcpByIdResponses, PutBotsByBotIdMcpImportData, PutBotsByBotIdMcpImportErrors, PutBotsByBotIdMcpImportResponses, PutBotsByBotIdScheduleByIdData, PutBotsByBotIdScheduleByIdErrors, PutBotsByBotIdScheduleByIdResponses, PutBotsByBotIdSettingsData, PutBotsByBotIdSettingsErrors, PutBotsByBotIdSettingsResponses, PutBotsByBotIdSubagentsByIdContextData, PutBotsByBotIdSubagentsByIdContextErrors, PutBotsByBotIdSubagentsByIdContextResponses, PutBotsByBotIdSubagentsByIdData, PutBotsByBotIdSubagentsByIdErrors, PutBotsByBotIdSubagentsByIdResponses, PutBotsByBotIdSubagentsByIdSkillsData, 
PutBotsByBotIdSubagentsByIdSkillsErrors, PutBotsByBotIdSubagentsByIdSkillsResponses, PutBotsByIdChannelByPlatformData, PutBotsByIdChannelByPlatformErrors, PutBotsByIdChannelByPlatformResponses, PutBotsByIdData, PutBotsByIdErrors, PutBotsByIdMembersData, PutBotsByIdMembersErrors, PutBotsByIdMembersResponses, PutBotsByIdOwnerData, PutBotsByIdOwnerErrors, PutBotsByIdOwnerResponses, PutBotsByIdResponses, PutModelsByIdData, PutModelsByIdErrors, PutModelsByIdResponses, PutModelsModelByModelIdData, PutModelsModelByModelIdErrors, PutModelsModelByModelIdResponses, PutProvidersByIdData, PutProvidersByIdErrors, PutProvidersByIdResponses, PutSearchProvidersByIdData, PutSearchProvidersByIdErrors, PutSearchProvidersByIdResponses, PutUsersByIdData, PutUsersByIdErrors, PutUsersByIdPasswordData, PutUsersByIdPasswordErrors, PutUsersByIdPasswordResponses, PutUsersByIdResponses, PutUsersMeChannelsByPlatformData, PutUsersMeChannelsByPlatformErrors, PutUsersMeChannelsByPlatformResponses, PutUsersMeData, PutUsersMeErrors, PutUsersMePasswordData, PutUsersMePasswordErrors, PutUsersMePasswordResponses, PutUsersMeResponses } from './types.gen'; export type Options = Options2 & { /** @@ -579,6 +579,13 @@ export const putBotsById = (options: Optio } }); +/** + * Delete bot channel config + * + * Remove bot channel configuration + */ +export const deleteBotsByIdChannelByPlatform = (options: Options) => (options.client ?? client).delete({ url: '/bots/{id}/channel/{platform}', ...options }); + /** * Get bot channel config * @@ -628,6 +635,20 @@ export const postBotsByIdChannelByPlatformSendChat = (options: Options) => (options.client ?? client).patch({ + url: '/bots/{id}/channel/{platform}/status', + ...options, + headers: { + 'Content-Type': 'application/json', + ...options.headers + } +}); + /** * List bot runtime checks * @@ -635,20 +656,6 @@ export const postBotsByIdChannelByPlatformSendChat = (options: Options) => (options.client ?? 
client).get({ url: '/bots/{id}/checks', ...options }); -/** - * List available check keys - * - * Returns all check keys available for a bot (builtin + MCP connections) - */ -export const getBotsByIdChecksKeys = (options: Options) => (options.client ?? client).get({ url: '/bots/{id}/checks/keys', ...options }); - -/** - * Run a single bot check - * - * Evaluate one check key for a bot - */ -export const getBotsByIdChecksRunByKey = (options: Options) => (options.client ?? client).get({ url: '/bots/{id}/checks/run/{key}', ...options }); - /** * List bot members * diff --git a/packages/sdk/src/types.gen.ts b/packages/sdk/src/types.gen.ts index 9c0acbde..7aab5383 100644 --- a/packages/sdk/src/types.gen.ts +++ b/packages/sdk/src/types.gen.ts @@ -71,13 +71,16 @@ export type BotsBot = { }; export type BotsBotCheck = { - check_key?: string; detail?: string; + id?: string; metadata?: { [key: string]: unknown; }; status?: string; + subtitle?: string; summary?: string; + title_key?: string; + type?: string; }; export type BotsBotMember = { @@ -101,10 +104,6 @@ export type BotsListBotsResponse = { items?: Array; }; -export type BotsListCheckKeysResponse = { - keys?: Array; -}; - export type BotsListChecksResponse = { items?: Array; }; @@ -139,6 +138,11 @@ export type ChannelAction = { }; export type ChannelAttachment = { + asset_id?: string; + /** + * data URL for agent delivery + */ + base64?: string; caption?: string; duration_ms?: number; height?: number; @@ -184,6 +188,7 @@ export type ChannelChannelConfig = { credentials?: { [key: string]: unknown; }; + disabled?: boolean; external_identity?: string; id?: string; routing?: { @@ -192,7 +197,6 @@ export type ChannelChannelConfig = { self_identity?: { [key: string]: unknown; }; - status?: string; updated_at?: string; verified_at?: string; }; @@ -284,6 +288,10 @@ export type ChannelThreadRef = { id?: string; }; +export type ChannelUpdateChannelStatusRequest = { + disabled?: boolean; +}; + export type 
ChannelUpsertChannelIdentityConfigRequest = { config?: { [key: string]: unknown; @@ -294,6 +302,7 @@ export type ChannelUpsertConfigRequest = { credentials?: { [key: string]: unknown; }; + disabled?: boolean; external_identity?: string; routing?: { [key: string]: unknown; @@ -301,7 +310,6 @@ export type ChannelUpsertConfigRequest = { self_identity?: { [key: string]: unknown; }; - status?: string; verified_at?: string; }; @@ -648,6 +656,7 @@ export type MemoryUsageResponse = { }; export type MessageMessage = { + assets?: Array; bot_id?: string; content?: Array; created_at?: string; @@ -666,10 +675,23 @@ export type MessageMessage = { source_reply_to_message_id?: string; }; +export type MessageMessageAsset = { + asset_id?: string; + duration_ms?: number; + height?: number; + media_type?: string; + mime?: string; + ordinal?: number; + original_name?: string; + role?: string; + size_bytes?: number; + storage_key?: string; + width?: number; +}; + export type ModelsAddRequest = { dimensions?: number; - input?: Array; - is_multimodal?: boolean; + input_modalities?: Array; llm_provider_id?: string; model_id?: string; name?: string; @@ -687,8 +709,7 @@ export type ModelsCountResponse = { export type ModelsGetResponse = { dimensions?: number; - input?: Array; - is_multimodal?: boolean; + input_modalities?: Array; llm_provider_id?: string; model_id?: string; name?: string; @@ -699,8 +720,7 @@ export type ModelsModelType = 'chat' | 'embedding'; export type ModelsUpdateRequest = { dimensions?: number; - input?: Array; - is_multimodal?: boolean; + input_modalities?: Array; llm_provider_id?: string; model_id?: string; name?: string; @@ -3040,6 +3060,46 @@ export type PutBotsByIdResponses = { export type PutBotsByIdResponse = PutBotsByIdResponses[keyof PutBotsByIdResponses]; +export type DeleteBotsByIdChannelByPlatformData = { + body?: never; + path: { + /** + * Bot ID + */ + id: string; + /** + * Channel platform + */ + platform: string; + }; + query?: never; + url: 
'/bots/{id}/channel/{platform}'; +}; + +export type DeleteBotsByIdChannelByPlatformErrors = { + /** + * Bad Request + */ + 400: HandlersErrorResponse; + /** + * Forbidden + */ + 403: HandlersErrorResponse; + /** + * Internal Server Error + */ + 500: HandlersErrorResponse; +}; + +export type DeleteBotsByIdChannelByPlatformError = DeleteBotsByIdChannelByPlatformErrors[keyof DeleteBotsByIdChannelByPlatformErrors]; + +export type DeleteBotsByIdChannelByPlatformResponses = { + /** + * No Content + */ + 204: unknown; +}; + export type GetBotsByIdChannelByPlatformData = { body?: never; path: { @@ -3237,6 +3297,55 @@ export type PostBotsByIdChannelByPlatformSendChatResponses = { export type PostBotsByIdChannelByPlatformSendChatResponse = PostBotsByIdChannelByPlatformSendChatResponses[keyof PostBotsByIdChannelByPlatformSendChatResponses]; +export type PatchBotsByIdChannelByPlatformStatusData = { + /** + * Channel status payload + */ + body: ChannelUpdateChannelStatusRequest; + path: { + /** + * Bot ID + */ + id: string; + /** + * Channel platform + */ + platform: string; + }; + query?: never; + url: '/bots/{id}/channel/{platform}/status'; +}; + +export type PatchBotsByIdChannelByPlatformStatusErrors = { + /** + * Bad Request + */ + 400: HandlersErrorResponse; + /** + * Forbidden + */ + 403: HandlersErrorResponse; + /** + * Not Found + */ + 404: HandlersErrorResponse; + /** + * Internal Server Error + */ + 500: HandlersErrorResponse; +}; + +export type PatchBotsByIdChannelByPlatformStatusError = PatchBotsByIdChannelByPlatformStatusErrors[keyof PatchBotsByIdChannelByPlatformStatusErrors]; + +export type PatchBotsByIdChannelByPlatformStatusResponses = { + /** + * OK + */ + 200: ChannelChannelConfig; +}; + +export type PatchBotsByIdChannelByPlatformStatusResponse = PatchBotsByIdChannelByPlatformStatusResponses[keyof PatchBotsByIdChannelByPlatformStatusResponses]; + export type GetBotsByIdChecksData = { body?: never; path: { @@ -3279,52 +3388,6 @@ export type 
GetBotsByIdChecksResponses = { export type GetBotsByIdChecksResponse = GetBotsByIdChecksResponses[keyof GetBotsByIdChecksResponses]; -export type GetBotsByIdChecksKeysData = { - body?: never; - path: { - /** - * Bot ID - */ - id: string; - }; - query?: never; - url: '/bots/{id}/checks/keys'; -}; - -export type GetBotsByIdChecksKeysResponses = { - /** - * OK - */ - 200: BotsListCheckKeysResponse; -}; - -export type GetBotsByIdChecksKeysResponse = GetBotsByIdChecksKeysResponses[keyof GetBotsByIdChecksKeysResponses]; - -export type GetBotsByIdChecksRunByKeyData = { - body?: never; - path: { - /** - * Bot ID - */ - id: string; - /** - * Check key - */ - key: string; - }; - query?: never; - url: '/bots/{id}/checks/run/{key}'; -}; - -export type GetBotsByIdChecksRunByKeyResponses = { - /** - * OK - */ - 200: BotsBotCheck; -}; - -export type GetBotsByIdChecksRunByKeyResponse = GetBotsByIdChecksRunByKeyResponses[keyof GetBotsByIdChecksRunByKeyResponses]; - export type GetBotsByIdMembersData = { body?: never; path: { diff --git a/packages/web/src/components/create-model/index.vue b/packages/web/src/components/create-model/index.vue index 06cd4721..841d7ae5 100644 --- a/packages/web/src/components/create-model/index.vue +++ b/packages/web/src/components/create-model/index.vue @@ -105,22 +105,26 @@ - - - - - - - + +
+ +
+ +
+
@@ -163,7 +167,7 @@ import { SelectTrigger, SelectValue, FormItem, - Switch, + Checkbox, Separator, Label, Spinner, @@ -176,12 +180,14 @@ import { useMutation, useQueryCache } from '@pinia/colada' import { postModels, putModelsModelByModelId } from '@memoh/sdk' import type { ModelsGetResponse } from '@memoh/sdk' +const availableInputModalities = ['text', 'image', 'audio', 'video', 'file'] as const +const selectedModalities = ref(['text']) + const formSchema = toTypedSchema(z.object({ type: z.string().min(1), model_id: z.string().min(1), name: z.string().optional(), dimensions: z.coerce.number().min(1).optional(), - is_multimodal: z.coerce.boolean().optional(), })) const form = useForm({ @@ -202,13 +208,19 @@ const canSubmit = computed(() => { return !!type && !!model_id }) -// 新建时的空值 +function toggleModality(mod: string, checked: boolean) { + if (checked) { + selectedModalities.value = [...selectedModalities.value, mod] + } else { + selectedModalities.value = selectedModalities.value.filter(m => m !== mod) + } +} + const emptyValues = { type: '' as string, model_id: '' as string, name: '' as string, dimensions: undefined as number | undefined, - is_multimodal: undefined as boolean | undefined, } // Display Name 自动跟随 Model ID,除非用户主动修改过 @@ -263,7 +275,6 @@ async function addModel(e: Event) { const model_id = form.values.model_id || (isEdit ? fallback!.model_id : '') const name = form.values.name ?? (isEdit ? fallback!.name : '') const dimensions = form.values.dimensions ?? (isEdit ? fallback!.dimensions : undefined) - const is_multimodal = form.values.is_multimodal ?? (isEdit ? fallback!.is_multimodal : undefined) if (!type || !model_id) return @@ -283,7 +294,7 @@ async function addModel(e: Event) { } if (type === 'chat') { - payload.is_multimodal = is_multimodal ?? false + payload.input_modalities = selectedModalities.value.length > 0 ? 
selectedModalities.value : ['text'] } if (isEdit) { @@ -308,13 +319,13 @@ watch(open, async () => { await nextTick() if (editInfo?.value) { - const { type, model_id, name, dimensions, is_multimodal } = editInfo.value - form.resetForm({ values: { type, model_id, name, dimensions, is_multimodal } }) - // 编辑时,如果已有 name 且与 model_id 不同,视为用户自定义 + const { type, model_id, name, dimensions, input_modalities } = editInfo.value + form.resetForm({ values: { type, model_id, name, dimensions } }) + selectedModalities.value = input_modalities ?? ['text'] userEditedName.value = !!(name && name !== model_id) } else { - // 新建模式:显式传空值,避免复用上次编辑数据 form.resetForm({ values: { ...emptyValues } }) + selectedModalities.value = ['text'] userEditedName.value = false } }, { diff --git a/packages/web/src/composables/api/useChat.ts b/packages/web/src/composables/api/useChat.ts index 811db359..a6224580 100644 --- a/packages/web/src/composables/api/useChat.ts +++ b/packages/web/src/composables/api/useChat.ts @@ -18,6 +18,20 @@ export interface ChatSummary { last_observed_at?: string } +export interface MessageAsset { + asset_id: string + role: string + ordinal: number + media_type: string + mime: string + size_bytes: number + storage_key: string + original_name?: string + width?: number + height?: number + duration_ms?: number +} + export interface Message { id: string bot_id: string @@ -32,6 +46,7 @@ export interface Message { role: string content?: unknown metadata?: Record + assets?: MessageAsset[] created_at?: string } @@ -40,13 +55,16 @@ export interface StreamEvent { | 'text_start' | 'text_delta' | 'text_end' | 'reasoning_start' | 'reasoning_delta' | 'reasoning_end' | 'tool_call_start' | 'tool_call_end' + | 'attachment_delta' | 'agent_start' | 'agent_end' | 'processing_started' | 'processing_completed' | 'processing_failed' | 'error' delta?: string + toolCallId?: string toolName?: string input?: unknown result?: unknown + attachments?: Array> error?: string message?: string [key: string]: 
unknown @@ -199,6 +217,13 @@ export async function fetchMessages( * Stream a chat message via SSE. Sends parsed StreamEvents to onEvent callback. * Returns an abort function. */ +export interface ChatAttachment { + type: string + base64: string + mime?: string + name?: string +} + export function streamMessage( botId: string, _chatId: string, @@ -206,15 +231,20 @@ export function streamMessage( onEvent: StreamEventHandler, onDone: () => void, onError: (err: Error) => void, + attachments?: ChatAttachment[], ): () => void { const controller = new AbortController() ;(async () => { try { + const reqBody: Record = { query: text, current_channel: 'web', channels: ['web'] } + if (attachments?.length) { + reqBody.attachments = attachments + } const { data: body } = await client.post({ url: '/bots/{bot_id}/messages/stream', path: { bot_id: botId }, - body: { query: text, current_channel: 'web', channels: ['web'] }, + body: reqBody, parseAs: 'stream', signal: controller.signal, throwOnError: true, diff --git a/packages/web/src/i18n/locales/en.json b/packages/web/src/i18n/locales/en.json index 0821054d..b9e2fe4d 100644 --- a/packages/web/src/i18n/locales/en.json +++ b/packages/web/src/i18n/locales/en.json @@ -149,7 +149,15 @@ "displayNamePlaceholder": "Custom display name", "dimensions": "Dimensions", "dimensionsPlaceholder": "e.g. 
1536", - "multimodal": "Multimodal" + "multimodal": "Multimodal", + "inputModalities": "Input Modalities", + "modality": { + "text": "Text", + "image": "Image", + "audio": "Audio", + "video": "Video", + "file": "File" + } }, "provider": { "add": "Add Provider", @@ -250,13 +258,22 @@ }, "checks": { "title": "Runtime Checks", - "subtitle": "Resource health is evaluated at request time.", + "subtitle": "View current health status and issue details.", "ok": "No issues", "hasIssue": "Has issues", "issueCount": "{count} issues", "empty": "No check items", "loadFailed": "Failed to load runtime checks", "actions": {}, + "titles": { + "containerInit": "Container initialization", + "containerRecord": "Container record", + "containerTask": "Container task", + "containerDataPath": "Container data path", + "botDelete": "Bot deletion", + "mcpConnection": "MCP connection", + "channelConnection": "Channel connection" + }, "keys": { "containerInit": "Container initialization", "containerRecord": "Container record", @@ -363,7 +380,13 @@ "save": "Save Platform Configuration", "statusActive": "Active", "statusInactive": "Inactive", + "actionEnable": "Enable", + "actionDisable": "Disable", + "saveOnly": "Save only", + "saveAndEnable": "Save and enable", "deleteConfirm": "Are you sure you want to remove this platform?", + "deleteSuccess": "Platform removed", + "deleteFailed": "Failed to remove platform", "noAvailableTypes": "All platform types have been configured", "types": { "feishu": "Feishu", diff --git a/packages/web/src/i18n/locales/zh.json b/packages/web/src/i18n/locales/zh.json index f1d363db..135279b5 100644 --- a/packages/web/src/i18n/locales/zh.json +++ b/packages/web/src/i18n/locales/zh.json @@ -145,7 +145,15 @@ "displayNamePlaceholder": "自定义显示名称", "dimensions": "向量维度", "dimensionsPlaceholder": "例如 1536", - "multimodal": "支持多模态" + "multimodal": "支持多模态", + "inputModalities": "输入模态", + "modality": { + "text": "文本", + "image": "图片", + "audio": "音频", + "video": "视频", + "file": 
"文件" + } }, "provider": { "add": "添加服务商", @@ -246,13 +254,22 @@ }, "checks": { "title": "运行时检查", - "subtitle": "附属资源健康状态在请求时实时检查。", + "subtitle": "查看当前健康状态与异常详情。", "ok": "无异常", "hasIssue": "存在异常", "issueCount": "{count} 个异常", "empty": "暂无检查项", "loadFailed": "加载运行时检查失败", "actions": {}, + "titles": { + "containerInit": "容器初始化", + "containerRecord": "容器记录", + "containerTask": "容器任务", + "containerDataPath": "容器数据路径", + "botDelete": "Bot 删除", + "mcpConnection": "MCP 连接", + "channelConnection": "平台连接" + }, "keys": { "containerInit": "容器初始化", "containerRecord": "容器记录", @@ -359,7 +376,13 @@ "save": "保存平台配置", "statusActive": "启用", "statusInactive": "停用", + "actionEnable": "启用", + "actionDisable": "停用", + "saveOnly": "仅保存", + "saveAndEnable": "立即启用", "deleteConfirm": "确定要移除这个平台吗?", + "deleteSuccess": "平台已移除", + "deleteFailed": "移除平台失败", "noAvailableTypes": "所有平台类型均已配置", "types": { "feishu": "飞书", diff --git a/packages/web/src/main.ts b/packages/web/src/main.ts index 4ddefeaf..85fdffb2 100644 --- a/packages/web/src/main.ts +++ b/packages/web/src/main.ts @@ -28,6 +28,7 @@ import { faCheck, faEye, faEyeSlash, + faChevronLeft, faChevronRight, faChevronDown, faEllipsisVertical, @@ -45,6 +46,12 @@ import { faBrain, faCopy, faCompress, + faPaperclip, + faXmark, + faImage, + faFile, + faMusic, + faVideo, } from '@fortawesome/free-solid-svg-icons' import { faRectangleList, @@ -66,6 +73,7 @@ library.add( faCheck, faEye, faEyeSlash, + faChevronLeft, faChevronRight, faChevronDown, faEllipsisVertical, @@ -83,6 +91,12 @@ library.add( faBrain, faCopy, faCompress, + faPaperclip, + faXmark, + faImage, + faFile, + faMusic, + faVideo, faRectangleList, faTrashCan, faComments, diff --git a/packages/web/src/pages/bots/components/bot-channels.vue b/packages/web/src/pages/bots/components/bot-channels.vue index e8c92192..ccab538a 100644 --- a/packages/web/src/pages/bots/components/bot-channels.vue +++ b/packages/web/src/pages/bots/components/bot-channels.vue @@ -51,7 +51,7 @@
{{ $t('bots.channels.statusActive') }} @@ -142,7 +142,7 @@ import { PopoverTrigger, PopoverContent, } from '@memoh/ui' -import { useQuery, useQueryCache } from '@pinia/colada' +import { useQuery } from '@pinia/colada' import { getChannels, getBotsByIdChannelByPlatform } from '@memoh/sdk' import type { HandlersChannelMeta, ChannelChannelConfig } from '@memoh/sdk' import ChannelSettingsPanel from './channel-settings-panel.vue' @@ -190,13 +190,13 @@ const addPopoverOpen = ref(false) const allChannels = computed(() => channels.value ?? []) const configuredChannels = computed(() => allChannels.value.filter((c) => c.configured)) + const unconfiguredChannels = computed(() => allChannels.value.filter((c) => !c.configured)) const selectedItem = computed(() => allChannels.value.find((c) => c.meta.type === selectedType.value) ?? null, ) -// 自动选中第一个已配置的渠道 watch(configuredChannels, (list) => { if (list.length > 0 && !selectedType.value) { selectedType.value = list[0].meta.type diff --git a/packages/web/src/pages/bots/components/channel-settings-panel.vue b/packages/web/src/pages/bots/components/channel-settings-panel.vue index 4c16fdb7..7dd9c1a8 100644 --- a/packages/web/src/pages/bots/components/channel-settings-panel.vue +++ b/packages/web/src/pages/bots/components/channel-settings-panel.vue @@ -10,9 +10,41 @@ {{ channelItem.meta.type }}

- - {{ channelItem.configured ? $t('bots.channels.configured') : $t('bots.channels.notConfigured') }} - +
+ +
@@ -111,31 +143,48 @@ - -
- - -
- - -
- +
+ +
diff --git a/packages/web/src/pages/bots/detail.vue b/packages/web/src/pages/bots/detail.vue index 7aa6ab8c..f6a14970 100644 --- a/packages/web/src/pages/bots/detail.vue +++ b/packages/web/src/pages/bots/detail.vue @@ -186,37 +186,33 @@ >
  • -

    {{ checkKeyLabel(item.check_key) }}

    -
    - +
    +

    {{ checkTitleLabel(item) }}

    +

    + {{ item.subtitle }} +

    {{ checkStatusLabel(item.status) }}
    +

    {{ item.summary }}

    - {{ $t('common.loading') }} + {{ item.detail }}

    -
  • @@ -553,11 +549,11 @@ import { useI18n } from 'vue-i18n' import { useQuery, useMutation, useQueryCache } from '@pinia/colada' import { getBotsById, putBotsById, + getBotsByIdChecks, getBotsByBotIdContainer, postBotsByBotIdContainer, deleteBotsByBotIdContainer, postBotsByBotIdContainerStart, postBotsByBotIdContainerStop, getBotsByBotIdContainerSnapshots, postBotsByBotIdContainerSnapshots, } from '@memoh/sdk' -import { client } from '@memoh/sdk/client' import type { BotsBotCheck, HandlersGetContainerResponse, HandlersListSnapshotsResponse, @@ -598,20 +594,9 @@ const { mutateAsync: updateBot, isLoading: updateBotLoading } = useMutation({ }, }) -async function fetchCheckKeys(id: string): Promise { - const { data } = await client.get({ - url: `/bots/${id}/checks/keys`, - throwOnError: true, - }) as { data: { keys: string[] } } - return data.keys ?? [] -} - -async function fetchSingleCheck(id: string, key: string): Promise { - const { data } = await client.get({ - url: `/bots/${id}/checks/run/${key}`, - throwOnError: true, - }) as { data: BotCheck } - return data +async function fetchChecks(id: string): Promise { + const { data } = await getBotsByIdChecks({ path: { id }, throwOnError: true }) + return data?.items ?? 
[] } const isEditingBotName = ref(false) @@ -704,13 +689,6 @@ const botLifecyclePending = computed(() => ( const checks = ref([]) const checksLoading = ref(false) -const checkKeyI18nKeys: Record = { - 'container.init': 'bots.checks.keys.containerInit', - 'container.record': 'bots.checks.keys.containerRecord', - 'container.task': 'bots.checks.keys.containerTask', - 'container.data_path': 'bots.checks.keys.containerDataPath', - 'bot.delete': 'bots.checks.keys.botDelete', -} const checksSummaryText = computed(() => { const issueCount = checks.value.filter((item) => item.status === 'warn' || item.status === 'error').length if (issueCount > 0) { @@ -873,56 +851,22 @@ function checkStatusLabel(status: BotCheck['status']): string { return t('bots.checks.status.ok') } -function isCheckLoading(item: BotCheck): boolean { - return item.status === 'unknown' && !item.summary -} - -function checkKeyLabel(checkKey: string): string { - const key = checkKeyI18nKeys[checkKey] - if (!key) { - return checkKey +function checkTitleLabel(item: BotCheck): string { + const titleKey = (item.title_key ?? '').trim() + if (titleKey) { + const translated = t(titleKey) + if (translated !== titleKey) { + return translated + } } - return t(key) + return (item.type ?? '').trim() || (item.id ?? '').trim() || '-' } async function loadChecks(showToast: boolean) { checksLoading.value = true checks.value = [] try { - const keys = await fetchCheckKeys(botId.value) - if (keys.length === 0) return - - // Maintain key order: pre-fill placeholders, replace as results arrive. 
- const keyOrder = new Map(keys.map((k, i) => [k, i])) - checks.value = keys.map((key) => ({ - check_key: key, - status: 'unknown' as BotCheck['status'], - summary: '', - })) - - const pending = keys.map(async (key) => { - try { - const result = await fetchSingleCheck(botId.value, key) - const idx = keyOrder.get(key) - if (idx !== undefined) { - const updated = [...checks.value] - updated[idx] = result - checks.value = updated - } - } catch { - const idx = keyOrder.get(key) - if (idx !== undefined) { - const updated = [...checks.value] - updated[idx] = { - check_key: key, - status: 'error' as BotCheck['status'], - summary: 'Check failed', - } - checks.value = updated - } - } - }) - await Promise.all(pending) + checks.value = await fetchChecks(botId.value) } catch (error) { if (showToast) { toast.error(resolveErrorMessage(error, t('bots.checks.loadFailed'))) diff --git a/packages/web/src/pages/chat/components/attachment-block.vue b/packages/web/src/pages/chat/components/attachment-block.vue new file mode 100644 index 00000000..0b04b6c3 --- /dev/null +++ b/packages/web/src/pages/chat/components/attachment-block.vue @@ -0,0 +1,105 @@ + + + diff --git a/packages/web/src/pages/chat/components/chat-area.vue b/packages/web/src/pages/chat/components/chat-area.vue index de640ba9..edda0be4 100644 --- a/packages/web/src/pages/chat/components/chat-area.vue +++ b/packages/web/src/pages/chat/components/chat-area.vue @@ -74,37 +74,88 @@ v-for="msg in messages" :key="msg.id" :message="msg" + :on-open-media="galleryOpenBySrc" /> + + +
    -
    -