Files
Memoh/internal/db/sqlc/token_usage.sql.go
T
Acbox Liu 8d5c38f0e5 refactor: unify providers and models tables (#338)
* refactor: unify providers and models tables

- Rename `llm_providers` → `providers`, `llm_provider_oauth_tokens` → `provider_oauth_tokens`
- Remove `tts_providers` and `tts_models` tables; speech models now live in the unified `models` table with `type = 'speech'`
- Replace top-level `api_key`/`base_url` columns with a JSONB `config` field on `providers`
- Rename `llm_provider_id` → `provider_id` across all references
- Add `edge-speech` client type and `conf/providers/edge.yaml` default provider
- Create new read-only speech endpoints (`/speech-providers`, `/speech-models`) backed by filtered views of the unified tables
- Remove old TTS CRUD handlers; simplify speech page to read-only + test
- Update registry loader to skip malformed YAML files instead of failing entirely
- Fix YAML quoting for model names containing colons in openrouter.yaml
- Regenerate sqlc, swagger, and TypeScript SDK

* fix: exclude speech providers from providers list endpoint

ListProviders now filters out client_type matching '%-speech' so Edge
and future speech providers no longer appear on the Providers page.
ListSpeechProviders uses the same pattern match instead of hard-coding
'edge-speech'.

* fix: use explicit client_type list instead of LIKE pattern

Replace '%-speech' pattern with explicit IN ('edge-speech') for both
ListProviders (exclusion) and ListSpeechProviders (inclusion). New
speech client types must be added to both queries.

* fix: use EXECUTE for dynamic SQL in migrations referencing old schema

PL/pgSQL pre-validates column/table references in static SQL statements
inside DO blocks before evaluating IF/RETURN guards. This caused
migrations 0010-0061 to fail on fresh databases where the canonical
schema uses `providers`/`provider_id` instead of `llm_providers`/
`llm_provider_id`.

Wrap all SQL that references potentially non-existent old schema objects
(llm_providers, llm_provider_id, tts_providers, tts_models, etc.) in
EXECUTE strings so they are only parsed at runtime when actually reached.

* fix: revert canonical schema to use llm_providers for migration compatibility

The CI migrations workflow (up → down → up) failed because 0061 down
renames `providers` back to `llm_providers`, but 0001 down only dropped
`providers` — leaving `llm_providers` as a remnant. On the second
migrate up, 0010 found the stale `llm_providers` and tried to reference
`models.llm_provider_id` which no longer existed.

Revert 0001 canonical schema to use original names (llm_providers,
tts_providers, tts_models) so incremental migrations work naturally and
0061 handles the final rename. Remove EXECUTE wrappers and unnecessary
guards from migrations that now always operate on llm_providers.

* fix: icons

* fix: sync canonical schema with 0061 migration to fix sqlc column mismatch

0001_init.up.sql still used old names (llm_providers, llm_provider_id)
and included dropped tts_providers/tts_models tables. sqlc could not
parse the PL/pgSQL EXECUTE in migration 0061, so generated code retained
stale columns (input_modalities, supports_reasoning) causing runtime
"column does not exist" errors when adding models.

- Update 0001_init.up.sql to current schema (providers, provider_id,
  no tts tables, add provider_oauth_tokens)
- Use ALTER TABLE IF EXISTS in 0010/0041/0042 for backward compat
- Regenerate sqlc

* fix: guard all legacy migrations against fresh schema for CI compat

On fresh databases, 0001_init.up.sql creates providers/provider_id
(not llm_providers/llm_provider_id). Migrations 0013, 0041, 0046, 0047
referenced the old names without guards, causing CI migration failures.

- 0013: check llm_provider_id column exists before adding old constraint
- 0041: check llm_providers table exists before backfill/constraint DDL
- 0046: wrap CREATE TABLE in DO block with llm_providers existence check
- 0047: use ALTER TABLE IF EXISTS + DO block guard
2026-04-08 01:03:44 +08:00

148 lines
4.4 KiB
Go

// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.30.0
// source: token_usage.sql
package sqlc
import (
"context"
"github.com/jackc/pgx/v5/pgtype"
)
// getTokenUsageByDayAndType aggregates token usage per calendar day and
// session type for one bot over [from_time, to_time). Subagent sessions are
// attributed to their parent session's type (falling back to 'chat'). Token
// counts are pulled out of the JSONB `usage` column; rows with NULL usage are
// skipped. $4 (model_id) is optional — NULL matches all models.
const getTokenUsageByDayAndType = `-- name: GetTokenUsageByDayAndType :many
SELECT
COALESCE(
CASE WHEN s.type = 'subagent' THEN COALESCE(ps.type, 'chat') ELSE s.type END,
'chat'
)::text AS session_type,
date_trunc('day', m.created_at)::date AS day,
COALESCE(SUM((m.usage->>'inputTokens')::bigint), 0)::bigint AS input_tokens,
COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens,
COALESCE(SUM((m.usage->'inputTokenDetails'->>'cacheReadTokens')::bigint), 0)::bigint AS cache_read_tokens,
COALESCE(SUM((m.usage->'inputTokenDetails'->>'cacheWriteTokens')::bigint), 0)::bigint AS cache_write_tokens,
COALESCE(SUM((m.usage->'outputTokenDetails'->>'reasoningTokens')::bigint), 0)::bigint AS reasoning_tokens
FROM bot_history_messages m
LEFT JOIN bot_sessions s ON s.id = m.session_id
LEFT JOIN bot_sessions ps ON ps.id = s.parent_session_id
WHERE m.bot_id = $1
AND m.usage IS NOT NULL
AND m.created_at >= $2
AND m.created_at < $3
AND ($4::uuid IS NULL OR m.model_id = $4::uuid)
GROUP BY session_type, day
ORDER BY day, session_type
`
// GetTokenUsageByDayAndTypeParams holds the bind parameters for
// GetTokenUsageByDayAndType. The time window is half-open:
// FromTime is inclusive ($2, `>=`), ToTime is exclusive ($3, `<`).
// ModelID may be a NULL/invalid UUID to include all models ($4).
type GetTokenUsageByDayAndTypeParams struct {
BotID pgtype.UUID `json:"bot_id"`
FromTime pgtype.Timestamptz `json:"from_time"`
ToTime pgtype.Timestamptz `json:"to_time"`
ModelID pgtype.UUID `json:"model_id"`
}
// GetTokenUsageByDayAndTypeRow is one aggregated result row: the summed
// token counters for a single (session_type, day) group. All counters are
// COALESCEd to 0 in SQL, so they are never negative-sentinel or NULL.
type GetTokenUsageByDayAndTypeRow struct {
SessionType string `json:"session_type"`
Day pgtype.Date `json:"day"`
InputTokens int64 `json:"input_tokens"`
OutputTokens int64 `json:"output_tokens"`
CacheReadTokens int64 `json:"cache_read_tokens"`
CacheWriteTokens int64 `json:"cache_write_tokens"`
ReasoningTokens int64 `json:"reasoning_tokens"`
}
// GetTokenUsageByDayAndType runs the getTokenUsageByDayAndType query and
// returns one row per (session_type, day) group for the given bot and time
// window. Rows are ordered by day, then session type, per the SQL ORDER BY.
// The returned slice is nil when no rows match.
func (q *Queries) GetTokenUsageByDayAndType(ctx context.Context, arg GetTokenUsageByDayAndTypeParams) ([]GetTokenUsageByDayAndTypeRow, error) {
	rows, err := q.db.Query(ctx, getTokenUsageByDayAndType,
		arg.BotID,
		arg.FromTime,
		arg.ToTime,
		arg.ModelID,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var results []GetTokenUsageByDayAndTypeRow
	for rows.Next() {
		var row GetTokenUsageByDayAndTypeRow
		// Scan order must match the SELECT column order exactly.
		if scanErr := rows.Scan(
			&row.SessionType,
			&row.Day,
			&row.InputTokens,
			&row.OutputTokens,
			&row.CacheReadTokens,
			&row.CacheWriteTokens,
			&row.ReasoningTokens,
		); scanErr != nil {
			return nil, scanErr
		}
		results = append(results, row)
	}
	// Surface any error encountered during iteration (e.g. network failure).
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return results, nil
}
// getTokenUsageByModel aggregates input/output token totals per model for one
// bot over [from_time, to_time). Model and provider names come from LEFT
// JOINs onto the unified `models` and `providers` tables, with 'unknown' /
// 'Unknown' fallbacks when the referenced model or provider row is missing.
// Results are ordered by total input tokens, highest first.
const getTokenUsageByModel = `-- name: GetTokenUsageByModel :many
SELECT
m.model_id,
COALESCE(mo.model_id, 'unknown') AS model_slug,
COALESCE(mo.name, 'Unknown') AS model_name,
COALESCE(lp.name, 'Unknown') AS provider_name,
COALESCE(SUM((m.usage->>'inputTokens')::bigint), 0)::bigint AS input_tokens,
COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens
FROM bot_history_messages m
LEFT JOIN models mo ON mo.id = m.model_id
LEFT JOIN providers lp ON lp.id = mo.provider_id
WHERE m.bot_id = $1
AND m.usage IS NOT NULL
AND m.created_at >= $2
AND m.created_at < $3
GROUP BY m.model_id, mo.model_id, mo.name, lp.name
ORDER BY input_tokens DESC
`
// GetTokenUsageByModelParams holds the bind parameters for
// GetTokenUsageByModel. The time window is half-open: FromTime is
// inclusive ($2, `>=`), ToTime is exclusive ($3, `<`).
type GetTokenUsageByModelParams struct {
BotID pgtype.UUID `json:"bot_id"`
FromTime pgtype.Timestamptz `json:"from_time"`
ToTime pgtype.Timestamptz `json:"to_time"`
}
// GetTokenUsageByModelRow is one aggregated result row: summed input/output
// tokens for a single model. ModelSlug/ModelName/ProviderName fall back to
// 'unknown'/'Unknown' in SQL when the model or provider row no longer exists.
type GetTokenUsageByModelRow struct {
ModelID pgtype.UUID `json:"model_id"`
ModelSlug string `json:"model_slug"`
ModelName string `json:"model_name"`
ProviderName string `json:"provider_name"`
InputTokens int64 `json:"input_tokens"`
OutputTokens int64 `json:"output_tokens"`
}
// GetTokenUsageByModel runs the getTokenUsageByModel query and returns one
// row per model used by the given bot inside the time window, ordered by
// input-token volume descending (per the SQL ORDER BY). The returned slice
// is nil when no rows match.
func (q *Queries) GetTokenUsageByModel(ctx context.Context, arg GetTokenUsageByModelParams) ([]GetTokenUsageByModelRow, error) {
	rows, err := q.db.Query(ctx, getTokenUsageByModel, arg.BotID, arg.FromTime, arg.ToTime)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var results []GetTokenUsageByModelRow
	for rows.Next() {
		var row GetTokenUsageByModelRow
		// Scan order must match the SELECT column order exactly.
		if scanErr := rows.Scan(
			&row.ModelID,
			&row.ModelSlug,
			&row.ModelName,
			&row.ProviderName,
			&row.InputTokens,
			&row.OutputTokens,
		); scanErr != nil {
			return nil, scanErr
		}
		results = append(results, row)
	}
	// Surface any error encountered during iteration (e.g. network failure).
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return results, nil
}