mirror of
https://github.com/memohai/Memoh.git
synced 2026-04-27 07:16:19 +09:00
80b36f79f3
- Rewrite SQL queries to join bot_history_messages with bot_sessions, supporting chat/heartbeat/schedule usage from a single source - Update Go handler and CLI command to use unified queries - Fix daily chart stacking: each session type gets its own bar group - Add total input/output trend lines to the daily token chart - Fix summary cards reactivity by restricting aggregation to allDays range - Fix cache chart reactive dependency tracking by inlining data access - Add i18n keys for schedule, totalInput, totalOutput - Default time range changed to 7 days - Regenerate sqlc, swagger, and SDK
144 lines
4.2 KiB
Go
144 lines
4.2 KiB
Go
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.30.0
// source: token_usage.sql

// Package sqlc contains the sqlc-generated database access layer.
package sqlc
|
|
|
|
import (
|
|
"context"
|
|
|
|
"github.com/jackc/pgx/v5/pgtype"
|
|
)
|
|
|
|
// getTokenUsageByDayAndType aggregates token usage (input, output, cache
// read/write, reasoning) from bot_history_messages per calendar day and
// session type. Session type comes from the joined bot_sessions row and
// defaults to 'chat' when no session matches (LEFT JOIN). Filters:
// bot_id ($1), created_at in [$2, $3), and an optional model filter ($4 —
// NULL matches all models). Token counts are read from the JSONB usage
// column.
const getTokenUsageByDayAndType = `-- name: GetTokenUsageByDayAndType :many
SELECT
COALESCE(s.type, 'chat')::text AS session_type,
date_trunc('day', m.created_at)::date AS day,
COALESCE(SUM((m.usage->>'inputTokens')::bigint), 0)::bigint AS input_tokens,
COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens,
COALESCE(SUM((m.usage->'inputTokenDetails'->>'cacheReadTokens')::bigint), 0)::bigint AS cache_read_tokens,
COALESCE(SUM((m.usage->'inputTokenDetails'->>'cacheWriteTokens')::bigint), 0)::bigint AS cache_write_tokens,
COALESCE(SUM((m.usage->'outputTokenDetails'->>'reasoningTokens')::bigint), 0)::bigint AS reasoning_tokens
FROM bot_history_messages m
LEFT JOIN bot_sessions s ON s.id = m.session_id
WHERE m.bot_id = $1
AND m.usage IS NOT NULL
AND m.created_at >= $2
AND m.created_at < $3
AND ($4::uuid IS NULL OR m.model_id = $4::uuid)
GROUP BY session_type, day
ORDER BY day, session_type
`
|
|
|
|
// GetTokenUsageByDayAndTypeParams holds the bind parameters for
// GetTokenUsageByDayAndType.
type GetTokenUsageByDayAndTypeParams struct {
	BotID    pgtype.UUID        `json:"bot_id"`    // bot whose messages are aggregated
	FromTime pgtype.Timestamptz `json:"from_time"` // inclusive lower bound on created_at
	ToTime   pgtype.Timestamptz `json:"to_time"`   // exclusive upper bound on created_at
	ModelID  pgtype.UUID        `json:"model_id"`  // optional filter; a NULL UUID matches all models
}
|
|
|
|
// GetTokenUsageByDayAndTypeRow is one aggregated result row: token totals
// for a single (session type, day) combination.
type GetTokenUsageByDayAndTypeRow struct {
	SessionType      string      `json:"session_type"` // 'chat' when the message has no session
	Day              pgtype.Date `json:"day"`
	InputTokens      int64       `json:"input_tokens"`
	OutputTokens     int64       `json:"output_tokens"`
	CacheReadTokens  int64       `json:"cache_read_tokens"`
	CacheWriteTokens int64       `json:"cache_write_tokens"`
	ReasoningTokens  int64       `json:"reasoning_tokens"`
}
|
|
|
|
func (q *Queries) GetTokenUsageByDayAndType(ctx context.Context, arg GetTokenUsageByDayAndTypeParams) ([]GetTokenUsageByDayAndTypeRow, error) {
|
|
rows, err := q.db.Query(ctx, getTokenUsageByDayAndType,
|
|
arg.BotID,
|
|
arg.FromTime,
|
|
arg.ToTime,
|
|
arg.ModelID,
|
|
)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
var items []GetTokenUsageByDayAndTypeRow
|
|
for rows.Next() {
|
|
var i GetTokenUsageByDayAndTypeRow
|
|
if err := rows.Scan(
|
|
&i.SessionType,
|
|
&i.Day,
|
|
&i.InputTokens,
|
|
&i.OutputTokens,
|
|
&i.CacheReadTokens,
|
|
&i.CacheWriteTokens,
|
|
&i.ReasoningTokens,
|
|
); err != nil {
|
|
return nil, err
|
|
}
|
|
items = append(items, i)
|
|
}
|
|
if err := rows.Err(); err != nil {
|
|
return nil, err
|
|
}
|
|
return items, nil
|
|
}
|
|
|
|
// getTokenUsageByModel aggregates input/output token counts per model for a
// bot over a half-open time range [$2, $3). Model slug/name and provider
// name come from LEFT JOINs on models and llm_providers, falling back to
// 'unknown'/'Unknown' when no match exists. Results are ordered by
// input_tokens descending.
const getTokenUsageByModel = `-- name: GetTokenUsageByModel :many
SELECT
m.model_id,
COALESCE(mo.model_id, 'unknown') AS model_slug,
COALESCE(mo.name, 'Unknown') AS model_name,
COALESCE(lp.name, 'Unknown') AS provider_name,
COALESCE(SUM((m.usage->>'inputTokens')::bigint), 0)::bigint AS input_tokens,
COALESCE(SUM((m.usage->>'outputTokens')::bigint), 0)::bigint AS output_tokens
FROM bot_history_messages m
LEFT JOIN models mo ON mo.id = m.model_id
LEFT JOIN llm_providers lp ON lp.id = mo.llm_provider_id
WHERE m.bot_id = $1
AND m.usage IS NOT NULL
AND m.created_at >= $2
AND m.created_at < $3
GROUP BY m.model_id, mo.model_id, mo.name, lp.name
ORDER BY input_tokens DESC
`
|
|
|
|
// GetTokenUsageByModelParams holds the bind parameters for
// GetTokenUsageByModel.
type GetTokenUsageByModelParams struct {
	BotID    pgtype.UUID        `json:"bot_id"`    // bot whose messages are aggregated
	FromTime pgtype.Timestamptz `json:"from_time"` // inclusive lower bound on created_at
	ToTime   pgtype.Timestamptz `json:"to_time"`   // exclusive upper bound on created_at
}
|
|
|
|
// GetTokenUsageByModelRow is one aggregated result row: token totals for a
// single model, with display names resolved from models/llm_providers.
type GetTokenUsageByModelRow struct {
	ModelID      pgtype.UUID `json:"model_id"`
	ModelSlug    string      `json:"model_slug"`    // 'unknown' when the model row is missing
	ModelName    string      `json:"model_name"`    // 'Unknown' when the model row is missing
	ProviderName string      `json:"provider_name"` // 'Unknown' when the provider row is missing
	InputTokens  int64       `json:"input_tokens"`
	OutputTokens int64       `json:"output_tokens"`
}
|
|
|
|
func (q *Queries) GetTokenUsageByModel(ctx context.Context, arg GetTokenUsageByModelParams) ([]GetTokenUsageByModelRow, error) {
|
|
rows, err := q.db.Query(ctx, getTokenUsageByModel, arg.BotID, arg.FromTime, arg.ToTime)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
var items []GetTokenUsageByModelRow
|
|
for rows.Next() {
|
|
var i GetTokenUsageByModelRow
|
|
if err := rows.Scan(
|
|
&i.ModelID,
|
|
&i.ModelSlug,
|
|
&i.ModelName,
|
|
&i.ProviderName,
|
|
&i.InputTokens,
|
|
&i.OutputTokens,
|
|
); err != nil {
|
|
return nil, err
|
|
}
|
|
items = append(items, i)
|
|
}
|
|
if err := rows.Err(); err != nil {
|
|
return nil, err
|
|
}
|
|
return items, nil
|
|
}
|