Files
Memoh/internal/memory/llm_client_test.go
T
BBQ 83b6ee608c refactor: bind container lifecycle to bot and improve schedule trigger flow
- Add SetupBotContainer to ContainerLifecycle interface so containers
  are automatically created when a bot is created, matching the existing
  cleanup-on-delete behavior.
- Refactor schedule tools to use bot-scoped API paths and pass identity
  context for proper authorization.
- Introduce dedicated trigger-schedule endpoint in chat resolver with
  explicit schedule payload instead of reusing the generic chat path.
- Generate short-lived JWT tokens for schedule trigger callbacks with
  resolved bot owner identity.
- Validate required parameters in NewLLMClient and NewOpenAIEmbedder
  constructors, returning errors instead of falling back to defaults.
- Add unit tests for schedule token generation and chat resolver.
2026-02-07 12:04:37 +08:00

37 lines
898 B
Go

package memory
import (
"context"
"net/http"
"net/http/httptest"
"testing"
)
// TestLLMClientExtract verifies that Extract posts to the
// /chat/completions endpoint and decodes the facts embedded in the
// model's JSON reply.
func TestLLMClientExtract(t *testing.T) {
	t.Parallel()

	// Stub OpenAI-compatible backend: anything other than the
	// chat-completions path is a 404.
	handler := func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != "/chat/completions" {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(`{"choices":[{"message":{"content":"{\"facts\":[\"hello\"]}"}}]}`))
	}
	srv := httptest.NewServer(http.HandlerFunc(handler))
	t.Cleanup(srv.Close)

	client, err := NewLLMClient(nil, srv.URL, "test-key", "gpt-4.1-nano-2025-04-14", 0)
	if err != nil {
		t.Fatalf("new llm client: %v", err)
	}

	req := ExtractRequest{Messages: []Message{{Role: "user", Content: "hi"}}}
	resp, err := client.Extract(context.Background(), req)
	if err != nil {
		t.Fatalf("extract: %v", err)
	}
	if len(resp.Facts) != 1 || resp.Facts[0] != "hello" {
		t.Fatalf("unexpected response: %+v", resp)
	}
}