feat: add per-route message dispatch modes (inject/parallel/queue)

Introduce three inbound message handling modes for channel adapters:

- inject (default, /btw): when a route has an active agent stream,
  inject the new user message into the running stream via the SDK's
  PrepareStep hook between tool rounds. The message is interleaved at
  the correct position in the persisted round.
- parallel (/now): start a new agent stream immediately, running
  concurrently with any existing stream (preserves current behavior).
- queue (/next): enqueue the message and process it after the current
  stream completes.

Key components:
- RouteDispatcher: per-route state management with inject channel,
  task queue, and active-stream tracking.
- PrepareStep integration: drains inject channel between tool rounds,
  records insertion position via InjectedRecorder for correct
  persistence ordering.
- interleaveInjectedMessages: inserts injected user messages at their
  actual injection position within the persisted message round.
- Parallel mode isolation: /now streams do not interact with the
  dispatcher, preventing them from clearing another stream's active
  state.

This commit is contained in:
Acbox
2026-04-02 21:43:13 +08:00
parent 33b57ee345
commit a31995424c
10 changed files with 947 additions and 6 deletions
+40 -5
View File
@@ -12,6 +12,7 @@ import (
"sort"
"strconv"
"strings"
"sync"
"time"
sdk "github.com/memohai/twilight-ai/sdk"
@@ -136,10 +137,11 @@ type usageInfo struct {
}
// resolvedContext carries everything resolve() derives from a ChatRequest:
// the fully-populated agent run configuration, the model and provider rows
// backing the run, the headerified user query, and — when the request came
// in with an inject channel — a pointer to the slice that the
// InjectedRecorder callback appends to, so the persistence layer can later
// interleave injected user messages at their recorded positions.
//
// NOTE(review): the stripped diff showed the old 4-field and new 5-field
// versions of this struct interleaved; this is the post-commit (added) side.
type resolvedContext struct {
	runConfig agentpkg.RunConfig // agent run settings assembled by resolve()
	model     models.GetResponse // resolved chat model
	provider  sqlc.LlmProvider   // provider row the model belongs to
	query     string             // headerified query
	// injectedRecords is nil unless req.InjectCh was set; otherwise it points
	// at the record slice shared with runConfig.InjectedRecorder.
	injectedRecords *[]conversation.InjectedMessageRecord
}
func (r *Resolver) resolve(ctx context.Context, req conversation.ChatRequest) (resolvedContext, error) {
@@ -292,7 +294,40 @@ func (r *Resolver) resolve(ctx context.Context, req conversation.ChatRequest) (r
LoopDetection: agentpkg.LoopDetectionConfig{Enabled: loopDetectionEnabled},
}
return resolvedContext{runConfig: runCfg, model: chatModel, provider: provider, query: headerifiedQuery}, nil
var injectedRecords *[]conversation.InjectedMessageRecord
if req.InjectCh != nil {
agentInjectCh := make(chan agentpkg.InjectMessage, cap(req.InjectCh))
go func() {
for msg := range req.InjectCh {
agentInjectCh <- agentpkg.InjectMessage{
Text: msg.Text,
HeaderifiedText: msg.HeaderifiedText,
}
}
close(agentInjectCh)
}()
runCfg.InjectCh = agentInjectCh
records := make([]conversation.InjectedMessageRecord, 0)
injectedRecords = &records
var recMu sync.Mutex
runCfg.InjectedRecorder = func(headerifiedText string, insertAfter int) {
recMu.Lock()
*injectedRecords = append(*injectedRecords, conversation.InjectedMessageRecord{
HeaderifiedText: headerifiedText,
InsertAfter: insertAfter,
})
recMu.Unlock()
}
}
return resolvedContext{
runConfig: runCfg,
model: chatModel,
provider: provider,
query: headerifiedQuery,
injectedRecords: injectedRecords,
}, nil
}
// Chat sends a synchronous chat request and stores the result.