package agent

import (
	"context"
	_ "embed"
	"errors"
	"fmt"
	"log/slog"
	"strings"
	"sync"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/agent/tools"
	"github.com/charmbracelet/crush/internal/config"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/message"
	"github.com/charmbracelet/crush/internal/permission"
	"github.com/charmbracelet/crush/internal/session"
	"github.com/charmbracelet/fantasy/ai"
	"github.com/charmbracelet/fantasy/anthropic"
)

//go:embed templates/title.md
var titlePrompt []byte

//go:embed templates/summary.md
var summaryPrompt []byte

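// SessionAgentCall describes a single prompt sent to a session: the target
// session, the user prompt and attachments, plus optional generation
// parameters forwarded to the underlying model call.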
type SessionAgentCall struct {
	SessionID        string
	Prompt           string
	ProviderOptions  ai.ProviderOptions
	Attachments      []message.Attachment
	MaxOutputTokens  int64
	Temperature      *float64
	TopP             *float64
	TopK             *int64
	FrequencyPenalty *float64
	PresencePenalty  *float64
}

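// SessionAgent runs prompts against chat sessions. Calls for a busy session
// are queued and replayed once the in-flight request finishes; individual
// sessions (or all of them) can be cancelled, and a session's history can be
// compacted via Summarize.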
type SessionAgent interface {
	Run(context.Context, SessionAgentCall) (*ai.AgentResult, error)
	SetModels(large Model, small Model)
	SetTools(tools []ai.AgentTool)
	Cancel(sessionID string)
	CancelAll()
	IsSessionBusy(sessionID string) bool
	IsBusy() bool
	QueuedPrompts(sessionID string) int
	ClearQueue(sessionID string)
	Summarize(context.Context, string) error
	Model() Model
}

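// Model bundles a language model with its catwalk metadata (used here for
// pricing) and the user's selected-model configuration.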
type Model struct {
	Model      ai.LanguageModel
	CatwalkCfg catwalk.Model
	ModelCfg   config.SelectedModel
}

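// sessionAgent is the default SessionAgent implementation. It keeps a
// per-session queue of pending calls and a map of cancel functions for
// in-flight requests.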
type sessionAgent struct {
	largeModel   Model
	smallModel   Model
	systemPrompt string
	tools        []ai.AgentTool
	sessions     session.Service
	messages     message.Service

	messageQueue   *csync.Map[string, []SessionAgentCall]
	activeRequests *csync.Map[string, context.CancelFunc]
}

type SessionAgentOption func(*sessionAgent)

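// NewSessionAgent creates a SessionAgent that uses largeModel for the main
// conversation and smallModel for auxiliary work such as title generation.
//
// A minimal usage sketch (the sessions, messages, and model values are
// assumed to be constructed elsewhere by the caller):
//
//	agent := NewSessionAgent(large, small, systemPrompt, sessions, messages, myTools...)
//	result, err := agent.Run(ctx, SessionAgentCall{
//		SessionID: sess.ID,
//		Prompt:    "hello",
//	})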
func NewSessionAgent(
	largeModel Model,
	smallModel Model,
	systemPrompt string,
	sessions session.Service,
	messages message.Service,
	tools ...ai.AgentTool,
) SessionAgent {
	return &sessionAgent{
		largeModel:     largeModel,
		smallModel:     smallModel,
		systemPrompt:   systemPrompt,
		sessions:       sessions,
		messages:       messages,
		tools:          tools,
		messageQueue:   csync.NewMap[string, []SessionAgentCall](),
		activeRequests: csync.NewMap[string, context.CancelFunc](),
	}
}

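// Run executes a prompt against the given session. If the session already has
// an active request, the call is queued and (nil, nil) is returned; queued
// calls are drained once the current request completes. The first prompt of a
// session also triggers asynchronous title generation.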
func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*ai.AgentResult, error) {
	if call.Prompt == "" {
		return nil, ErrEmptyPrompt
	}
	if call.SessionID == "" {
		return nil, ErrSessionMissing
	}

	// Queue the message if busy
	if a.IsSessionBusy(call.SessionID) {
		existing, ok := a.messageQueue.Get(call.SessionID)
		if !ok {
			existing = []SessionAgentCall{}
		}
		existing = append(existing, call)
		a.messageQueue.Set(call.SessionID, existing)
		return nil, nil
	}

	if len(a.tools) > 0 {
		// add anthropic caching to the last tool
		a.tools[len(a.tools)-1].SetProviderOptions(a.getCacheControlOptions())
	}

	agent := ai.NewAgent(
		a.largeModel.Model,
		ai.WithSystemPrompt(a.systemPrompt),
		ai.WithTools(a.tools...),
	)

	sessionLock := sync.Mutex{}
	currentSession, err := a.sessions.Get(ctx, call.SessionID)
	if err != nil {
		return nil, fmt.Errorf("failed to get session: %w", err)
	}

	msgs, err := a.getSessionMessages(ctx, currentSession)
	if err != nil {
		return nil, fmt.Errorf("failed to get session messages: %w", err)
	}

	var wg sync.WaitGroup
	// Generate title if first message
	if len(msgs) == 0 {
		wg.Go(func() {
			sessionLock.Lock()
			a.generateTitle(ctx, &currentSession, call.Prompt)
			sessionLock.Unlock()
		})
	}

	// Add the user message to the session
	_, err = a.createUserMessage(ctx, call)
	if err != nil {
		return nil, err
	}

	// add the session to the context
	ctx = context.WithValue(ctx, tools.SessionIDContextKey, call.SessionID)

	genCtx, cancel := context.WithCancel(ctx)
	a.activeRequests.Set(call.SessionID, cancel)

	defer cancel()
	defer a.activeRequests.Del(call.SessionID)

	history, files := a.preparePrompt(msgs, call.Attachments...)

	var currentAssistant *message.Message
	result, err := agent.Stream(genCtx, ai.AgentStreamCall{
		Prompt:           call.Prompt,
		Files:            files,
		Messages:         history,
		ProviderOptions:  call.ProviderOptions,
		MaxOutputTokens:  &call.MaxOutputTokens,
		TopP:             call.TopP,
		Temperature:      call.Temperature,
		PresencePenalty:  call.PresencePenalty,
		TopK:             call.TopK,
		FrequencyPenalty: call.FrequencyPenalty,
		// Before each step create the new assistant message
		PrepareStep: func(options ai.PrepareStepFunctionOptions) (prepared ai.PrepareStepResult, err error) {
			var assistantMsg message.Message
			assistantMsg, err = a.messages.Create(genCtx, call.SessionID, message.CreateMessageParams{
				Role:     message.Assistant,
				Parts:    []message.ContentPart{},
				Model:    a.largeModel.ModelCfg.Model,
				Provider: a.largeModel.ModelCfg.Provider,
			})
			if err != nil {
				return prepared, err
			}

			currentAssistant = &assistantMsg

			prepared.Messages = options.Messages
			// reset all cached items
			for i := range prepared.Messages {
				prepared.Messages[i].ProviderOptions = nil
			}

			queuedCalls, _ := a.messageQueue.Get(call.SessionID)
			a.messageQueue.Del(call.SessionID)
			for _, queued := range queuedCalls {
				userMessage, createErr := a.createUserMessage(genCtx, queued)
				if createErr != nil {
					return prepared, createErr
				}
				prepared.Messages = append(prepared.Messages, userMessage.ToAIMessage()...)
			}

			lastSystemRoleInx := 0
			systemMessageUpdated := false
			for i, msg := range prepared.Messages {
				// add cache control to the last system message only
				if msg.Role == ai.MessageRoleSystem {
					lastSystemRoleInx = i
				} else if !systemMessageUpdated {
					prepared.Messages[lastSystemRoleInx].ProviderOptions = a.getCacheControlOptions()
					systemMessageUpdated = true
				}
				// then add cache control to the last two messages
				if i > len(prepared.Messages)-3 {
					prepared.Messages[i].ProviderOptions = a.getCacheControlOptions()
				}
			}
			return prepared, err
		},
		OnReasoningDelta: func(id string, text string) error {
			currentAssistant.AppendReasoningContent(text)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
			// handle anthropic signature
			if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
				if reasoning, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok {
					currentAssistant.AppendReasoningSignature(reasoning.Signature)
				}
			}
			currentAssistant.FinishThinking()
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnTextDelta: func(id string, text string) error {
			currentAssistant.AppendContent(text)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnToolInputStart: func(id string, toolName string) error {
			toolCall := message.ToolCall{
				ID:               id,
				Name:             toolName,
				ProviderExecuted: false,
				Finished:         false,
			}
			currentAssistant.AddToolCall(toolCall)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnRetry: func(err *ai.APICallError, delay time.Duration) {
			// TODO: implement
		},
		OnToolCall: func(tc ai.ToolCallContent) error {
			toolCall := message.ToolCall{
				ID:               tc.ToolCallID,
				Name:             tc.ToolName,
				Input:            tc.Input,
				ProviderExecuted: false,
				Finished:         true,
			}
			currentAssistant.AddToolCall(toolCall)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnToolResult: func(result ai.ToolResultContent) error {
			var resultContent string
			isError := false
			switch result.Result.GetType() {
			case ai.ToolResultContentTypeText:
				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Result)
				if ok {
					resultContent = r.Text
				}
			case ai.ToolResultContentTypeError:
				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Result)
				if ok {
					isError = true
					resultContent = r.Error.Error()
				}
			case ai.ToolResultContentTypeMedia:
				// TODO: handle this message type
			}
			toolResult := message.ToolResult{
				ToolCallID: result.ToolCallID,
				Name:       result.ToolName,
				Content:    resultContent,
				IsError:    isError,
				Metadata:   result.ClientMetadata,
			}
			if _, createErr := a.messages.Create(context.Background(), currentAssistant.SessionID, message.CreateMessageParams{
				Role: message.Tool,
				Parts: []message.ContentPart{
					toolResult,
				},
			}); createErr != nil {
				return createErr
			}
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnStepFinish: func(stepResult ai.StepResult) error {
			finishReason := message.FinishReasonUnknown
			switch stepResult.FinishReason {
			case ai.FinishReasonLength:
				finishReason = message.FinishReasonMaxTokens
			case ai.FinishReasonStop:
				finishReason = message.FinishReasonEndTurn
			case ai.FinishReasonToolCalls:
				finishReason = message.FinishReasonToolUse
			}
			slog.Info("OnStepFinish", "reason", stepResult.FinishReason)
			currentAssistant.AddFinish(finishReason, "", "")
			// guard currentSession: the title goroutine may also be writing to it
			sessionLock.Lock()
			a.updateSessionUsage(a.largeModel, &currentSession, stepResult.Usage)
			_, sessionErr := a.sessions.Save(genCtx, currentSession)
			sessionLock.Unlock()
			if sessionErr != nil {
				return sessionErr
			}
			return a.messages.Update(genCtx, *currentAssistant)
		},
	})
	if err != nil {
		isCancelErr := errors.Is(err, context.Canceled)
		isPermissionErr := errors.Is(err, permission.ErrorPermissionDenied)
		if currentAssistant == nil {
			return result, err
		}
		toolCalls := currentAssistant.ToolCalls()
		toolResults := currentAssistant.ToolResults()
		for _, tc := range toolCalls {
			if !tc.Finished {
				tc.Finished = true
				tc.Input = "{}"
			}
			currentAssistant.AddToolCall(tc)
			found := false
			for _, tr := range toolResults {
				if tr.ToolCallID == tc.ID {
					found = true
					break
				}
			}
			if !found {
				content := "There was an error while executing the tool"
				if isCancelErr {
					content = "Tool execution canceled by user"
				} else if isPermissionErr {
					content = "Permission denied"
				}
				currentAssistant.AddToolResult(message.ToolResult{
					ToolCallID: tc.ID,
					Name:       tc.Name,
					Content:    content,
					IsError:    true,
				})
			}
		}
		if isCancelErr {
			currentAssistant.AddFinish(message.FinishReasonCanceled, "Request cancelled", "")
		} else if isPermissionErr {
			currentAssistant.AddFinish(message.FinishReasonPermissionDenied, "Permission denied", "")
		} else {
			currentAssistant.AddFinish(message.FinishReasonError, "API Error", err.Error())
		}
		// INFO: we use the parent context here because the genCtx might have been cancelled
		updateErr := a.messages.Update(ctx, *currentAssistant)
		if updateErr != nil {
			return nil, updateErr
		}
	}
	if err != nil {
		return nil, err
	}
	wg.Wait()

	queuedMessages, ok := a.messageQueue.Get(call.SessionID)
	if !ok || len(queuedMessages) == 0 {
		return result, err
	}
	// there are queued messages, so restart the loop with the first one
	firstQueuedMessage := queuedMessages[0]
	a.messageQueue.Set(call.SessionID, queuedMessages[1:])
	return a.Run(genCtx, firstQueuedMessage)
}

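// Summarize generates a summary of the session's conversation so far and
// stores it as a new assistant message; subsequent prompts only include the
// history from that summary onward (see getSessionMessages). It returns
// ErrSessionBusy if the session already has an active request.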
func (a *sessionAgent) Summarize(ctx context.Context, sessionID string) error {
	if a.IsSessionBusy(sessionID) {
		return ErrSessionBusy
	}

	currentSession, err := a.sessions.Get(ctx, sessionID)
	if err != nil {
		return fmt.Errorf("failed to get session: %w", err)
	}
	msgs, err := a.getSessionMessages(ctx, currentSession)
	if err != nil {
		return err
	}
	if len(msgs) == 0 {
		// nothing to summarize
		return nil
	}

	aiMsgs, _ := a.preparePrompt(msgs)

	genCtx, cancel := context.WithCancel(ctx)
	a.activeRequests.Set(sessionID, cancel)
	defer a.activeRequests.Del(sessionID)
	defer cancel()

	agent := ai.NewAgent(a.largeModel.Model,
		ai.WithSystemPrompt(string(summaryPrompt)),
	)
	summaryMessage, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
		Role:     message.Assistant,
		Model:    a.largeModel.Model.Model(),
		Provider: a.largeModel.Model.Provider(),
	})
	if err != nil {
		return err
	}

	// use the cancellable context so Cancel(sessionID) can stop the summary
	resp, err := agent.Stream(genCtx, ai.AgentStreamCall{
		Prompt:   "Provide a detailed summary of our conversation above.",
		Messages: aiMsgs,
		OnReasoningDelta: func(id string, text string) error {
			summaryMessage.AppendReasoningContent(text)
			return a.messages.Update(ctx, summaryMessage)
		},
		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
			// handle anthropic signature
			if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
				if signature, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok && signature.Signature != "" {
					summaryMessage.AppendReasoningSignature(signature.Signature)
				}
			}
			summaryMessage.FinishThinking()
			return a.messages.Update(ctx, summaryMessage)
		},
		OnTextDelta: func(id, text string) error {
			summaryMessage.AppendContent(text)
			return a.messages.Update(ctx, summaryMessage)
		},
	})
	if err != nil {
		return err
	}

	summaryMessage.AddFinish(message.FinishReasonEndTurn, "", "")
	err = a.messages.Update(genCtx, summaryMessage)
	if err != nil {
		return err
	}

	a.updateSessionUsage(a.largeModel, &currentSession, resp.TotalUsage)

	// record only the final response usage as the session's post-summary token counts
	usage := resp.Response.Usage
	currentSession.SummaryMessageID = summaryMessage.ID
	currentSession.CompletionTokens = usage.OutputTokens
	currentSession.PromptTokens = 0
	_, err = a.sessions.Save(genCtx, currentSession)
	return err
}

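// getCacheControlOptions returns Anthropic provider options that mark the
// attached message or tool with "ephemeral" cache control, enabling prompt
// caching on Anthropic models.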
func (a *sessionAgent) getCacheControlOptions() ai.ProviderOptions {
	return ai.ProviderOptions{
		anthropic.Name: &anthropic.ProviderCacheControlOptions{
			CacheControl: anthropic.CacheControl{Type: "ephemeral"},
		},
	}
}

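// createUserMessage stores the prompt (and any attachments as binary parts)
// as a new user message in the session.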
func (a *sessionAgent) createUserMessage(ctx context.Context, call SessionAgentCall) (message.Message, error) {
	var attachmentParts []message.ContentPart
	for _, attachment := range call.Attachments {
		attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
	}
	parts := []message.ContentPart{message.TextContent{Text: call.Prompt}}
	parts = append(parts, attachmentParts...)
	msg, err := a.messages.Create(ctx, call.SessionID, message.CreateMessageParams{
		Role:  message.User,
		Parts: parts,
	})
	if err != nil {
		return message.Message{}, fmt.Errorf("failed to create user message: %w", err)
	}
	return msg, nil
}

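// preparePrompt converts stored session messages into the AI message history
// and the current attachments into file parts. Empty messages and assistant
// messages that were cancelled before producing any content are skipped.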
func (a *sessionAgent) preparePrompt(msgs []message.Message, attachments ...message.Attachment) ([]ai.Message, []ai.FilePart) {
	var history []ai.Message
	for _, m := range msgs {
		if len(m.Parts) == 0 {
			continue
		}
		// Assistant message without content or tool calls (cancelled before it returned anything)
		if m.Role == message.Assistant && len(m.ToolCalls()) == 0 && m.Content().Text == "" && m.ReasoningContent().String() == "" {
			continue
		}
		history = append(history, m.ToAIMessage()...)
	}

	var files []ai.FilePart
	for _, attachment := range attachments {
		files = append(files, ai.FilePart{
			Filename:  attachment.FileName,
			Data:      attachment.Content,
			MediaType: attachment.MimeType,
		})
	}

	return history, files
}

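// getSessionMessages lists the session's messages. If the session has been
// summarized, only the summary message and everything after it are returned,
// with the summary re-cast as a user message so it can seed the next prompt.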
func (a *sessionAgent) getSessionMessages(ctx context.Context, session session.Session) ([]message.Message, error) {
	msgs, err := a.messages.List(ctx, session.ID)
	if err != nil {
		return nil, fmt.Errorf("failed to list messages: %w", err)
	}

	if session.SummaryMessageID != "" {
		summaryMsgIndex := -1
		for i, msg := range msgs {
			if msg.ID == session.SummaryMessageID {
				summaryMsgIndex = i
				break
			}
		}
		if summaryMsgIndex != -1 {
			msgs = msgs[summaryMsgIndex:]
			msgs[0].Role = message.User
		}
	}
	return msgs, nil
}

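// generateTitle asks the small model for a short session title based on the
// first prompt, strips any leftover thinking tags, and saves the result
// together with the usage it incurred.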
func (a *sessionAgent) generateTitle(ctx context.Context, session *session.Session, prompt string) {
	if prompt == "" {
		return
	}

	agent := ai.NewAgent(a.smallModel.Model,
		ai.WithSystemPrompt(string(titlePrompt)),
		ai.WithMaxOutputTokens(40),
	)

	resp, err := agent.Stream(ctx, ai.AgentStreamCall{
		Prompt: fmt.Sprintf("Generate a concise title for the following content:\n\n%s", prompt),
	})
	if err != nil {
		slog.Error("error generating title", "err", err)
		return
	}

	title := resp.Response.Content.Text()

	title = strings.ReplaceAll(title, "\n", " ")

	// remove thinking tags if present
	if idx := strings.Index(title, "</think>"); idx >= 0 {
		title = title[idx+len("</think>"):]
	}

	title = strings.TrimSpace(title)
	if title == "" {
		slog.Warn("failed to generate title", "warn", "empty title")
		return
	}

	session.Title = title
	a.updateSessionUsage(a.smallModel, session, resp.TotalUsage)
	_, saveErr := a.sessions.Save(ctx, *session)
	if saveErr != nil {
		slog.Error("failed to save session title & usage", "error", saveErr)
		return
	}
}

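// updateSessionUsage adds the cost of a response to the session and records
// its token counts. Cost is computed from the catwalk per-1M-token rates; as
// an illustrative example, 2,000 input tokens at $3 per 1M tokens contribute
// 3/1e6*2000 = $0.006.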
func (a *sessionAgent) updateSessionUsage(model Model, session *session.Session, usage ai.Usage) {
	modelConfig := model.CatwalkCfg
	cost := modelConfig.CostPer1MInCached/1e6*float64(usage.CacheCreationTokens) +
		modelConfig.CostPer1MOutCached/1e6*float64(usage.CacheReadTokens) +
		modelConfig.CostPer1MIn/1e6*float64(usage.InputTokens) +
		modelConfig.CostPer1MOut/1e6*float64(usage.OutputTokens)
	session.Cost += cost
	session.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
	session.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
}

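// Cancel aborts any in-flight request for the session and drops its queued
// prompts.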
func (a *sessionAgent) Cancel(sessionID string) {
	// Cancel regular requests
	if cancel, ok := a.activeRequests.Take(sessionID); ok && cancel != nil {
		slog.Info("Request cancellation initiated", "session_id", sessionID)
		cancel()
	}

	// Also check for summarize requests
	if cancel, ok := a.activeRequests.Take(sessionID + "-summarize"); ok && cancel != nil {
		slog.Info("Summarize cancellation initiated", "session_id", sessionID)
		cancel()
	}

	if a.QueuedPrompts(sessionID) > 0 {
		slog.Info("Clearing queued prompts", "session_id", sessionID)
		a.messageQueue.Del(sessionID)
	}
}

func (a *sessionAgent) ClearQueue(sessionID string) {
	if a.QueuedPrompts(sessionID) > 0 {
		slog.Info("Clearing queued prompts", "session_id", sessionID)
		a.messageQueue.Del(sessionID)
	}
}

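// CancelAll cancels every active request and waits up to five seconds for
// them to wind down.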
func (a *sessionAgent) CancelAll() {
	if !a.IsBusy() {
		return
	}
	for key := range a.activeRequests.Seq2() {
		a.Cancel(key) // key is sessionID
	}

	timeout := time.After(5 * time.Second)
	for a.IsBusy() {
		select {
		case <-timeout:
			return
		default:
			time.Sleep(200 * time.Millisecond)
		}
	}
}

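// IsBusy reports whether any session currently has an active request.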
func (a *sessionAgent) IsBusy() bool {
	var busy bool
	for cancelFunc := range a.activeRequests.Seq() {
		if cancelFunc != nil {
			busy = true
			break
		}
	}
	return busy
}

func (a *sessionAgent) IsSessionBusy(sessionID string) bool {
	_, busy := a.activeRequests.Get(sessionID)
	return busy
}

func (a *sessionAgent) QueuedPrompts(sessionID string) int {
	l, ok := a.messageQueue.Get(sessionID)
	if !ok {
		return 0
	}
	return len(l)
}

func (a *sessionAgent) SetModels(large Model, small Model) {
	a.largeModel = large
	a.smallModel = small
}

func (a *sessionAgent) SetTools(tools []ai.AgentTool) {
	a.tools = tools
}

func (a *sessionAgent) Model() Model {
	return a.largeModel
}