1// Package agent is the core orchestration layer for Crush AI agents.
2//
3// It provides session-based AI agent functionality for managing
4// conversations, tool execution, and message handling. It coordinates
5// interactions between language models, messages, sessions, and tools while
6// handling features like automatic summarization, queuing, and token
7// management.
8package agent
9
10import (
11 "cmp"
12 "context"
13 _ "embed"
14 "encoding/base64"
15 "errors"
16 "fmt"
17 "log/slog"
18 "os"
19 "regexp"
20 "strconv"
21 "strings"
22 "sync"
23 "time"
24
25 "charm.land/catwalk/pkg/catwalk"
26 "charm.land/fantasy"
27 "charm.land/fantasy/providers/anthropic"
28 "charm.land/fantasy/providers/bedrock"
29 "charm.land/fantasy/providers/google"
30 "charm.land/fantasy/providers/openai"
31 "charm.land/fantasy/providers/openrouter"
32 "charm.land/fantasy/providers/vercel"
33 "charm.land/lipgloss/v2"
34 "github.com/charmbracelet/crush/internal/agent/hyper"
35 "github.com/charmbracelet/crush/internal/agent/notify"
36 "github.com/charmbracelet/crush/internal/agent/tools"
37 "github.com/charmbracelet/crush/internal/agent/tools/mcp"
38 "github.com/charmbracelet/crush/internal/config"
39 "github.com/charmbracelet/crush/internal/csync"
40 "github.com/charmbracelet/crush/internal/message"
41 "github.com/charmbracelet/crush/internal/permission"
42 "github.com/charmbracelet/crush/internal/pubsub"
43 "github.com/charmbracelet/crush/internal/session"
44 "github.com/charmbracelet/crush/internal/stringext"
45 "github.com/charmbracelet/crush/internal/version"
46 "github.com/charmbracelet/x/exp/charmtone"
47)
48
const (
 // DefaultSessionName is the fallback session title used when title
 // generation fails or yields an empty result.
 DefaultSessionName = "Untitled Session"

 // Constants for auto-summarization thresholds.
 largeContextWindowThreshold = 200_000 // context windows above this size use a fixed token buffer
 largeContextWindowBuffer = 20_000 // tokens reserved before summarizing on large windows
 smallContextWindowRatio = 0.2 // fraction of the window reserved on smaller windows
)
57
58var userAgent = fmt.Sprintf("Charm-Crush/%s (https://charm.land/crush)", version.Version)
59
// titlePrompt is the embedded system prompt used for session title generation.
//
//go:embed templates/title.md
var titlePrompt []byte

// summaryPrompt is the embedded system prompt used for session summarization.
//
//go:embed templates/summary.md
var summaryPrompt []byte
65
// Used to remove <think> tags from generated titles. Reasoning models may
// leak complete <think>...</think> sections or stray unmatched tags into
// their output; the first regex strips full sections, the second any
// leftover opening/closing tags.
var (
 thinkTagRegex = regexp.MustCompile(`(?s)<think>.*?</think>`)
 orphanThinkTagRegex = regexp.MustCompile(`</?think>`)
)
71
// SessionAgentCall describes a single prompt request to run within a session.
type SessionAgentCall struct {
 SessionID string // required; the session the prompt belongs to
 Prompt string // user prompt text; may be empty if a text attachment is present
 ProviderOptions fantasy.ProviderOptions
 Attachments []message.Attachment
 MaxOutputTokens int64 // 0 means "let the provider decide" (not sent)
 Temperature *float64 // nil fields below are omitted from the request
 TopP *float64
 TopK *int64
 FrequencyPenalty *float64
 PresencePenalty *float64
 NonInteractive bool // suppresses the agent-finished notification
}
85
// SessionAgent runs prompts against sessions, queuing requests for busy
// sessions and exposing cancellation, queue inspection, and summarization.
type SessionAgent interface {
 // Run executes a prompt; if the session is busy the call is queued and
 // (nil, nil) is returned.
 Run(context.Context, SessionAgentCall) (*fantasy.AgentResult, error)
 // SetModels swaps the large and small models used for subsequent runs.
 SetModels(large Model, small Model)
 SetTools(tools []fantasy.AgentTool)
 SetSystemPrompt(systemPrompt string)
 // Cancel aborts the in-flight request for the given session, if any.
 Cancel(sessionID string)
 CancelAll()
 IsSessionBusy(sessionID string) bool
 IsBusy() bool
 QueuedPrompts(sessionID string) int
 QueuedPromptsList(sessionID string) []string
 ClearQueue(sessionID string)
 // Summarize compacts the session history into a summary message.
 Summarize(context.Context, string, fantasy.ProviderOptions) error
 // Model returns the current large model.
 Model() Model
}
101
// Model bundles a language model with its catwalk metadata and the user's
// selected-model configuration.
type Model struct {
 Model fantasy.LanguageModel // the model used to generate responses
 CatwalkCfg catwalk.Model // capability metadata (context window, reasoning, costs)
 ModelCfg config.SelectedModel // user-selected provider/model configuration
}
107
// sessionAgent is the default SessionAgent implementation. Mutable fields
// are wrapped in csync types because Set* methods may race with Run.
type sessionAgent struct {
 largeModel *csync.Value[Model]
 smallModel *csync.Value[Model] // used for cheap tasks like title generation
 systemPromptPrefix *csync.Value[string]
 systemPrompt *csync.Value[string]
 tools *csync.Slice[fantasy.AgentTool]

 isSubAgent bool
 sessions session.Service
 messages message.Service
 disableAutoSummarize bool
 isYolo bool
 notify pubsub.Publisher[notify.Notification]

 // messageQueue holds prompts submitted while their session was busy.
 messageQueue *csync.Map[string, []SessionAgentCall]
 // activeRequests maps a session ID to the cancel func of its in-flight
 // request; presence in the map is what IsSessionBusy checks.
 activeRequests *csync.Map[string, context.CancelFunc]
}
125
// SessionAgentOptions configures NewSessionAgent.
type SessionAgentOptions struct {
 LargeModel Model
 SmallModel Model
 SystemPromptPrefix string
 SystemPrompt string
 IsSubAgent bool
 DisableAutoSummarize bool
 IsYolo bool
 Sessions session.Service
 Messages message.Service
 Tools []fantasy.AgentTool
 Notify pubsub.Publisher[notify.Notification]
}
139
140func NewSessionAgent(
141 opts SessionAgentOptions,
142) SessionAgent {
143 return &sessionAgent{
144 largeModel: csync.NewValue(opts.LargeModel),
145 smallModel: csync.NewValue(opts.SmallModel),
146 systemPromptPrefix: csync.NewValue(opts.SystemPromptPrefix),
147 systemPrompt: csync.NewValue(opts.SystemPrompt),
148 isSubAgent: opts.IsSubAgent,
149 sessions: opts.Sessions,
150 messages: opts.Messages,
151 disableAutoSummarize: opts.DisableAutoSummarize,
152 tools: csync.NewSliceFrom(opts.Tools),
153 isYolo: opts.IsYolo,
154 notify: opts.Notify,
155 messageQueue: csync.NewMap[string, []SessionAgentCall](),
156 activeRequests: csync.NewMap[string, context.CancelFunc](),
157 }
158}
159
// Run executes a prompt against the session identified by call.SessionID.
//
// If the session is already processing a request, the call is queued and
// (nil, nil) is returned; queued prompts are drained either mid-stream in
// PrepareStep or by re-invoking Run after the current request finishes. The
// first prompt of a session also kicks off concurrent title generation. On
// stream errors, unfinished tool calls are closed out with synthetic tool
// results so the persisted history stays valid for subsequent turns. When
// the context window is nearly full, the run stops and the session is
// summarized automatically (unless disabled).
func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*fantasy.AgentResult, error) {
 if call.Prompt == "" && !message.ContainsTextAttachment(call.Attachments) {
  return nil, ErrEmptyPrompt
 }
 if call.SessionID == "" {
  return nil, ErrSessionMissing
 }

 // Queue the message if busy
 if a.IsSessionBusy(call.SessionID) {
  existing, ok := a.messageQueue.Get(call.SessionID)
  if !ok {
   existing = []SessionAgentCall{}
  }
  existing = append(existing, call)
  a.messageQueue.Set(call.SessionID, existing)
  return nil, nil
 }

 // Copy mutable fields under lock to avoid races with SetTools/SetModels.
 agentTools := a.tools.Copy()
 largeModel := a.largeModel.Get()
 systemPrompt := a.systemPrompt.Get()
 promptPrefix := a.systemPromptPrefix.Get()
 var instructions strings.Builder

 // Collect instructions advertised by connected MCP servers and append
 // them to the system prompt.
 for _, server := range mcp.GetStates() {
  if server.State != mcp.StateConnected {
   continue
  }
  if s := server.Client.InitializeResult().Instructions; s != "" {
   instructions.WriteString(s)
   instructions.WriteString("\n\n")
  }
 }

 if s := instructions.String(); s != "" {
  systemPrompt += "\n\n<mcp-instructions>\n" + s + "\n</mcp-instructions>"
 }

 if len(agentTools) > 0 {
  // Add Anthropic caching to the last tool.
  agentTools[len(agentTools)-1].SetProviderOptions(a.getCacheControlOptions())
 }

 agent := fantasy.NewAgent(
  largeModel.Model,
  fantasy.WithSystemPrompt(systemPrompt),
  fantasy.WithTools(agentTools...),
  fantasy.WithUserAgent(userAgent),
 )

 // Guards concurrent session reads/updates from OnStepFinish callbacks.
 sessionLock := sync.Mutex{}
 currentSession, err := a.sessions.Get(ctx, call.SessionID)
 if err != nil {
  return nil, fmt.Errorf("failed to get session: %w", err)
 }

 msgs, err := a.getSessionMessages(ctx, currentSession)
 if err != nil {
  return nil, fmt.Errorf("failed to get session messages: %w", err)
 }

 var wg sync.WaitGroup
 // Generate title if first message.
 if len(msgs) == 0 {
  titleCtx := ctx // Copy to avoid race with ctx reassignment below.
  wg.Go(func() {
   a.generateTitle(titleCtx, call.SessionID, call.Prompt)
  })
 }
 defer wg.Wait()

 // Add the user message to the session.
 _, err = a.createUserMessage(ctx, call)
 if err != nil {
  return nil, err
 }

 // Add the session to the context.
 ctx = context.WithValue(ctx, tools.SessionIDContextKey, call.SessionID)

 genCtx, cancel := context.WithCancel(ctx)
 a.activeRequests.Set(call.SessionID, cancel)

 defer cancel()
 defer a.activeRequests.Del(call.SessionID)

 history, files := a.preparePrompt(msgs, call.Attachments...)

 startTime := time.Now()
 a.eventPromptSent(call.SessionID)

 var currentAssistant *message.Message
 var shouldSummarize bool
 // Don't send MaxOutputTokens if 0 — some providers (e.g. LM Studio) reject it
 var maxOutputTokens *int64
 if call.MaxOutputTokens > 0 {
  maxOutputTokens = &call.MaxOutputTokens
 }
 result, err := agent.Stream(genCtx, fantasy.AgentStreamCall{
  Prompt: message.PromptWithTextAttachments(call.Prompt, call.Attachments),
  Files: files,
  Messages: history,
  ProviderOptions: call.ProviderOptions,
  MaxOutputTokens: maxOutputTokens,
  TopP: call.TopP,
  Temperature: call.Temperature,
  PresencePenalty: call.PresencePenalty,
  TopK: call.TopK,
  FrequencyPenalty: call.FrequencyPenalty,
  // PrepareStep runs before every model step: it refreshes tools, drains
  // queued prompts into the history, applies provider workarounds and
  // cache-control markers, and creates the assistant message for the step.
  PrepareStep: func(callContext context.Context, options fantasy.PrepareStepFunctionOptions) (_ context.Context, prepared fantasy.PrepareStepResult, err error) {
   prepared.Messages = options.Messages
   // Clear stale per-message provider options; cache control is
   // re-applied below to only the relevant messages.
   for i := range prepared.Messages {
    prepared.Messages[i].ProviderOptions = nil
   }

   // Use latest tools (updated by SetTools when MCP tools change).
   prepared.Tools = a.tools.Copy()

   // Drain prompts queued while this session was busy and splice them
   // into the conversation for this step.
   queuedCalls, _ := a.messageQueue.Get(call.SessionID)
   a.messageQueue.Del(call.SessionID)
   for _, queued := range queuedCalls {
    userMessage, createErr := a.createUserMessage(callContext, queued)
    if createErr != nil {
     return callContext, prepared, createErr
    }
    prepared.Messages = append(prepared.Messages, userMessage.ToAIMessage()...)
   }

   prepared.Messages = a.workaroundProviderMediaLimitations(prepared.Messages, largeModel)

   lastSystemRoleInx := 0
   systemMessageUpdated := false
   for i, msg := range prepared.Messages {
    // Only add cache control to the last message.
    if msg.Role == fantasy.MessageRoleSystem {
     lastSystemRoleInx = i
    } else if !systemMessageUpdated {
     prepared.Messages[lastSystemRoleInx].ProviderOptions = a.getCacheControlOptions()
     systemMessageUpdated = true
    }
    // Then add cache control to the last 2 messages.
    if i > len(prepared.Messages)-3 {
     prepared.Messages[i].ProviderOptions = a.getCacheControlOptions()
    }
   }

   if promptPrefix != "" {
    prepared.Messages = append([]fantasy.Message{fantasy.NewSystemMessage(promptPrefix)}, prepared.Messages...)
   }

   // Create the assistant message that streaming callbacks below will
   // incrementally update.
   var assistantMsg message.Message
   assistantMsg, err = a.messages.Create(callContext, call.SessionID, message.CreateMessageParams{
    Role: message.Assistant,
    Parts: []message.ContentPart{},
    Model: largeModel.ModelCfg.Model,
    Provider: largeModel.ModelCfg.Provider,
   })
   if err != nil {
    return callContext, prepared, err
   }
   callContext = context.WithValue(callContext, tools.MessageIDContextKey, assistantMsg.ID)
   callContext = context.WithValue(callContext, tools.SupportsImagesContextKey, largeModel.CatwalkCfg.SupportsImages)
   callContext = context.WithValue(callContext, tools.ModelNameContextKey, largeModel.CatwalkCfg.Name)
   currentAssistant = &assistantMsg
   return callContext, prepared, err
  },
  OnReasoningStart: func(id string, reasoning fantasy.ReasoningContent) error {
   currentAssistant.AppendReasoningContent(reasoning.Text)
   return a.messages.Update(genCtx, *currentAssistant)
  },
  OnReasoningDelta: func(id string, text string) error {
   currentAssistant.AppendReasoningContent(text)
   return a.messages.Update(genCtx, *currentAssistant)
  },
  OnReasoningEnd: func(id string, reasoning fantasy.ReasoningContent) error {
   // handle anthropic signature
   if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
    if reasoning, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok {
     currentAssistant.AppendReasoningSignature(reasoning.Signature)
    }
   }
   if googleData, ok := reasoning.ProviderMetadata[google.Name]; ok {
    if reasoning, ok := googleData.(*google.ReasoningMetadata); ok {
     currentAssistant.AppendThoughtSignature(reasoning.Signature, reasoning.ToolID)
    }
   }
   if openaiData, ok := reasoning.ProviderMetadata[openai.Name]; ok {
    if reasoning, ok := openaiData.(*openai.ResponsesReasoningMetadata); ok {
     currentAssistant.SetReasoningResponsesData(reasoning)
    }
   }
   currentAssistant.FinishThinking()
   return a.messages.Update(genCtx, *currentAssistant)
  },
  OnTextDelta: func(id string, text string) error {
   // Strip leading newline from initial text content. This is
   // particularly important in non-interactive mode where leading
   // newlines are very visible.
   if len(currentAssistant.Parts) == 0 {
    text = strings.TrimPrefix(text, "\n")
   }

   currentAssistant.AppendContent(text)
   return a.messages.Update(genCtx, *currentAssistant)
  },
  OnToolInputStart: func(id string, toolName string) error {
   toolCall := message.ToolCall{
    ID: id,
    Name: toolName,
    ProviderExecuted: false,
    Finished: false,
   }
   currentAssistant.AddToolCall(toolCall)
   // Use parent ctx instead of genCtx to ensure the update succeeds
   // even if the request is canceled mid-stream
   return a.messages.Update(ctx, *currentAssistant)
  },
  OnRetry: func(err *fantasy.ProviderError, delay time.Duration) {
   // TODO: implement
  },
  OnToolCall: func(tc fantasy.ToolCallContent) error {
   toolCall := message.ToolCall{
    ID: tc.ToolCallID,
    Name: tc.ToolName,
    Input: tc.Input,
    ProviderExecuted: false,
    Finished: true,
   }
   currentAssistant.AddToolCall(toolCall)
   // Use parent ctx instead of genCtx to ensure the update succeeds
   // even if the request is canceled mid-stream
   return a.messages.Update(ctx, *currentAssistant)
  },
  OnToolResult: func(result fantasy.ToolResultContent) error {
   toolResult := a.convertToToolResult(result)
   // Use parent ctx instead of genCtx to ensure the message is created
   // even if the request is canceled mid-stream
   _, createMsgErr := a.messages.Create(ctx, currentAssistant.SessionID, message.CreateMessageParams{
    Role: message.Tool,
    Parts: []message.ContentPart{
     toolResult,
    },
   })
   return createMsgErr
  },
  // OnStepFinish records the finish reason and persists updated token
  // usage/cost on the session.
  OnStepFinish: func(stepResult fantasy.StepResult) error {
   finishReason := message.FinishReasonUnknown
   switch stepResult.FinishReason {
   case fantasy.FinishReasonLength:
    finishReason = message.FinishReasonMaxTokens
   case fantasy.FinishReasonStop:
    finishReason = message.FinishReasonEndTurn
   case fantasy.FinishReasonToolCalls:
    finishReason = message.FinishReasonToolUse
   }
   currentAssistant.AddFinish(finishReason, "", "")
   sessionLock.Lock()
   defer sessionLock.Unlock()

   // Re-fetch the session to avoid clobbering concurrent updates.
   updatedSession, getSessionErr := a.sessions.Get(ctx, call.SessionID)
   if getSessionErr != nil {
    return getSessionErr
   }
   a.updateSessionUsage(largeModel, &updatedSession, stepResult.Usage, a.openrouterCost(stepResult.ProviderMetadata))
   _, sessionErr := a.sessions.SaveWithModels(ctx, updatedSession, map[config.SelectedModelType]config.SelectedModel{
    config.SelectedModelTypeLarge: a.largeModel.Get().ModelCfg,
    config.SelectedModelTypeSmall: a.smallModel.Get().ModelCfg,
   })
   if sessionErr != nil {
    return sessionErr
   }
   currentSession = updatedSession
   return a.messages.Update(genCtx, *currentAssistant)
  },
  StopWhen: []fantasy.StopCondition{
   // Stop when the remaining context window falls below the
   // auto-summarize threshold.
   func(_ []fantasy.StepResult) bool {
    cw := int64(largeModel.CatwalkCfg.ContextWindow)
    // If context window is unknown (0), skip auto-summarize
    // to avoid immediately truncating custom/local models.
    if cw == 0 {
     return false
    }
    tokens := currentSession.CompletionTokens + currentSession.PromptTokens
    remaining := cw - tokens
    var threshold int64
    if cw > largeContextWindowThreshold {
     threshold = largeContextWindowBuffer
    } else {
     threshold = int64(float64(cw) * smallContextWindowRatio)
    }
    if (remaining <= threshold) && !a.disableAutoSummarize {
     shouldSummarize = true
     return true
    }
    return false
   },
   // Stop when the model appears stuck repeating the same tool calls.
   func(steps []fantasy.StepResult) bool {
    return hasRepeatedToolCalls(steps, loopDetectionWindowSize, loopDetectionMaxRepeats)
   },
  },
 })

 a.eventPromptResponded(call.SessionID, time.Since(startTime).Truncate(time.Second))

 if err != nil {
  isCancelErr := errors.Is(err, context.Canceled)
  isPermissionErr := errors.Is(err, permission.ErrorPermissionDenied)
  if currentAssistant == nil {
   return result, err
  }
  // Ensure we finish thinking on error to close the reasoning state.
  currentAssistant.FinishThinking()
  toolCalls := currentAssistant.ToolCalls()
  // INFO: we use the parent context here because the genCtx has been cancelled.
  msgs, createErr := a.messages.List(ctx, currentAssistant.SessionID)
  if createErr != nil {
   return nil, createErr
  }
  // Backfill a tool result for every tool call that has none, so the
  // persisted history remains valid for the next turn.
  for _, tc := range toolCalls {
   if !tc.Finished {
    tc.Finished = true
    tc.Input = "{}"
    currentAssistant.AddToolCall(tc)
    updateErr := a.messages.Update(ctx, *currentAssistant)
    if updateErr != nil {
     return nil, updateErr
    }
   }

   found := false
   for _, msg := range msgs {
    if msg.Role == message.Tool {
     for _, tr := range msg.ToolResults() {
      if tr.ToolCallID == tc.ID {
       found = true
       break
      }
     }
    }
    if found {
     break
    }
   }
   if found {
    continue
   }
   content := "There was an error while executing the tool"
   if isCancelErr {
    content = "Error: user cancelled assistant tool calling"
   } else if isPermissionErr {
    content = "User denied permission"
   }
   toolResult := message.ToolResult{
    ToolCallID: tc.ID,
    Name: tc.Name,
    Content: content,
    IsError: true,
   }
   _, createErr = a.messages.Create(ctx, currentAssistant.SessionID, message.CreateMessageParams{
    Role: message.Tool,
    Parts: []message.ContentPart{
     toolResult,
    },
   })
   if createErr != nil {
    return nil, createErr
   }
  }
  // Map the error to a user-facing finish reason/title/details.
  var fantasyErr *fantasy.Error
  var providerErr *fantasy.ProviderError
  const defaultTitle = "Provider Error"
  linkStyle := lipgloss.NewStyle().Foreground(charmtone.Guac).Underline(true)
  if isCancelErr {
   currentAssistant.AddFinish(message.FinishReasonCanceled, "User canceled request", "")
  } else if isPermissionErr {
   currentAssistant.AddFinish(message.FinishReasonPermissionDenied, "User denied permission", "")
  } else if errors.Is(err, hyper.ErrUnauthorized) {
   currentAssistant.AddFinish(message.FinishReasonError, "Unauthorized", `Please re-authenticate with Hyper. You can also run "crush auth" to re-authenticate.`)
   if a.notify != nil {
    a.notify.Publish(pubsub.CreatedEvent, notify.Notification{
     SessionID: call.SessionID,
     SessionTitle: currentSession.Title,
     Type: notify.TypeReAuthenticate,
     ProviderID: largeModel.ModelCfg.Provider,
    })
   }
  } else if errors.Is(err, hyper.ErrNoCredits) {
   url := hyper.BaseURL()
   link := linkStyle.Hyperlink(url, "id=hyper").Render(url)
   currentAssistant.AddFinish(message.FinishReasonError, "No credits", "You're out of credits. Add more at "+link)
  } else if errors.As(err, &providerErr) {
   if providerErr.Message == "The requested model is not supported." {
    url := "https://github.com/settings/copilot/features"
    link := linkStyle.Hyperlink(url, "id=copilot").Render(url)
    currentAssistant.AddFinish(
     message.FinishReasonError,
     "Copilot model not enabled",
     fmt.Sprintf("%q is not enabled in Copilot. Go to the following page to enable it. Then, wait 5 minutes before trying again. %s", largeModel.CatwalkCfg.Name, link),
    )
   } else {
    currentAssistant.AddFinish(message.FinishReasonError, cmp.Or(stringext.Capitalize(providerErr.Title), defaultTitle), providerErr.Message)
   }
  } else if errors.As(err, &fantasyErr) {
   currentAssistant.AddFinish(message.FinishReasonError, cmp.Or(stringext.Capitalize(fantasyErr.Title), defaultTitle), fantasyErr.Message)
  } else {
   currentAssistant.AddFinish(message.FinishReasonError, defaultTitle, err.Error())
  }
  // Note: we use the parent context here because the genCtx has been
  // cancelled.
  updateErr := a.messages.Update(ctx, *currentAssistant)
  if updateErr != nil {
   return nil, updateErr
  }
  return nil, err
 }

 // Send notification that agent has finished its turn (skip for
 // nested/non-interactive sessions).
 if !call.NonInteractive && a.notify != nil {
  a.notify.Publish(pubsub.CreatedEvent, notify.Notification{
   SessionID: call.SessionID,
   SessionTitle: currentSession.Title,
   Type: notify.TypeAgentFinished,
  })
 }

 if shouldSummarize {
  a.activeRequests.Del(call.SessionID)
  if summarizeErr := a.Summarize(genCtx, call.SessionID, call.ProviderOptions); summarizeErr != nil {
   return nil, summarizeErr
  }
  // If the agent wasn't done...
  if len(currentAssistant.ToolCalls()) > 0 {
   existing, ok := a.messageQueue.Get(call.SessionID)
   if !ok {
    existing = []SessionAgentCall{}
   }
   call.Prompt = fmt.Sprintf("The previous session was interrupted because it got too long, the initial user request was: `%s`", call.Prompt)
   existing = append(existing, call)
   a.messageQueue.Set(call.SessionID, existing)
  }
 }

 // Release active request before processing queued messages.
 a.activeRequests.Del(call.SessionID)
 cancel()

 queuedMessages, ok := a.messageQueue.Get(call.SessionID)
 if !ok || len(queuedMessages) == 0 {
  return result, err
 }
 // There are queued messages restart the loop.
 firstQueuedMessage := queuedMessages[0]
 a.messageQueue.Set(call.SessionID, queuedMessages[1:])
 return a.Run(ctx, firstQueuedMessage)
}
618
619func (a *sessionAgent) Summarize(ctx context.Context, sessionID string, opts fantasy.ProviderOptions) error {
620 if a.IsSessionBusy(sessionID) {
621 return ErrSessionBusy
622 }
623
624 // Copy mutable fields under lock to avoid races with SetModels.
625 largeModel := a.largeModel.Get()
626 systemPromptPrefix := a.systemPromptPrefix.Get()
627
628 currentSession, err := a.sessions.Get(ctx, sessionID)
629 if err != nil {
630 return fmt.Errorf("failed to get session: %w", err)
631 }
632 msgs, err := a.getSessionMessages(ctx, currentSession)
633 if err != nil {
634 return err
635 }
636 if len(msgs) == 0 {
637 // Nothing to summarize.
638 return nil
639 }
640
641 aiMsgs, _ := a.preparePrompt(msgs)
642
643 genCtx, cancel := context.WithCancel(ctx)
644 a.activeRequests.Set(sessionID, cancel)
645 defer a.activeRequests.Del(sessionID)
646 defer cancel()
647
648 agent := fantasy.NewAgent(largeModel.Model,
649 fantasy.WithSystemPrompt(string(summaryPrompt)),
650 fantasy.WithUserAgent(userAgent),
651 )
652 summaryMessage, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
653 Role: message.Assistant,
654 Model: largeModel.Model.Model(),
655 Provider: largeModel.Model.Provider(),
656 IsSummaryMessage: true,
657 })
658 if err != nil {
659 return err
660 }
661
662 summaryPromptText := buildSummaryPrompt(currentSession.Todos)
663
664 resp, err := agent.Stream(genCtx, fantasy.AgentStreamCall{
665 Prompt: summaryPromptText,
666 Messages: aiMsgs,
667 ProviderOptions: opts,
668 PrepareStep: func(callContext context.Context, options fantasy.PrepareStepFunctionOptions) (_ context.Context, prepared fantasy.PrepareStepResult, err error) {
669 prepared.Messages = options.Messages
670 if systemPromptPrefix != "" {
671 prepared.Messages = append([]fantasy.Message{fantasy.NewSystemMessage(systemPromptPrefix)}, prepared.Messages...)
672 }
673 return callContext, prepared, nil
674 },
675 OnReasoningDelta: func(id string, text string) error {
676 summaryMessage.AppendReasoningContent(text)
677 return a.messages.Update(genCtx, summaryMessage)
678 },
679 OnReasoningEnd: func(id string, reasoning fantasy.ReasoningContent) error {
680 // Handle anthropic signature.
681 if anthropicData, ok := reasoning.ProviderMetadata["anthropic"]; ok {
682 if signature, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok && signature.Signature != "" {
683 summaryMessage.AppendReasoningSignature(signature.Signature)
684 }
685 }
686 summaryMessage.FinishThinking()
687 return a.messages.Update(genCtx, summaryMessage)
688 },
689 OnTextDelta: func(id, text string) error {
690 summaryMessage.AppendContent(text)
691 return a.messages.Update(genCtx, summaryMessage)
692 },
693 })
694 if err != nil {
695 isCancelErr := errors.Is(err, context.Canceled)
696 if isCancelErr {
697 // User cancelled summarize we need to remove the summary message.
698 deleteErr := a.messages.Delete(ctx, summaryMessage.ID)
699 return deleteErr
700 }
701 return err
702 }
703
704 summaryMessage.AddFinish(message.FinishReasonEndTurn, "", "")
705 err = a.messages.Update(genCtx, summaryMessage)
706 if err != nil {
707 return err
708 }
709
710 var openrouterCost *float64
711 for _, step := range resp.Steps {
712 stepCost := a.openrouterCost(step.ProviderMetadata)
713 if stepCost != nil {
714 newCost := *stepCost
715 if openrouterCost != nil {
716 newCost += *openrouterCost
717 }
718 openrouterCost = &newCost
719 }
720 }
721
722 a.updateSessionUsage(largeModel, ¤tSession, resp.TotalUsage, openrouterCost)
723
724 // Just in case, get just the last usage info.
725 usage := resp.Response.Usage
726 currentSession.SummaryMessageID = summaryMessage.ID
727 currentSession.CompletionTokens = usage.OutputTokens
728 currentSession.PromptTokens = 0
729 _, err = a.sessions.SaveWithModels(genCtx, currentSession, map[config.SelectedModelType]config.SelectedModel{
730 config.SelectedModelTypeLarge: a.largeModel.Get().ModelCfg,
731 config.SelectedModelTypeSmall: a.smallModel.Get().ModelCfg,
732 })
733 return err
734}
735
736func (a *sessionAgent) getCacheControlOptions() fantasy.ProviderOptions {
737 if t, _ := strconv.ParseBool(os.Getenv("CRUSH_DISABLE_ANTHROPIC_CACHE")); t {
738 return fantasy.ProviderOptions{}
739 }
740 return fantasy.ProviderOptions{
741 anthropic.Name: &anthropic.ProviderCacheControlOptions{
742 CacheControl: anthropic.CacheControl{Type: "ephemeral"},
743 },
744 bedrock.Name: &anthropic.ProviderCacheControlOptions{
745 CacheControl: anthropic.CacheControl{Type: "ephemeral"},
746 },
747 vercel.Name: &anthropic.ProviderCacheControlOptions{
748 CacheControl: anthropic.CacheControl{Type: "ephemeral"},
749 },
750 }
751}
752
753func (a *sessionAgent) createUserMessage(ctx context.Context, call SessionAgentCall) (message.Message, error) {
754 parts := []message.ContentPart{message.TextContent{Text: call.Prompt}}
755 var attachmentParts []message.ContentPart
756 for _, attachment := range call.Attachments {
757 attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
758 }
759 parts = append(parts, attachmentParts...)
760 msg, err := a.messages.Create(ctx, call.SessionID, message.CreateMessageParams{
761 Role: message.User,
762 Parts: parts,
763 })
764 if err != nil {
765 return message.Message{}, fmt.Errorf("failed to create user message: %w", err)
766 }
767 return msg, nil
768}
769
// preparePrompt converts stored session messages into the fantasy message
// history and collects non-text attachments as file parts.
//
// It skips empty messages and assistant messages that were cancelled before
// producing any content, and drops tool results whose originating tool call
// is missing (see filterOrphanedToolResults). For top-level agents a todo
// reminder is prepended to the history.
func (a *sessionAgent) preparePrompt(msgs []message.Message, attachments ...message.Attachment) ([]fantasy.Message, []fantasy.FilePart) {
 var history []fantasy.Message
 if !a.isSubAgent {
  history = append(history, fantasy.NewUserMessage(
   fmt.Sprintf("<system_reminder>%s</system_reminder>",
    `This is a reminder that your todo list is currently empty. DO NOT mention this to the user explicitly because they are already aware.
If you are working on tasks that would benefit from a todo list please use the "todos" tool to create one.
If not, please feel free to ignore. Again do not mention this message to the user.`,
   ),
  ))
 }
 // Collect all tool call IDs present in assistant messages.
 knownToolCallIDs := make(map[string]struct{})
 for _, m := range msgs {
  if m.Role == message.Assistant {
   for _, tc := range m.ToolCalls() {
    knownToolCallIDs[tc.ID] = struct{}{}
   }
  }
 }

 for _, m := range msgs {
  if len(m.Parts) == 0 {
   continue
  }
  // Assistant message without content or tool calls (cancelled before it
  // returned anything).
  if m.Role == message.Assistant && len(m.ToolCalls()) == 0 && m.Content().Text == "" && m.ReasoningContent().String() == "" {
   continue
  }
  if m.Role == message.Tool {
   if msg, ok := filterOrphanedToolResults(m, knownToolCallIDs); ok {
    history = append(history, msg)
   }
   continue
  }
  history = append(history, m.ToAIMessage()...)
 }

 // Text attachments are inlined into the prompt elsewhere; only binary
 // attachments become file parts.
 var files []fantasy.FilePart
 for _, attachment := range attachments {
  if attachment.IsText() {
   continue
  }
  files = append(files, fantasy.FilePart{
   Filename: attachment.FileName,
   Data: attachment.Content,
   MediaType: attachment.MimeType,
  })
 }

 return history, files
}
823
824// filterOrphanedToolResults converts a tool message to a fantasy.Message,
825// dropping any tool result parts whose tool_call_id has no matching tool call
826// in the known set. An orphaned result causes API validation to fail on every
827// subsequent turn, permanently locking the session. Returns the filtered
828// message and true if at least one valid part remains.
829func filterOrphanedToolResults(m message.Message, knownToolCallIDs map[string]struct{}) (fantasy.Message, bool) {
830 aiMsgs := m.ToAIMessage()
831 if len(aiMsgs) == 0 {
832 return fantasy.Message{}, false
833 }
834 var validParts []fantasy.MessagePart
835 for _, part := range aiMsgs[0].Content {
836 tr, ok := fantasy.AsMessagePart[fantasy.ToolResultPart](part)
837 if !ok {
838 validParts = append(validParts, part)
839 continue
840 }
841 if _, known := knownToolCallIDs[tr.ToolCallID]; known {
842 validParts = append(validParts, part)
843 } else {
844 slog.Warn("Dropping orphaned tool result with no matching tool call",
845 "tool_call_id", tr.ToolCallID,
846 )
847 }
848 }
849 if len(validParts) == 0 {
850 return fantasy.Message{}, false
851 }
852 msg := aiMsgs[0]
853 msg.Content = validParts
854 return msg, true
855}
856
857func (a *sessionAgent) getSessionMessages(ctx context.Context, session session.Session) ([]message.Message, error) {
858 msgs, err := a.messages.List(ctx, session.ID)
859 if err != nil {
860 return nil, fmt.Errorf("failed to list messages: %w", err)
861 }
862
863 if session.SummaryMessageID != "" {
864 summaryMsgIndex := -1
865 for i, msg := range msgs {
866 if msg.ID == session.SummaryMessageID {
867 summaryMsgIndex = i
868 break
869 }
870 }
871 if summaryMsgIndex != -1 {
872 msgs = msgs[summaryMsgIndex:]
873 msgs[0].Role = message.User
874 }
875 }
876 return msgs, nil
877}
878
// generateTitle generates a session title based on the initial prompt.
880func (a *sessionAgent) generateTitle(ctx context.Context, sessionID string, userPrompt string) {
881 if userPrompt == "" {
882 return
883 }
884
885 smallModel := a.smallModel.Get()
886 largeModel := a.largeModel.Get()
887 systemPromptPrefix := a.systemPromptPrefix.Get()
888
889 var maxOutputTokens int64 = 40
890 if smallModel.CatwalkCfg.CanReason {
891 maxOutputTokens = smallModel.CatwalkCfg.DefaultMaxTokens
892 }
893
894 newAgent := func(m fantasy.LanguageModel, p []byte, tok int64) fantasy.Agent {
895 return fantasy.NewAgent(m,
896 fantasy.WithSystemPrompt(string(p)+"\n /no_think"),
897 fantasy.WithMaxOutputTokens(tok),
898 fantasy.WithUserAgent(userAgent),
899 )
900 }
901
902 streamCall := fantasy.AgentStreamCall{
903 Prompt: fmt.Sprintf("Generate a concise title for the following content:\n\n%s\n <think>\n\n</think>", userPrompt),
904 PrepareStep: func(callCtx context.Context, opts fantasy.PrepareStepFunctionOptions) (_ context.Context, prepared fantasy.PrepareStepResult, err error) {
905 prepared.Messages = opts.Messages
906 if systemPromptPrefix != "" {
907 prepared.Messages = append([]fantasy.Message{
908 fantasy.NewSystemMessage(systemPromptPrefix),
909 }, prepared.Messages...)
910 }
911 return callCtx, prepared, nil
912 },
913 }
914
915 // Use the small model to generate the title.
916 model := smallModel
917 agent := newAgent(model.Model, titlePrompt, maxOutputTokens)
918 resp, err := agent.Stream(ctx, streamCall)
919 if err == nil {
920 // We successfully generated a title with the small model.
921 slog.Debug("Generated title with small model")
922 } else {
923 // It didn't work. Let's try with the big model.
924 slog.Error("Error generating title with small model; trying big model", "err", err)
925 model = largeModel
926 agent = newAgent(model.Model, titlePrompt, maxOutputTokens)
927 resp, err = agent.Stream(ctx, streamCall)
928 if err == nil {
929 slog.Debug("Generated title with large model")
930 } else {
931 // Welp, the large model didn't work either. Use the default
932 // session name and return.
933 slog.Error("Error generating title with large model", "err", err)
934 saveErr := a.sessions.Rename(ctx, sessionID, DefaultSessionName)
935 if saveErr != nil {
936 slog.Error("Failed to save session title", "error", saveErr)
937 }
938 return
939 }
940 }
941
942 if resp == nil {
943 // Actually, we didn't get a response so we can't. Use the default
944 // session name and return.
945 slog.Error("Response is nil; can't generate title")
946 saveErr := a.sessions.Rename(ctx, sessionID, DefaultSessionName)
947 if saveErr != nil {
948 slog.Error("Failed to save session title", "error", saveErr)
949 }
950 return
951 }
952
953 // Clean up title.
954 var title string
955 title = strings.ReplaceAll(resp.Response.Content.Text(), "\n", " ")
956
957 // Remove thinking tags if present.
958 title = thinkTagRegex.ReplaceAllString(title, "")
959 title = orphanThinkTagRegex.ReplaceAllString(title, "")
960
961 title = strings.TrimSpace(title)
962 title = cmp.Or(title, DefaultSessionName)
963
964 // Calculate usage and cost.
965 var openrouterCost *float64
966 for _, step := range resp.Steps {
967 stepCost := a.openrouterCost(step.ProviderMetadata)
968 if stepCost != nil {
969 newCost := *stepCost
970 if openrouterCost != nil {
971 newCost += *openrouterCost
972 }
973 openrouterCost = &newCost
974 }
975 }
976
977 modelConfig := model.CatwalkCfg
978 cost := modelConfig.CostPer1MInCached/1e6*float64(resp.TotalUsage.CacheCreationTokens) +
979 modelConfig.CostPer1MOutCached/1e6*float64(resp.TotalUsage.CacheReadTokens) +
980 modelConfig.CostPer1MIn/1e6*float64(resp.TotalUsage.InputTokens) +
981 modelConfig.CostPer1MOut/1e6*float64(resp.TotalUsage.OutputTokens)
982
983 // Use override cost if available (e.g., from OpenRouter).
984 if openrouterCost != nil {
985 cost = *openrouterCost
986 }
987
988 promptTokens := resp.TotalUsage.InputTokens + resp.TotalUsage.CacheCreationTokens
989 completionTokens := resp.TotalUsage.OutputTokens
990
991 // Atomically update only title and usage fields to avoid overriding other
992 // concurrent session updates.
993 saveErr := a.sessions.UpdateTitleAndUsage(ctx, sessionID, title, promptTokens, completionTokens, cost)
994 if saveErr != nil {
995 slog.Error("Failed to save session title and usage", "error", saveErr)
996 return
997 }
998}
999
1000func (a *sessionAgent) openrouterCost(metadata fantasy.ProviderMetadata) *float64 {
1001 openrouterMetadata, ok := metadata[openrouter.Name]
1002 if !ok {
1003 return nil
1004 }
1005
1006 opts, ok := openrouterMetadata.(*openrouter.ProviderMetadata)
1007 if !ok {
1008 return nil
1009 }
1010 return &opts.Usage.Cost
1011}
1012
1013func (a *sessionAgent) updateSessionUsage(model Model, session *session.Session, usage fantasy.Usage, overrideCost *float64) {
1014 modelConfig := model.CatwalkCfg
1015 cost := modelConfig.CostPer1MInCached/1e6*float64(usage.CacheCreationTokens) +
1016 modelConfig.CostPer1MOutCached/1e6*float64(usage.CacheReadTokens) +
1017 modelConfig.CostPer1MIn/1e6*float64(usage.InputTokens) +
1018 modelConfig.CostPer1MOut/1e6*float64(usage.OutputTokens)
1019
1020 a.eventTokensUsed(session.ID, model, usage, cost)
1021
1022 if overrideCost != nil {
1023 session.Cost += *overrideCost
1024 } else {
1025 session.Cost += cost
1026 }
1027
1028 session.CompletionTokens = usage.OutputTokens
1029 session.PromptTokens = usage.InputTokens + usage.CacheReadTokens
1030}
1031
1032func (a *sessionAgent) Cancel(sessionID string) {
1033 // Cancel regular requests. Don't use Take() here - we need the entry to
1034 // remain in activeRequests so IsBusy() returns true until the goroutine
1035 // fully completes (including error handling that may access the DB).
1036 // The defer in processRequest will clean up the entry.
1037 if cancel, ok := a.activeRequests.Get(sessionID); ok && cancel != nil {
1038 slog.Debug("Request cancellation initiated", "session_id", sessionID)
1039 cancel()
1040 }
1041
1042 // Also check for summarize requests.
1043 if cancel, ok := a.activeRequests.Get(sessionID + "-summarize"); ok && cancel != nil {
1044 slog.Debug("Summarize cancellation initiated", "session_id", sessionID)
1045 cancel()
1046 }
1047
1048 if a.QueuedPrompts(sessionID) > 0 {
1049 slog.Debug("Clearing queued prompts", "session_id", sessionID)
1050 a.messageQueue.Del(sessionID)
1051 }
1052}
1053
1054func (a *sessionAgent) ClearQueue(sessionID string) {
1055 if a.QueuedPrompts(sessionID) > 0 {
1056 slog.Debug("Clearing queued prompts", "session_id", sessionID)
1057 a.messageQueue.Del(sessionID)
1058 }
1059}
1060
1061func (a *sessionAgent) CancelAll() {
1062 if !a.IsBusy() {
1063 return
1064 }
1065 for key := range a.activeRequests.Seq2() {
1066 a.Cancel(key) // key is sessionID
1067 }
1068
1069 timeout := time.After(5 * time.Second)
1070 for a.IsBusy() {
1071 select {
1072 case <-timeout:
1073 return
1074 default:
1075 time.Sleep(200 * time.Millisecond)
1076 }
1077 }
1078}
1079
1080func (a *sessionAgent) IsBusy() bool {
1081 var busy bool
1082 for cancelFunc := range a.activeRequests.Seq() {
1083 if cancelFunc != nil {
1084 busy = true
1085 break
1086 }
1087 }
1088 return busy
1089}
1090
1091func (a *sessionAgent) IsSessionBusy(sessionID string) bool {
1092 _, busy := a.activeRequests.Get(sessionID)
1093 return busy
1094}
1095
1096func (a *sessionAgent) QueuedPrompts(sessionID string) int {
1097 l, ok := a.messageQueue.Get(sessionID)
1098 if !ok {
1099 return 0
1100 }
1101 return len(l)
1102}
1103
1104func (a *sessionAgent) QueuedPromptsList(sessionID string) []string {
1105 l, ok := a.messageQueue.Get(sessionID)
1106 if !ok {
1107 return nil
1108 }
1109 prompts := make([]string, len(l))
1110 for i, call := range l {
1111 prompts[i] = call.Prompt
1112 }
1113 return prompts
1114}
1115
// SetModels swaps both the large and small models used by the agent.
// Each swap is individually atomic; a concurrent reader may briefly
// observe the new large model paired with the old small model.
func (a *sessionAgent) SetModels(large Model, small Model) {
	a.largeModel.Set(large)
	a.smallModel.Set(small)
}
1120
// SetTools replaces the agent's tool set with the given slice.
func (a *sessionAgent) SetTools(tools []fantasy.AgentTool) {
	a.tools.SetSlice(tools)
}
1124
// SetSystemPrompt replaces the system prompt used for subsequent requests.
func (a *sessionAgent) SetSystemPrompt(systemPrompt string) {
	a.systemPrompt.Set(systemPrompt)
}
1128
// Model returns the agent's current large model.
func (a *sessionAgent) Model() Model {
	return a.largeModel.Get()
}
1132
1133// convertToToolResult converts a fantasy tool result to a message tool result.
1134func (a *sessionAgent) convertToToolResult(result fantasy.ToolResultContent) message.ToolResult {
1135 baseResult := message.ToolResult{
1136 ToolCallID: result.ToolCallID,
1137 Name: result.ToolName,
1138 Metadata: result.ClientMetadata,
1139 }
1140
1141 switch result.Result.GetType() {
1142 case fantasy.ToolResultContentTypeText:
1143 if r, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentText](result.Result); ok {
1144 baseResult.Content = r.Text
1145 }
1146 case fantasy.ToolResultContentTypeError:
1147 if r, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentError](result.Result); ok {
1148 baseResult.Content = r.Error.Error()
1149 baseResult.IsError = true
1150 }
1151 case fantasy.ToolResultContentTypeMedia:
1152 if r, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentMedia](result.Result); ok {
1153 if !stringext.IsValidBase64(r.Data) {
1154 slog.Warn("Tool returned media with invalid base64 data, discarding image",
1155 "tool", result.ToolName,
1156 "tool_call_id", result.ToolCallID,
1157 )
1158 baseResult.Content = "Tool returned image data with invalid encoding"
1159 baseResult.IsError = true
1160 } else {
1161 content := r.Text
1162 if content == "" {
1163 content = fmt.Sprintf("Loaded %s content", r.MediaType)
1164 }
1165 baseResult.Content = content
1166 baseResult.Data = r.Data
1167 baseResult.MIMEType = r.MediaType
1168 }
1169 }
1170 }
1171
1172 return baseResult
1173}
1174
1175// workaroundProviderMediaLimitations converts media content in tool results to
1176// user messages for providers that don't natively support images in tool results.
1177//
1178// Problem: OpenAI, Google, OpenRouter, and other OpenAI-compatible providers
1179// don't support sending images/media in tool result messages - they only accept
1180// text in tool results. However, they DO support images in user messages.
1181//
1182// If we send media in tool results to these providers, the API returns an error.
1183//
1184// Solution: For these providers, we:
1185// 1. Replace the media in the tool result with a text placeholder
1186// 2. Inject a user message immediately after with the image as a file attachment
1187// 3. This maintains the tool execution flow while working around API limitations
1188//
1189// Anthropic and Bedrock support images natively in tool results, so we skip
1190// this workaround for them.
1191//
1192// Example transformation:
1193//
1194// BEFORE: [tool result: image data]
1195// AFTER: [tool result: "Image loaded - see attached"], [user: image attachment]
1196func (a *sessionAgent) workaroundProviderMediaLimitations(messages []fantasy.Message, largeModel Model) []fantasy.Message {
1197 providerSupportsMedia := largeModel.ModelCfg.Provider == string(catwalk.InferenceProviderAnthropic) ||
1198 largeModel.ModelCfg.Provider == string(catwalk.InferenceProviderBedrock)
1199
1200 if providerSupportsMedia {
1201 return messages
1202 }
1203
1204 convertedMessages := make([]fantasy.Message, 0, len(messages))
1205
1206 for _, msg := range messages {
1207 if msg.Role != fantasy.MessageRoleTool {
1208 convertedMessages = append(convertedMessages, msg)
1209 continue
1210 }
1211
1212 textParts := make([]fantasy.MessagePart, 0, len(msg.Content))
1213 var mediaFiles []fantasy.FilePart
1214
1215 for _, part := range msg.Content {
1216 toolResult, ok := fantasy.AsMessagePart[fantasy.ToolResultPart](part)
1217 if !ok {
1218 textParts = append(textParts, part)
1219 continue
1220 }
1221
1222 if media, ok := fantasy.AsToolResultOutputType[fantasy.ToolResultOutputContentMedia](toolResult.Output); ok {
1223 decoded, err := base64.StdEncoding.DecodeString(media.Data)
1224 if err != nil {
1225 slog.Warn("Failed to decode media data", "error", err)
1226 textParts = append(textParts, part)
1227 continue
1228 }
1229
1230 mediaFiles = append(mediaFiles, fantasy.FilePart{
1231 Data: decoded,
1232 MediaType: media.MediaType,
1233 Filename: fmt.Sprintf("tool-result-%s", toolResult.ToolCallID),
1234 })
1235
1236 textParts = append(textParts, fantasy.ToolResultPart{
1237 ToolCallID: toolResult.ToolCallID,
1238 Output: fantasy.ToolResultOutputContentText{
1239 Text: "[Image/media content loaded - see attached file]",
1240 },
1241 ProviderOptions: toolResult.ProviderOptions,
1242 })
1243 } else {
1244 textParts = append(textParts, part)
1245 }
1246 }
1247
1248 convertedMessages = append(convertedMessages, fantasy.Message{
1249 Role: fantasy.MessageRoleTool,
1250 Content: textParts,
1251 })
1252
1253 if len(mediaFiles) > 0 {
1254 convertedMessages = append(convertedMessages, fantasy.NewUserMessage(
1255 "Here is the media content from the tool result:",
1256 mediaFiles...,
1257 ))
1258 }
1259 }
1260
1261 return convertedMessages
1262}
1263
1264// buildSummaryPrompt constructs the prompt text for session summarization.
1265func buildSummaryPrompt(todos []session.Todo) string {
1266 var sb strings.Builder
1267 sb.WriteString("Provide a detailed summary of our conversation above.")
1268 if len(todos) > 0 {
1269 sb.WriteString("\n\n## Current Todo List\n\n")
1270 for _, t := range todos {
1271 fmt.Fprintf(&sb, "- [%s] %s\n", t.Status, t.Content)
1272 }
1273 sb.WriteString("\nInclude these tasks and their statuses in your summary. ")
1274 sb.WriteString("Instruct the resuming assistant to use the `todos` tool to continue tracking progress on these tasks.")
1275 }
1276 return sb.String()
1277}