agent.go

package agent

import (
	"context"
	_ "embed"
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"strings"
	"sync"
	"time"

	"github.com/charmbracelet/catwalk/pkg/catwalk"
	"github.com/charmbracelet/crush/internal/agent/tools"
	"github.com/charmbracelet/crush/internal/config"
	"github.com/charmbracelet/crush/internal/csync"
	"github.com/charmbracelet/crush/internal/message"
	"github.com/charmbracelet/crush/internal/permission"
	"github.com/charmbracelet/crush/internal/session"
	"github.com/charmbracelet/fantasy/ai"
	"github.com/charmbracelet/fantasy/anthropic"
)

//go:embed templates/title.md
var titlePrompt []byte

//go:embed templates/summary.md
var summaryPrompt []byte

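// SessionAgentCall describes a single prompt request for a session, including
// attachments and sampling parameters.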
type SessionAgentCall struct {
	SessionID        string
	Prompt           string
	ProviderOptions  ai.ProviderOptions
	Attachments      []message.Attachment
	MaxOutputTokens  int64
	Temperature      *float64
	TopP             *float64
	TopK             *int64
	FrequencyPenalty *float64
	PresencePenalty  *float64
}

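// SessionAgent runs prompts for sessions: it streams model output, persists
// messages, queues prompts that arrive while a session is busy, and can
// summarize a session's history.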
type SessionAgent interface {
	Run(context.Context, SessionAgentCall) (*ai.AgentResult, error)
	SetModels(large Model, small Model)
	SetTools(tools []ai.AgentTool)
	Cancel(sessionID string)
	CancelAll()
	IsSessionBusy(sessionID string) bool
	IsBusy() bool
	QueuedPrompts(sessionID string) int
	ClearQueue(sessionID string)
	Summarize(context.Context, string) error
	Model() Model
}

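// Model pairs a language model with its catwalk metadata and the selected
// model configuration.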
type Model struct {
	Model      ai.LanguageModel
	CatwalkCfg catwalk.Model
	ModelCfg   config.SelectedModel
}

type sessionAgent struct {
	largeModel           Model
	smallModel           Model
	systemPrompt         string
	tools                []ai.AgentTool
	sessions             session.Service
	messages             message.Service
	disableAutoSummarize bool

	messageQueue   *csync.Map[string, []SessionAgentCall]
	activeRequests *csync.Map[string, context.CancelFunc]
}

type SessionAgentOptions struct {
	LargeModel           Model
	SmallModel           Model
	SystemPrompt         string
	DisableAutoSummarize bool
	Sessions             session.Service
	Messages             message.Service
	Tools                []ai.AgentTool
}

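// NewSessionAgent constructs a SessionAgent from the given options.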
func NewSessionAgent(
	opts SessionAgentOptions,
) SessionAgent {
	return &sessionAgent{
		largeModel:           opts.LargeModel,
		smallModel:           opts.SmallModel,
		systemPrompt:         opts.SystemPrompt,
		sessions:             opts.Sessions,
		messages:             opts.Messages,
		disableAutoSummarize: opts.DisableAutoSummarize,
		tools:                opts.Tools,
		messageQueue:         csync.NewMap[string, []SessionAgentCall](),
		activeRequests:       csync.NewMap[string, context.CancelFunc](),
	}
}

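// Run executes a prompt against the given session. If the session already has
// an active request the call is queued and (nil, nil) is returned; queued
// prompts are picked up by the active run or replayed when it finishes.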
func (a *sessionAgent) Run(ctx context.Context, call SessionAgentCall) (*ai.AgentResult, error) {
	if call.Prompt == "" {
		return nil, ErrEmptyPrompt
	}
	if call.SessionID == "" {
		return nil, ErrSessionMissing
	}

	// Queue the message if busy
	if a.IsSessionBusy(call.SessionID) {
		existing, ok := a.messageQueue.Get(call.SessionID)
		if !ok {
			existing = []SessionAgentCall{}
		}
		existing = append(existing, call)
		a.messageQueue.Set(call.SessionID, existing)
		return nil, nil
	}

	if len(a.tools) > 0 {
		// add anthropic caching to the last tool
		a.tools[len(a.tools)-1].SetProviderOptions(a.getCacheControlOptions())
	}

	agent := ai.NewAgent(
		a.largeModel.Model,
		ai.WithSystemPrompt(a.systemPrompt),
		ai.WithTools(a.tools...),
	)

	var sessionLock sync.Mutex
	currentSession, err := a.sessions.Get(ctx, call.SessionID)
	if err != nil {
		return nil, fmt.Errorf("failed to get session: %w", err)
	}

	msgs, err := a.getSessionMessages(ctx, currentSession)
	if err != nil {
		return nil, fmt.Errorf("failed to get session messages: %w", err)
	}

	var wg sync.WaitGroup
	// Generate a title if this is the first message
	if len(msgs) == 0 {
		wg.Go(func() {
			sessionLock.Lock()
			a.generateTitle(ctx, &currentSession, call.Prompt)
			sessionLock.Unlock()
		})
	}

	// Add the user message to the session
	_, err = a.createUserMessage(ctx, call)
	if err != nil {
		return nil, err
	}

	// add the session ID to the context
	ctx = context.WithValue(ctx, tools.SessionIDContextKey, call.SessionID)

	genCtx, cancel := context.WithCancel(ctx)
	a.activeRequests.Set(call.SessionID, cancel)

	defer cancel()
	defer a.activeRequests.Del(call.SessionID)

	history, files := a.preparePrompt(msgs, call.Attachments...)

	var currentAssistant *message.Message
	var shouldSummarize bool
	result, err := agent.Stream(genCtx, ai.AgentStreamCall{
		Prompt:           call.Prompt,
		Files:            files,
		Messages:         history,
		ProviderOptions:  call.ProviderOptions,
		MaxOutputTokens:  &call.MaxOutputTokens,
		TopP:             call.TopP,
		Temperature:      call.Temperature,
		PresencePenalty:  call.PresencePenalty,
		TopK:             call.TopK,
		FrequencyPenalty: call.FrequencyPenalty,
		// Before each step, create a new assistant message
		PrepareStep: func(callContext context.Context, options ai.PrepareStepFunctionOptions) (_ context.Context, prepared ai.PrepareStepResult, err error) {
			var assistantMsg message.Message
			assistantMsg, err = a.messages.Create(callContext, call.SessionID, message.CreateMessageParams{
				Role:     message.Assistant,
				Parts:    []message.ContentPart{},
				Model:    a.largeModel.ModelCfg.Model,
				Provider: a.largeModel.ModelCfg.Provider,
			})
			if err != nil {
				return callContext, prepared, err
			}

			callContext = context.WithValue(callContext, tools.MessageIDContextKey, assistantMsg.ID)

			currentAssistant = &assistantMsg

			prepared.Messages = options.Messages
			// reset all cached items
			for i := range prepared.Messages {
				prepared.Messages[i].ProviderOptions = nil
			}

			queuedCalls, _ := a.messageQueue.Get(call.SessionID)
			a.messageQueue.Del(call.SessionID)
			for _, queued := range queuedCalls {
				userMessage, createErr := a.createUserMessage(callContext, queued)
				if createErr != nil {
					return callContext, prepared, createErr
				}
				prepared.Messages = append(prepared.Messages, userMessage.ToAIMessage()...)
			}

			lastSystemRoleIdx := 0
			systemMessageUpdated := false
			for i, msg := range prepared.Messages {
				// only add cache control to the last system message
				if msg.Role == ai.MessageRoleSystem {
					lastSystemRoleIdx = i
				} else if !systemMessageUpdated {
					prepared.Messages[lastSystemRoleIdx].ProviderOptions = a.getCacheControlOptions()
					systemMessageUpdated = true
				}
				// then add cache control to the last 2 messages
				if i > len(prepared.Messages)-3 {
					prepared.Messages[i].ProviderOptions = a.getCacheControlOptions()
				}
			}
			return callContext, prepared, err
		},
		OnReasoningDelta: func(id string, text string) error {
			currentAssistant.AppendReasoningContent(text)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
			// handle anthropic signature
			if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
				if reasoning, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok {
					currentAssistant.AppendReasoningSignature(reasoning.Signature)
				}
			}
			currentAssistant.FinishThinking()
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnTextDelta: func(id string, text string) error {
			currentAssistant.AppendContent(text)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnToolInputStart: func(id string, toolName string) error {
			toolCall := message.ToolCall{
				ID:               id,
				Name:             toolName,
				ProviderExecuted: false,
				Finished:         false,
			}
			currentAssistant.AddToolCall(toolCall)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnRetry: func(err *ai.APICallError, delay time.Duration) {
			// TODO: implement
		},
		OnToolCall: func(tc ai.ToolCallContent) error {
			toolCall := message.ToolCall{
				ID:               tc.ToolCallID,
				Name:             tc.ToolName,
				Input:            tc.Input,
				ProviderExecuted: false,
				Finished:         true,
			}
			currentAssistant.AddToolCall(toolCall)
			return a.messages.Update(genCtx, *currentAssistant)
		},
		OnToolResult: func(result ai.ToolResultContent) error {
			var resultContent string
			isError := false
			switch result.Result.GetType() {
			case ai.ToolResultContentTypeText:
				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentText](result.Result)
				if ok {
					resultContent = r.Text
				}
			case ai.ToolResultContentTypeError:
				r, ok := ai.AsToolResultOutputType[ai.ToolResultOutputContentError](result.Result)
				if ok {
					isError = true
					resultContent = r.Error.Error()
				}
			case ai.ToolResultContentTypeMedia:
				// TODO: handle this message type
			}
			toolResult := message.ToolResult{
				ToolCallID: result.ToolCallID,
				Name:       result.ToolName,
				Content:    resultContent,
				IsError:    isError,
				Metadata:   result.ClientMetadata,
			}
			_, createMsgErr := a.messages.Create(genCtx, currentAssistant.SessionID, message.CreateMessageParams{
				Role: message.Tool,
				Parts: []message.ContentPart{
					toolResult,
				},
			})
			if createMsgErr != nil {
				return createMsgErr
			}
			return nil
		},
		OnStepFinish: func(stepResult ai.StepResult) error {
			finishReason := message.FinishReasonUnknown
			switch stepResult.FinishReason {
			case ai.FinishReasonLength:
				finishReason = message.FinishReasonMaxTokens
			case ai.FinishReasonStop:
				finishReason = message.FinishReasonEndTurn
			case ai.FinishReasonToolCalls:
				finishReason = message.FinishReasonToolUse
			}
			currentAssistant.AddFinish(finishReason, "", "")
			a.updateSessionUsage(a.largeModel, &currentSession, stepResult.Usage)
			sessionLock.Lock()
			_, sessionErr := a.sessions.Save(genCtx, currentSession)
			sessionLock.Unlock()
			if sessionErr != nil {
				return sessionErr
			}
			return a.messages.Update(genCtx, *currentAssistant)
		},
		StopWhen: []ai.StopCondition{
			func(_ []ai.StepResult) bool {
				contextWindow := a.largeModel.CatwalkCfg.ContextWindow
				tokens := currentSession.CompletionTokens + currentSession.PromptTokens
				percentage := (float64(tokens) / float64(contextWindow)) * 100
				if percentage > 80 && !a.disableAutoSummarize {
					shouldSummarize = true
					return true
				}
				return false
			},
		},
	})
	if err != nil {
		isCancelErr := errors.Is(err, context.Canceled)
		isPermissionErr := errors.Is(err, permission.ErrorPermissionDenied)
		if currentAssistant == nil {
			return result, err
		}
		toolCalls := currentAssistant.ToolCalls()
		// INFO: we use the parent context here because the genCtx has been cancelled
		msgs, listErr := a.messages.List(ctx, currentAssistant.SessionID)
		if listErr != nil {
			return nil, listErr
		}
		for _, tc := range toolCalls {
			if !tc.Finished {
				tc.Finished = true
				tc.Input = "{}"
				currentAssistant.AddToolCall(tc)
			}

			found := false
			for _, msg := range msgs {
				if msg.Role == message.Tool {
					for _, tr := range msg.ToolResults() {
						if tr.ToolCallID == tc.ID {
							found = true
							break
						}
					}
				}
				if found {
					break
				}
			}
			if found {
				continue
			}
			content := "There was an error while executing the tool"
			if isCancelErr {
				content = "Tool execution canceled by user"
			} else if isPermissionErr {
				content = "Permission denied"
			}
			toolResult := message.ToolResult{
				ToolCallID: tc.ID,
				Name:       tc.Name,
				Content:    content,
				IsError:    true,
			}
			_, createErr := a.messages.Create(context.Background(), currentAssistant.SessionID, message.CreateMessageParams{
				Role: message.Tool,
				Parts: []message.ContentPart{
					toolResult,
				},
			})
			if createErr != nil {
				return nil, createErr
			}
		}
		if isCancelErr {
			currentAssistant.AddFinish(message.FinishReasonCanceled, "Request cancelled", "")
		} else if isPermissionErr {
			currentAssistant.AddFinish(message.FinishReasonPermissionDenied, "Permission denied", "")
		} else {
			currentAssistant.AddFinish(message.FinishReasonError, "API Error", err.Error())
		}
		// INFO: we use the parent context here because the genCtx has been cancelled
		updateErr := a.messages.Update(ctx, *currentAssistant)
		if updateErr != nil {
			return nil, updateErr
		}
		return nil, err
	}
	wg.Wait()

	if shouldSummarize {
		a.activeRequests.Del(call.SessionID)
		if summarizeErr := a.Summarize(genCtx, call.SessionID); summarizeErr != nil {
			return nil, summarizeErr
		}
	}

	queuedMessages, ok := a.messageQueue.Get(call.SessionID)
	if !ok || len(queuedMessages) == 0 {
		return result, err
	}
	// there are queued messages; restart the loop with the first one
	firstQueuedMessage := queuedMessages[0]
	a.messageQueue.Set(call.SessionID, queuedMessages[1:])
	return a.Run(genCtx, firstQueuedMessage)
}

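// Summarize generates a summary of the session's conversation, stores it as a
// summary message, and resets the session's token counts to reflect the new
// summary. It returns ErrSessionBusy if the session has an active request.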
func (a *sessionAgent) Summarize(ctx context.Context, sessionID string) error {
	if a.IsSessionBusy(sessionID) {
		return ErrSessionBusy
	}

	currentSession, err := a.sessions.Get(ctx, sessionID)
	if err != nil {
		return fmt.Errorf("failed to get session: %w", err)
	}
	msgs, err := a.getSessionMessages(ctx, currentSession)
	if err != nil {
		return err
	}
	if len(msgs) == 0 {
		// nothing to summarize
		return nil
	}

	aiMsgs, _ := a.preparePrompt(msgs)

	genCtx, cancel := context.WithCancel(ctx)
	a.activeRequests.Set(sessionID, cancel)
	defer a.activeRequests.Del(sessionID)
	defer cancel()

	agent := ai.NewAgent(a.largeModel.Model,
		ai.WithSystemPrompt(string(summaryPrompt)),
	)
	summaryMessage, err := a.messages.Create(ctx, sessionID, message.CreateMessageParams{
		Role:             message.Assistant,
		Model:            a.largeModel.Model.Model(),
		Provider:         a.largeModel.Model.Provider(),
		IsSummaryMessage: true,
	})
	if err != nil {
		return err
	}

	resp, err := agent.Stream(genCtx, ai.AgentStreamCall{
		Prompt:   "Provide a detailed summary of our conversation above.",
		Messages: aiMsgs,
		OnReasoningDelta: func(id string, text string) error {
			summaryMessage.AppendReasoningContent(text)
			return a.messages.Update(genCtx, summaryMessage)
		},
		OnReasoningEnd: func(id string, reasoning ai.ReasoningContent) error {
			// handle anthropic signature
			if anthropicData, ok := reasoning.ProviderMetadata[anthropic.Name]; ok {
				if signature, ok := anthropicData.(*anthropic.ReasoningOptionMetadata); ok && signature.Signature != "" {
					summaryMessage.AppendReasoningSignature(signature.Signature)
				}
			}
			summaryMessage.FinishThinking()
			return a.messages.Update(genCtx, summaryMessage)
		},
		OnTextDelta: func(id, text string) error {
			summaryMessage.AppendContent(text)
			return a.messages.Update(genCtx, summaryMessage)
		},
	})
	if err != nil {
		isCancelErr := errors.Is(err, context.Canceled)
		if isCancelErr {
			// User cancelled summarize; we need to remove the summary message
			deleteErr := a.messages.Delete(ctx, summaryMessage.ID)
			return deleteErr
		}
		return err
	}

	summaryMessage.AddFinish(message.FinishReasonEndTurn, "", "")
	err = a.messages.Update(genCtx, summaryMessage)
	if err != nil {
		return err
	}

	a.updateSessionUsage(a.largeModel, &currentSession, resp.TotalUsage)

	// just in case, use only the last response's usage
	usage := resp.Response.Usage
	currentSession.SummaryMessageID = summaryMessage.ID
	currentSession.CompletionTokens = usage.OutputTokens
	currentSession.PromptTokens = 0
	_, err = a.sessions.Save(genCtx, currentSession)
	return err
}

func (a *sessionAgent) getCacheControlOptions() ai.ProviderOptions {
	return ai.ProviderOptions{
		anthropic.Name: &anthropic.ProviderCacheControlOptions{
			CacheControl: anthropic.CacheControl{Type: "ephemeral"},
		},
	}
}

func (a *sessionAgent) createUserMessage(ctx context.Context, call SessionAgentCall) (message.Message, error) {
	var attachmentParts []message.ContentPart
	for _, attachment := range call.Attachments {
		attachmentParts = append(attachmentParts, message.BinaryContent{Path: attachment.FilePath, MIMEType: attachment.MimeType, Data: attachment.Content})
	}
	parts := []message.ContentPart{message.TextContent{Text: call.Prompt}}
	parts = append(parts, attachmentParts...)
	msg, err := a.messages.Create(ctx, call.SessionID, message.CreateMessageParams{
		Role:  message.User,
		Parts: parts,
	})
	if err != nil {
		return message.Message{}, fmt.Errorf("failed to create user message: %w", err)
	}
	return msg, nil
}

func (a *sessionAgent) preparePrompt(msgs []message.Message, attachments ...message.Attachment) ([]ai.Message, []ai.FilePart) {
	var history []ai.Message
	for _, m := range msgs {
		if len(m.Parts) == 0 {
			continue
		}
		// Assistant message without content or tool calls (cancelled before it returned anything)
		if m.Role == message.Assistant && len(m.ToolCalls()) == 0 && m.Content().Text == "" && m.ReasoningContent().String() == "" {
			continue
		}
		history = append(history, m.ToAIMessage()...)
	}

	var files []ai.FilePart
	for _, attachment := range attachments {
		files = append(files, ai.FilePart{
			Filename:  attachment.FileName,
			Data:      attachment.Content,
			MediaType: attachment.MimeType,
		})
	}

	return history, files
}

func (a *sessionAgent) getSessionMessages(ctx context.Context, session session.Session) ([]message.Message, error) {
	msgs, err := a.messages.List(ctx, session.ID)
	if err != nil {
		return nil, fmt.Errorf("failed to list messages: %w", err)
	}

	if session.SummaryMessageID != "" {
		summaryMsgIdx := -1
		for i, msg := range msgs {
			if msg.ID == session.SummaryMessageID {
				summaryMsgIdx = i
				break
			}
		}
		if summaryMsgIdx != -1 {
			msgs = msgs[summaryMsgIdx:]
			msgs[0].Role = message.User
		}
	}
	return msgs, nil
}

func (a *sessionAgent) generateTitle(ctx context.Context, session *session.Session, prompt string) {
	if prompt == "" {
		return
	}

	var maxOutput int64 = 40
	if a.smallModel.CatwalkCfg.CanReason {
		maxOutput = a.smallModel.CatwalkCfg.DefaultMaxTokens
	}

	agent := ai.NewAgent(a.smallModel.Model,
		ai.WithSystemPrompt(string(titlePrompt)+"\n /no_think"),
		ai.WithMaxOutputTokens(maxOutput),
	)

	resp, err := agent.Stream(ctx, ai.AgentStreamCall{
		Prompt: fmt.Sprintf("Generate a concise title for the following content:\n\n%s\n <think>\n\n</think>", prompt),
	})
	if err != nil {
		slog.Error("error generating title", "err", err)
		return
	}

	data, _ := json.Marshal(resp)
	slog.Info("title response", "data", string(data))

	title := resp.Response.Content.Text()
	title = strings.ReplaceAll(title, "\n", " ")
	slog.Info("generated title", "title", title)

	// remove thinking tags if present
	if idx := strings.Index(title, "</think>"); idx > 0 {
		title = title[idx+len("</think>"):]
	}

	title = strings.TrimSpace(title)
	if title == "" {
		slog.Warn("failed to generate title", "warn", "empty title")
		return
	}

	session.Title = title
	a.updateSessionUsage(a.smallModel, session, resp.TotalUsage)
	_, saveErr := a.sessions.Save(ctx, *session)
	if saveErr != nil {
		slog.Error("failed to save session title & usage", "error", saveErr)
		return
	}
}

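// updateSessionUsage adds the cost of the given usage to the session and
// records its latest prompt and completion token counts.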
func (a *sessionAgent) updateSessionUsage(model Model, session *session.Session, usage ai.Usage) {
	modelConfig := model.CatwalkCfg
	cost := modelConfig.CostPer1MInCached/1e6*float64(usage.CacheCreationTokens) +
		modelConfig.CostPer1MOutCached/1e6*float64(usage.CacheReadTokens) +
		modelConfig.CostPer1MIn/1e6*float64(usage.InputTokens) +
		modelConfig.CostPer1MOut/1e6*float64(usage.OutputTokens)
	session.Cost += cost
	session.CompletionTokens = usage.OutputTokens + usage.CacheReadTokens
	session.PromptTokens = usage.InputTokens + usage.CacheCreationTokens
}

func (a *sessionAgent) Cancel(sessionID string) {
	// Cancel regular requests
	if cancel, ok := a.activeRequests.Take(sessionID); ok && cancel != nil {
		slog.Info("Request cancellation initiated", "session_id", sessionID)
		cancel()
	}

	// Also check for summarize requests
	if cancel, ok := a.activeRequests.Take(sessionID + "-summarize"); ok && cancel != nil {
		slog.Info("Summarize cancellation initiated", "session_id", sessionID)
		cancel()
	}

	if a.QueuedPrompts(sessionID) > 0 {
		slog.Info("Clearing queued prompts", "session_id", sessionID)
		a.messageQueue.Del(sessionID)
	}
}

func (a *sessionAgent) ClearQueue(sessionID string) {
	if a.QueuedPrompts(sessionID) > 0 {
		slog.Info("Clearing queued prompts", "session_id", sessionID)
		a.messageQueue.Del(sessionID)
	}
}

func (a *sessionAgent) CancelAll() {
	if !a.IsBusy() {
		return
	}
	for key := range a.activeRequests.Seq2() {
		a.Cancel(key) // key is sessionID
	}

	timeout := time.After(5 * time.Second)
	for a.IsBusy() {
		select {
		case <-timeout:
			return
		default:
			time.Sleep(200 * time.Millisecond)
		}
	}
}

func (a *sessionAgent) IsBusy() bool {
	var busy bool
	for cancelFunc := range a.activeRequests.Seq() {
		if cancelFunc != nil {
			busy = true
			break
		}
	}
	return busy
}

func (a *sessionAgent) IsSessionBusy(sessionID string) bool {
	_, busy := a.activeRequests.Get(sessionID)
	return busy
}

func (a *sessionAgent) QueuedPrompts(sessionID string) int {
	l, ok := a.messageQueue.Get(sessionID)
	if !ok {
		return 0
	}
	return len(l)
}

func (a *sessionAgent) SetModels(large Model, small Model) {
	a.largeModel = large
	a.smallModel = small
}

func (a *sessionAgent) SetTools(tools []ai.AgentTool) {
	a.tools = tools
}

func (a *sessionAgent) Model() Model {
	return a.largeModel
}