1package server
2
3import (
4 "context"
5 "encoding/json"
6 "fmt"
7 "net/http"
8 "strings"
9 "time"
10
11 "shelley.exe.dev/claudetool"
12 "shelley.exe.dev/db/generated"
13 "shelley.exe.dev/llm"
14)
15
// SubagentRunner implements claudetool.SubagentRunner.
type SubagentRunner struct {
	// server provides access to the DB, LLM manager, conversation
	// managers, logger, and SSE publishing used to drive subagents.
	server *Server
}
20
21// NewSubagentRunner creates a new SubagentRunner.
22func NewSubagentRunner(s *Server) *SubagentRunner {
23 return &SubagentRunner{server: s}
24}
25
26// RunSubagent implements claudetool.SubagentRunner.
27func (r *SubagentRunner) RunSubagent(ctx context.Context, conversationID, prompt string, wait bool, timeout time.Duration) (string, error) {
28 s := r.server
29
30 // Notify the UI about the subagent conversation.
31 // This ensures the sidebar shows the subagent even if it's a newly created conversation.
32 go r.notifySubagentConversation(ctx, conversationID)
33
34 // Get or create conversation manager for the subagent
35 manager, err := s.getOrCreateConversationManager(ctx, conversationID)
36 if err != nil {
37 return "", fmt.Errorf("failed to get conversation manager: %w", err)
38 }
39
40 // Get the model ID from the server's default
41 // In predictable-only mode, use "predictable" as the model
42 modelID := s.defaultModel
43 if modelID == "" && s.predictableOnly {
44 modelID = "predictable"
45 }
46
47 // Get LLM service
48 llmService, err := s.llmManager.GetService(modelID)
49 if err != nil {
50 return "", fmt.Errorf("failed to get LLM service: %w", err)
51 }
52
53 // If the subagent is currently working, stop it first before sending new message
54 if manager.IsAgentWorking() {
55 s.logger.Info("Subagent is working, stopping before sending new message", "conversationID", conversationID)
56 if err := manager.CancelConversation(ctx); err != nil {
57 s.logger.Error("Failed to cancel subagent conversation", "error", err)
58 // Continue anyway - we still want to send the new message
59 }
60 // Re-hydrate the manager after cancellation
61 if err := manager.Hydrate(ctx); err != nil {
62 return "", fmt.Errorf("failed to hydrate after cancellation: %w", err)
63 }
64 }
65
66 // Create user message
67 userMessage := llm.Message{
68 Role: llm.MessageRoleUser,
69 Content: []llm.Content{{Type: llm.ContentTypeText, Text: prompt}},
70 }
71
72 // Accept the user message (this starts processing)
73 _, err = manager.AcceptUserMessage(ctx, llmService, modelID, userMessage)
74 if err != nil {
75 return "", fmt.Errorf("failed to accept user message: %w", err)
76 }
77
78 if !wait {
79 return fmt.Sprintf("Subagent started processing. Conversation ID: %s", conversationID), nil
80 }
81
82 // Wait for the agent to finish (or timeout)
83 return r.waitForResponse(ctx, conversationID, modelID, llmService, timeout)
84}
85
86func (r *SubagentRunner) waitForResponse(ctx context.Context, conversationID, modelID string, llmService llm.Service, timeout time.Duration) (string, error) {
87 s := r.server
88
89 deadline := time.Now().Add(timeout)
90 pollInterval := 500 * time.Millisecond
91
92 for {
93 select {
94 case <-ctx.Done():
95 return "", ctx.Err()
96 default:
97 }
98
99 if time.Now().After(deadline) {
100 // Timeout reached - generate a progress summary
101 return r.generateProgressSummary(ctx, conversationID, modelID, llmService)
102 }
103
104 // Check if agent is still working
105 working, err := r.isAgentWorking(ctx, conversationID)
106 if err != nil {
107 return "", fmt.Errorf("failed to check agent status: %w", err)
108 }
109
110 if !working {
111 // Agent is done, get the last message
112 return r.getLastAssistantResponse(ctx, conversationID)
113 }
114
115 // Wait before polling again
116 select {
117 case <-ctx.Done():
118 return "", ctx.Err()
119 case <-time.After(pollInterval):
120 }
121
122 // Don't hog the conversation manager mutex
123 s.mu.Lock()
124 if mgr, ok := s.activeConversations[conversationID]; ok {
125 mgr.Touch()
126 }
127 s.mu.Unlock()
128 }
129}
130
131func (r *SubagentRunner) isAgentWorking(ctx context.Context, conversationID string) (bool, error) {
132 s := r.server
133
134 // Get the conversation manager - it tracks the working state
135 s.mu.Lock()
136 mgr, ok := s.activeConversations[conversationID]
137 s.mu.Unlock()
138
139 if !ok {
140 // No active manager means the agent is not working
141 return false, nil
142 }
143
144 return mgr.IsAgentWorking(), nil
145}
146
147func (r *SubagentRunner) getLastAssistantResponse(ctx context.Context, conversationID string) (string, error) {
148 s := r.server
149
150 // Get the latest message
151 msg, err := s.db.GetLatestMessage(ctx, conversationID)
152 if err != nil {
153 return "", fmt.Errorf("failed to get latest message: %w", err)
154 }
155
156 // Extract text content
157 if msg.LlmData == nil {
158 return "", nil
159 }
160
161 var llmMsg llm.Message
162 if err := json.Unmarshal([]byte(*msg.LlmData), &llmMsg); err != nil {
163 return "", fmt.Errorf("failed to parse message: %w", err)
164 }
165
166 var texts []string
167 for _, content := range llmMsg.Content {
168 if content.Type == llm.ContentTypeText && content.Text != "" {
169 texts = append(texts, content.Text)
170 }
171 }
172
173 return strings.Join(texts, "\n"), nil
174}
175
176// generateProgressSummary makes a non-conversation LLM call to summarize the subagent's progress.
177// This is called when the timeout is reached and the subagent is still working.
178func (r *SubagentRunner) generateProgressSummary(ctx context.Context, conversationID, modelID string, llmService llm.Service) (string, error) {
179 s := r.server
180
181 // Get the conversation messages
182 var messages []generated.Message
183 err := s.db.Queries(ctx, func(q *generated.Queries) error {
184 var err error
185 messages, err = q.ListMessages(ctx, conversationID)
186 return err
187 })
188 if err != nil {
189 s.logger.Error("Failed to get messages for progress summary", "error", err)
190 return "[Subagent is still working (timeout reached). Failed to generate progress summary.]", nil
191 }
192
193 if len(messages) == 0 {
194 return "[Subagent is still working (timeout reached). No messages yet.]", nil
195 }
196
197 // Build a summary of the conversation for the LLM
198 conversationSummary := r.buildConversationSummary(messages)
199
200 // Make a non-conversation LLM call to summarize progress
201 summaryPrompt := `You are summarizing the current progress of a subagent task for a parent agent.
202
203The subagent was given a task and has been working on it, but the timeout was reached before it completed.
204Below is the conversation history showing what the subagent has done so far.
205
206Please provide a brief, actionable summary (2-4 sentences) that tells the parent agent:
2071. What the subagent has accomplished so far
2082. What it appears to be currently working on
2093. Whether it seems to be making progress or stuck
210
211Conversation history:
212` + conversationSummary + `
213
214Provide your summary now:`
215
216 req := &llm.Request{
217 Messages: []llm.Message{
218 {
219 Role: llm.MessageRoleUser,
220 Content: []llm.Content{{Type: llm.ContentTypeText, Text: summaryPrompt}},
221 },
222 },
223 }
224
225 // Use a short timeout for the summary call
226 summaryCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
227 defer cancel()
228
229 resp, err := llmService.Do(summaryCtx, req)
230 if err != nil {
231 s.logger.Error("Failed to generate progress summary via LLM", "error", err)
232 return "[Subagent is still working (timeout reached). Failed to generate progress summary.]", nil
233 }
234
235 // Extract the summary text
236 var summaryText string
237 for _, content := range resp.Content {
238 if content.Type == llm.ContentTypeText && content.Text != "" {
239 summaryText = content.Text
240 break
241 }
242 }
243
244 if summaryText == "" {
245 return "[Subagent is still working (timeout reached). No summary available.]", nil
246 }
247
248 return fmt.Sprintf("[Subagent is still working (timeout reached). Progress summary:]\n%s", summaryText), nil
249}
250
251// buildConversationSummary creates a text summary of the conversation messages for the LLM.
252func (r *SubagentRunner) buildConversationSummary(messages []generated.Message) string {
253 var sb strings.Builder
254
255 for _, msg := range messages {
256 // Skip system messages
257 if msg.Type == "system" {
258 continue
259 }
260
261 if msg.LlmData == nil {
262 continue
263 }
264
265 var llmMsg llm.Message
266 if err := json.Unmarshal([]byte(*msg.LlmData), &llmMsg); err != nil {
267 continue
268 }
269
270 roleStr := "User"
271 if llmMsg.Role == llm.MessageRoleAssistant {
272 roleStr = "Assistant"
273 }
274
275 for _, content := range llmMsg.Content {
276 switch content.Type {
277 case llm.ContentTypeText:
278 if content.Text != "" {
279 // Truncate very long text
280 text := content.Text
281 if len(text) > 500 {
282 text = text[:500] + "...[truncated]"
283 }
284 sb.WriteString(fmt.Sprintf("[%s]: %s\n\n", roleStr, text))
285 }
286 case llm.ContentTypeToolUse:
287 // Truncate tool input if long
288 inputStr := string(content.ToolInput)
289 if len(inputStr) > 200 {
290 inputStr = inputStr[:200] + "...[truncated]"
291 }
292 sb.WriteString(fmt.Sprintf("[%s used tool %s]: %s\n\n", roleStr, content.ToolName, inputStr))
293 case llm.ContentTypeToolResult:
294 // Summarize tool results
295 resultText := ""
296 for _, r := range content.ToolResult {
297 if r.Type == llm.ContentTypeText && r.Text != "" {
298 resultText = r.Text
299 break
300 }
301 }
302 if len(resultText) > 300 {
303 resultText = resultText[:300] + "...[truncated]"
304 }
305 errorStr := ""
306 if content.ToolError {
307 errorStr = " (error)"
308 }
309 sb.WriteString(fmt.Sprintf("[Tool result%s]: %s\n\n", errorStr, resultText))
310 }
311 }
312 }
313
314 // Limit total size
315 result := sb.String()
316 if len(result) > 8000 {
317 // Keep the last 8000 chars (most recent activity)
318 result = "...[earlier messages truncated]...\n" + result[len(result)-8000:]
319 }
320
321 return result
322}
323
324// notifySubagentConversation fetches the subagent conversation and publishes it
325// to all SSE streams so the UI can update the sidebar.
326func (r *SubagentRunner) notifySubagentConversation(ctx context.Context, conversationID string) {
327 s := r.server
328
329 // Fetch the conversation from the database
330 var conv generated.Conversation
331 err := s.db.Queries(ctx, func(q *generated.Queries) error {
332 var err error
333 conv, err = q.GetConversation(ctx, conversationID)
334 return err
335 })
336 if err != nil {
337 s.logger.Error("Failed to get subagent conversation for notification", "error", err, "conversationID", conversationID)
338 return
339 }
340
341 // Only notify if this is actually a subagent (has parent)
342 if conv.ParentConversationID == nil {
343 return
344 }
345
346 // Publish the subagent conversation to all active streams
347 s.publishConversationListUpdate(ConversationListUpdate{
348 Type: "update",
349 Conversation: &conv,
350 })
351
352 s.logger.Debug("Notified UI about subagent conversation",
353 "conversationID", conversationID,
354 "parentID", *conv.ParentConversationID,
355 "slug", conv.Slug)
356}
357
358// createSubagentToolSetConfig creates a ToolSetConfig for subagent conversations.
359// Subagent conversations don't have nested subagents to avoid complexity.
360func (s *Server) createSubagentToolSetConfig(conversationID string) claudetool.ToolSetConfig {
361 return claudetool.ToolSetConfig{
362 LLMProvider: s.llmManager,
363 EnableJITInstall: true,
364 EnableBrowser: true, // Subagents can use browser tools
365 // No SubagentRunner/DB - subagents can't spawn nested subagents
366 }
367}
368
// Compile-time check that SubagentRunner implements claudetool.SubagentRunner.
var _ claudetool.SubagentRunner = (*SubagentRunner)(nil)
371
372// handleGetSubagents returns the list of subagents for a conversation.
373func (s *Server) handleGetSubagents(w http.ResponseWriter, r *http.Request, conversationID string) {
374 if r.Method != "GET" {
375 http.Error(w, "Method not allowed", 405)
376 return
377 }
378
379 subagents, err := s.db.GetSubagents(r.Context(), conversationID)
380 if err != nil {
381 s.logger.Error("Failed to get subagents", "conversationID", conversationID, "error", err)
382 http.Error(w, "Failed to get subagents", 500)
383 return
384 }
385
386 w.Header().Set("Content-Type", "application/json")
387 json.NewEncoder(w).Encode(subagents)
388}