1use crate::{
2 ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread,
3 DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool,
4 ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool,
5 RestoreFileFromDiskTool, SaveFileTool, StreamingEditFileTool, SubagentTool,
6 SystemPromptTemplate, Template, Templates, TerminalTool, ThinkingTool, ToolPermissionDecision,
7 WebSearchTool, decide_permission_from_settings,
8};
9use acp_thread::{MentionUri, UserMessageId};
10use action_log::ActionLog;
11use feature_flags::{FeatureFlagAppExt as _, SubagentsFeatureFlag};
12
13use agent_client_protocol as acp;
14use agent_settings::{
15 AgentProfileId, AgentProfileSettings, AgentSettings, SUMMARIZE_THREAD_DETAILED_PROMPT,
16 SUMMARIZE_THREAD_PROMPT,
17};
18use anyhow::{Context as _, Result, anyhow};
19use chrono::{DateTime, Utc};
20use client::UserStore;
21use cloud_api_types::Plan;
22use cloud_llm_client::CompletionIntent;
23use collections::{HashMap, HashSet, IndexMap};
24use fs::Fs;
25use futures::stream;
26use futures::{
27 FutureExt,
28 channel::{mpsc, oneshot},
29 future::Shared,
30 stream::FuturesUnordered,
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity,
34};
35use language_model::{
36 LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId,
37 LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
38 LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
39 LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
40 LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
41};
42use project::Project;
43use prompt_store::ProjectContext;
44use schemars::{JsonSchema, Schema};
45use serde::{Deserialize, Serialize};
46use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file};
47use smol::stream::StreamExt;
48use std::{
49 collections::BTreeMap,
50 ops::RangeInclusive,
51 path::Path,
52 rc::Rc,
53 sync::Arc,
54 time::{Duration, Instant},
55};
56use std::{fmt::Write, path::PathBuf};
57use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock, paths::PathStyle};
58use uuid::Uuid;
59
60const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user";
61pub const MAX_TOOL_NAME_LENGTH: usize = 64;
62pub const MAX_SUBAGENT_DEPTH: u8 = 4;
63pub const MAX_PARALLEL_SUBAGENTS: usize = 8;
64
65/// Context passed to a subagent thread for lifecycle management
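///
/// Illustrative sketch of how a spawning tool might populate this for a
/// first-level subagent (the variable names and prompt strings below are placeholders):
///
/// ```ignore
/// SubagentContext {
///     parent_thread_id: parent_thread.id().clone(),
///     tool_use_id: tool_use.id.clone(),
///     depth: 1,
///     summary_prompt: "Summarize the work you completed.".into(),
///     context_low_prompt: "Context is running low; summarize and stop.".into(),
/// }
/// ```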
66#[derive(Clone)]
67pub struct SubagentContext {
68 /// ID of the parent thread
69 pub parent_thread_id: acp::SessionId,
70
71 /// ID of the tool call that spawned this subagent
72 pub tool_use_id: LanguageModelToolUseId,
73
74 /// Current depth level (0 = root agent, 1 = first-level subagent, etc.)
75 pub depth: u8,
76
77 /// Prompt to send when subagent completes successfully
78 pub summary_prompt: String,
79
80 /// Prompt to send when context is running low (≤25% remaining)
81 pub context_low_prompt: String,
82}
83
84/// The ID of the user prompt that initiated a request.
85///
86/// This equates to the user physically submitting a message to the model (e.g., by pressing the Enter key).
87#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)]
88pub struct PromptId(Arc<str>);
89
90impl PromptId {
91 pub fn new() -> Self {
92 Self(Uuid::new_v4().to_string().into())
93 }
94}
95
96impl std::fmt::Display for PromptId {
97 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
98 write!(f, "{}", self.0)
99 }
100}
101
102pub(crate) const MAX_RETRY_ATTEMPTS: u8 = 4;
103pub(crate) const BASE_RETRY_DELAY: Duration = Duration::from_secs(5);
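// Worked example of the exponential backoff schedule applied in
// `handle_completion_error` (delay = initial_delay * 2^(attempt - 1)), assuming
// BASE_RETRY_DELAY is used as the initial delay: attempts 1..=4 wait roughly
// 5s, 10s, 20s, and 40s before the request is retried.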
104
105#[derive(Debug, Clone)]
106enum RetryStrategy {
107 ExponentialBackoff {
108 initial_delay: Duration,
109 max_attempts: u8,
110 },
111 Fixed {
112 delay: Duration,
113 max_attempts: u8,
114 },
115}
116
117#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
118pub enum Message {
119 User(UserMessage),
120 Agent(AgentMessage),
121 Resume,
122}
123
124impl Message {
125 pub fn as_agent_message(&self) -> Option<&AgentMessage> {
126 match self {
127 Message::Agent(agent_message) => Some(agent_message),
128 _ => None,
129 }
130 }
131
132 pub fn to_request(&self) -> Vec<LanguageModelRequestMessage> {
133 match self {
134 Message::User(message) => {
135 if message.content.is_empty() {
136 vec![]
137 } else {
138 vec![message.to_request()]
139 }
140 }
141 Message::Agent(message) => message.to_request(),
142 Message::Resume => vec![LanguageModelRequestMessage {
143 role: Role::User,
144 content: vec!["Continue where you left off".into()],
145 cache: false,
146 reasoning_details: None,
147 }],
148 }
149 }
150
151 pub fn to_markdown(&self) -> String {
152 match self {
153 Message::User(message) => message.to_markdown(),
154 Message::Agent(message) => message.to_markdown(),
155 Message::Resume => "[resume]\n".into(),
156 }
157 }
158
159 pub fn role(&self) -> Role {
160 match self {
161 Message::User(_) | Message::Resume => Role::User,
162 Message::Agent(_) => Role::Assistant,
163 }
164 }
165}
166
167#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
168pub struct UserMessage {
169 pub id: UserMessageId,
170 pub content: Vec<UserMessageContent>,
171}
172
173#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
174pub enum UserMessageContent {
175 Text(String),
176 Mention { uri: MentionUri, content: String },
177 Image(LanguageModelImage),
178}
179
180impl UserMessage {
181 pub fn to_markdown(&self) -> String {
182 let mut markdown = String::from("## User\n\n");
183
184 for content in &self.content {
185 match content {
186 UserMessageContent::Text(text) => {
187 markdown.push_str(text);
188 markdown.push('\n');
189 }
190 UserMessageContent::Image(_) => {
191 markdown.push_str("<image />\n");
192 }
193 UserMessageContent::Mention { uri, content } => {
194 if !content.is_empty() {
195 let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content);
196 } else {
197 let _ = writeln!(&mut markdown, "{}", uri.as_link());
198 }
199 }
200 }
201 }
202
203 markdown
204 }
205
206 fn to_request(&self) -> LanguageModelRequestMessage {
207 let mut message = LanguageModelRequestMessage {
208 role: Role::User,
209 content: Vec::with_capacity(self.content.len()),
210 cache: false,
211 reasoning_details: None,
212 };
213
214 const OPEN_CONTEXT: &str = "<context>\n\
215 The following items were attached by the user. \
216 They are up-to-date and don't need to be re-read.\n\n";
217
218 const OPEN_FILES_TAG: &str = "<files>";
219 const OPEN_DIRECTORIES_TAG: &str = "<directories>";
220 const OPEN_SYMBOLS_TAG: &str = "<symbols>";
221 const OPEN_SELECTIONS_TAG: &str = "<selections>";
222 const OPEN_THREADS_TAG: &str = "<threads>";
223 const OPEN_FETCH_TAG: &str = "<fetched_urls>";
224 const OPEN_RULES_TAG: &str =
225 "<rules>\nThe user has specified the following rules that should be applied:\n";
226 const OPEN_DIAGNOSTICS_TAG: &str = "<diagnostics>";
227
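        // Sketch of the request content this method assembles: the user's own chunks
        // come first; then, only if any mention produced context, a single trailing
        // block shaped roughly like:
        //
        //     <context>
        //     The following items were attached by the user. ...
        //
        //     <files>
        //     ```rs path/to/file.rs
        //     ...
        //     ```
        //     </files>
        //     <directories>...</directories>
        //     ...
        //     </context>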
228 let mut file_context = OPEN_FILES_TAG.to_string();
229 let mut directory_context = OPEN_DIRECTORIES_TAG.to_string();
230 let mut symbol_context = OPEN_SYMBOLS_TAG.to_string();
231 let mut selection_context = OPEN_SELECTIONS_TAG.to_string();
232 let mut thread_context = OPEN_THREADS_TAG.to_string();
233 let mut fetch_context = OPEN_FETCH_TAG.to_string();
234 let mut rules_context = OPEN_RULES_TAG.to_string();
235 let mut diagnostics_context = OPEN_DIAGNOSTICS_TAG.to_string();
236
237 for chunk in &self.content {
238 let chunk = match chunk {
239 UserMessageContent::Text(text) => {
240 language_model::MessageContent::Text(text.clone())
241 }
242 UserMessageContent::Image(value) => {
243 language_model::MessageContent::Image(value.clone())
244 }
245 UserMessageContent::Mention { uri, content } => {
246 match uri {
247 MentionUri::File { abs_path } => {
248 write!(
249 &mut file_context,
250 "\n{}",
251 MarkdownCodeBlock {
252 tag: &codeblock_tag(abs_path, None),
253 text: &content.to_string(),
254 }
255 )
256 .ok();
257 }
258 MentionUri::PastedImage => {
259 debug_panic!("pasted image URI should not be used in mention content")
260 }
261 MentionUri::Directory { .. } => {
262 write!(&mut directory_context, "\n{}\n", content).ok();
263 }
264 MentionUri::Symbol {
265 abs_path: path,
266 line_range,
267 ..
268 } => {
269 write!(
270 &mut symbol_context,
271 "\n{}",
272 MarkdownCodeBlock {
273 tag: &codeblock_tag(path, Some(line_range)),
274 text: content
275 }
276 )
277 .ok();
278 }
279 MentionUri::Selection {
280 abs_path: path,
281 line_range,
282 ..
283 } => {
284 write!(
285 &mut selection_context,
286 "\n{}",
287 MarkdownCodeBlock {
288 tag: &codeblock_tag(
289 path.as_deref().unwrap_or("Untitled".as_ref()),
290 Some(line_range)
291 ),
292 text: content
293 }
294 )
295 .ok();
296 }
297 MentionUri::Thread { .. } => {
298 write!(&mut thread_context, "\n{}\n", content).ok();
299 }
300 MentionUri::TextThread { .. } => {
301 write!(&mut thread_context, "\n{}\n", content).ok();
302 }
303 MentionUri::Rule { .. } => {
304 write!(
305 &mut rules_context,
306 "\n{}",
307 MarkdownCodeBlock {
308 tag: "",
309 text: content
310 }
311 )
312 .ok();
313 }
314 MentionUri::Fetch { url } => {
315 write!(&mut fetch_context, "\nFetch: {}\n\n{}", url, content).ok();
316 }
317 MentionUri::Diagnostics { .. } => {
318 write!(&mut diagnostics_context, "\n{}\n", content).ok();
319 }
320 }
321
322 language_model::MessageContent::Text(uri.as_link().to_string())
323 }
324 };
325
326 message.content.push(chunk);
327 }
328
329 let len_before_context = message.content.len();
330
331 if file_context.len() > OPEN_FILES_TAG.len() {
332 file_context.push_str("</files>\n");
333 message
334 .content
335 .push(language_model::MessageContent::Text(file_context));
336 }
337
338 if directory_context.len() > OPEN_DIRECTORIES_TAG.len() {
339 directory_context.push_str("</directories>\n");
340 message
341 .content
342 .push(language_model::MessageContent::Text(directory_context));
343 }
344
345 if symbol_context.len() > OPEN_SYMBOLS_TAG.len() {
346 symbol_context.push_str("</symbols>\n");
347 message
348 .content
349 .push(language_model::MessageContent::Text(symbol_context));
350 }
351
352 if selection_context.len() > OPEN_SELECTIONS_TAG.len() {
353 selection_context.push_str("</selections>\n");
354 message
355 .content
356 .push(language_model::MessageContent::Text(selection_context));
357 }
358
359 if thread_context.len() > OPEN_THREADS_TAG.len() {
360 thread_context.push_str("</threads>\n");
361 message
362 .content
363 .push(language_model::MessageContent::Text(thread_context));
364 }
365
366 if fetch_context.len() > OPEN_FETCH_TAG.len() {
367 fetch_context.push_str("</fetched_urls>\n");
368 message
369 .content
370 .push(language_model::MessageContent::Text(fetch_context));
371 }
372
373 if rules_context.len() > OPEN_RULES_TAG.len() {
            rules_context.push_str("</rules>\n");
375 message
376 .content
377 .push(language_model::MessageContent::Text(rules_context));
378 }
379
380 if diagnostics_context.len() > OPEN_DIAGNOSTICS_TAG.len() {
381 diagnostics_context.push_str("</diagnostics>\n");
382 message
383 .content
384 .push(language_model::MessageContent::Text(diagnostics_context));
385 }
386
387 if message.content.len() > len_before_context {
388 message.content.insert(
389 len_before_context,
390 language_model::MessageContent::Text(OPEN_CONTEXT.into()),
391 );
392 message
393 .content
394 .push(language_model::MessageContent::Text("</context>".into()));
395 }
396
397 message
398 }
399}
400
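/// Builds the info string for a Markdown code block wrapping attached content:
/// the file extension (if any), the display path, and an optional 1-based line range.
///
/// Illustrative sketch (the 0-based inclusive range `0..=4` renders as `:1-5`):
///
/// ```ignore
/// assert_eq!(
///     codeblock_tag(Path::new("src/main.rs"), Some(&(0..=4))),
///     "rs src/main.rs:1-5"
/// );
/// assert_eq!(codeblock_tag(Path::new("src/main.rs"), None), "rs src/main.rs");
/// ```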
401fn codeblock_tag(full_path: &Path, line_range: Option<&RangeInclusive<u32>>) -> String {
402 let mut result = String::new();
403
404 if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) {
405 let _ = write!(result, "{} ", extension);
406 }
407
408 let _ = write!(result, "{}", full_path.display());
409
410 if let Some(range) = line_range {
411 if range.start() == range.end() {
412 let _ = write!(result, ":{}", range.start() + 1);
413 } else {
414 let _ = write!(result, ":{}-{}", range.start() + 1, range.end() + 1);
415 }
416 }
417
418 result
419}
420
421impl AgentMessage {
422 pub fn to_markdown(&self) -> String {
423 let mut markdown = String::from("## Assistant\n\n");
424
425 for content in &self.content {
426 match content {
427 AgentMessageContent::Text(text) => {
428 markdown.push_str(text);
429 markdown.push('\n');
430 }
431 AgentMessageContent::Thinking { text, .. } => {
432 markdown.push_str("<think>");
433 markdown.push_str(text);
434 markdown.push_str("</think>\n");
435 }
436 AgentMessageContent::RedactedThinking(_) => {
437 markdown.push_str("<redacted_thinking />\n")
438 }
439 AgentMessageContent::ToolUse(tool_use) => {
440 markdown.push_str(&format!(
441 "**Tool Use**: {} (ID: {})\n",
442 tool_use.name, tool_use.id
443 ));
444 markdown.push_str(&format!(
445 "{}\n",
446 MarkdownCodeBlock {
447 tag: "json",
448 text: &format!("{:#}", tool_use.input)
449 }
450 ));
451 }
452 }
453 }
454
455 for tool_result in self.tool_results.values() {
456 markdown.push_str(&format!(
457 "**Tool Result**: {} (ID: {})\n\n",
458 tool_result.tool_name, tool_result.tool_use_id
459 ));
460 if tool_result.is_error {
461 markdown.push_str("**ERROR:**\n");
462 }
463
464 match &tool_result.content {
465 LanguageModelToolResultContent::Text(text) => {
466 writeln!(markdown, "{text}\n").ok();
467 }
468 LanguageModelToolResultContent::Image(_) => {
469 writeln!(markdown, "<image />\n").ok();
470 }
471 }
472
473 if let Some(output) = tool_result.output.as_ref() {
474 writeln!(
475 markdown,
476 "**Debug Output**:\n\n```json\n{}\n```\n",
477 serde_json::to_string_pretty(output).unwrap()
478 )
479 .unwrap();
480 }
481 }
482
483 markdown
484 }
485
486 pub fn to_request(&self) -> Vec<LanguageModelRequestMessage> {
487 let mut assistant_message = LanguageModelRequestMessage {
488 role: Role::Assistant,
489 content: Vec::with_capacity(self.content.len()),
490 cache: false,
491 reasoning_details: self.reasoning_details.clone(),
492 };
493 for chunk in &self.content {
494 match chunk {
495 AgentMessageContent::Text(text) => {
496 assistant_message
497 .content
498 .push(language_model::MessageContent::Text(text.clone()));
499 }
500 AgentMessageContent::Thinking { text, signature } => {
501 assistant_message
502 .content
503 .push(language_model::MessageContent::Thinking {
504 text: text.clone(),
505 signature: signature.clone(),
506 });
507 }
508 AgentMessageContent::RedactedThinking(value) => {
509 assistant_message.content.push(
510 language_model::MessageContent::RedactedThinking(value.clone()),
511 );
512 }
513 AgentMessageContent::ToolUse(tool_use) => {
514 if self.tool_results.contains_key(&tool_use.id) {
515 assistant_message
516 .content
517 .push(language_model::MessageContent::ToolUse(tool_use.clone()));
518 }
519 }
520 };
521 }
522
523 let mut user_message = LanguageModelRequestMessage {
524 role: Role::User,
525 content: Vec::new(),
526 cache: false,
527 reasoning_details: None,
528 };
529
530 for tool_result in self.tool_results.values() {
531 let mut tool_result = tool_result.clone();
532 // Surprisingly, the API fails if we return an empty string here.
533 // It thinks we are sending a tool use without a tool result.
534 if tool_result.content.is_empty() {
535 tool_result.content = "<Tool returned an empty string>".into();
536 }
537 user_message
538 .content
539 .push(language_model::MessageContent::ToolResult(tool_result));
540 }
541
542 let mut messages = Vec::new();
543 if !assistant_message.content.is_empty() {
544 messages.push(assistant_message);
545 }
546 if !user_message.content.is_empty() {
547 messages.push(user_message);
548 }
549 messages
550 }
551}
552
553#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
554pub struct AgentMessage {
555 pub content: Vec<AgentMessageContent>,
556 pub tool_results: IndexMap<LanguageModelToolUseId, LanguageModelToolResult>,
557 pub reasoning_details: Option<serde_json::Value>,
558}
559
560#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
561pub enum AgentMessageContent {
562 Text(String),
563 Thinking {
564 text: String,
565 signature: Option<String>,
566 },
567 RedactedThinking(String),
568 ToolUse(LanguageModelToolUse),
569}
570
571pub trait TerminalHandle {
572 fn id(&self, cx: &AsyncApp) -> Result<acp::TerminalId>;
573 fn current_output(&self, cx: &AsyncApp) -> Result<acp::TerminalOutputResponse>;
574 fn wait_for_exit(&self, cx: &AsyncApp) -> Result<Shared<Task<acp::TerminalExitStatus>>>;
575 fn kill(&self, cx: &AsyncApp) -> Result<()>;
576 fn was_stopped_by_user(&self, cx: &AsyncApp) -> Result<bool>;
577}
578
579pub trait ThreadEnvironment {
580 fn create_terminal(
581 &self,
582 command: String,
583 cwd: Option<PathBuf>,
584 output_byte_limit: Option<u64>,
585 cx: &mut AsyncApp,
586 ) -> Task<Result<Rc<dyn TerminalHandle>>>;
587}
588
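// Rough ordering sketch of events on a turn's stream: the user message (on replay),
// streamed AgentThinking/AgentText chunks, a ToolCall followed by ToolCallUpdate(s)
// (and possibly ToolCallAuthorization or Retry), and finally a single Stop.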
589#[derive(Debug)]
590pub enum ThreadEvent {
591 UserMessage(UserMessage),
592 AgentText(String),
593 AgentThinking(String),
594 ToolCall(acp::ToolCall),
595 ToolCallUpdate(acp_thread::ToolCallUpdate),
596 ToolCallAuthorization(ToolCallAuthorization),
597 Retry(acp_thread::RetryStatus),
598 Stop(acp::StopReason),
599}
600
601#[derive(Debug)]
602pub struct NewTerminal {
603 pub command: String,
604 pub output_byte_limit: Option<u64>,
605 pub cwd: Option<PathBuf>,
606 pub response: oneshot::Sender<Result<Entity<acp_thread::Terminal>>>,
607}
608
609#[derive(Debug, Clone)]
610pub struct ToolPermissionContext {
611 pub tool_name: String,
612 pub input_value: String,
613}
614
615impl ToolPermissionContext {
616 pub fn new(tool_name: impl Into<String>, input_value: impl Into<String>) -> Self {
617 Self {
618 tool_name: tool_name.into(),
619 input_value: input_value.into(),
620 }
621 }
622
623 /// Builds the permission options for this tool context.
624 ///
625 /// This is the canonical source for permission option generation.
626 /// Tests should use this function rather than manually constructing options.
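    ///
    /// Illustrative sketch (the tool name and JSON input below are placeholders):
    ///
    /// ```ignore
    /// let context = ToolPermissionContext::new("terminal", r#"{"command": "cargo test"}"#);
    /// // Returns PermissionOptions::Dropdown containing a tool-wide "Always for terminal"
    /// // choice, an optional pattern-scoped choice (e.g. "Always for `cargo` commands"
    /// // when a command pattern can be extracted), and an "Only this time" choice.
    /// let options = context.build_permission_options();
    /// ```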
627 pub fn build_permission_options(&self) -> acp_thread::PermissionOptions {
628 use crate::pattern_extraction::*;
629
630 let tool_name = &self.tool_name;
631 let input_value = &self.input_value;
632
633 let (pattern, pattern_display) = match tool_name.as_str() {
634 "terminal" => (
635 extract_terminal_pattern(input_value),
636 extract_terminal_pattern_display(input_value),
637 ),
638 "edit_file" | "delete_path" | "move_path" | "create_directory" | "save_file" => (
639 extract_path_pattern(input_value),
640 extract_path_pattern_display(input_value),
641 ),
642 "fetch" => (
643 extract_url_pattern(input_value),
644 extract_url_pattern_display(input_value),
645 ),
646 _ => (None, None),
647 };
648
649 let mut choices = Vec::new();
650
651 let mut push_choice = |label: String, allow_id, deny_id, allow_kind, deny_kind| {
652 choices.push(acp_thread::PermissionOptionChoice {
653 allow: acp::PermissionOption::new(
654 acp::PermissionOptionId::new(allow_id),
655 label.clone(),
656 allow_kind,
657 ),
658 deny: acp::PermissionOption::new(
659 acp::PermissionOptionId::new(deny_id),
660 label,
661 deny_kind,
662 ),
663 });
664 };
665
666 push_choice(
667 format!("Always for {}", tool_name.replace('_', " ")),
668 format!("always_allow:{}", tool_name),
669 format!("always_deny:{}", tool_name),
670 acp::PermissionOptionKind::AllowAlways,
671 acp::PermissionOptionKind::RejectAlways,
672 );
673
674 if let (Some(pattern), Some(display)) = (pattern, pattern_display) {
675 let button_text = match tool_name.as_str() {
676 "terminal" => format!("Always for `{}` commands", display),
677 "fetch" => format!("Always for `{}`", display),
678 _ => format!("Always for `{}`", display),
679 };
680 push_choice(
681 button_text,
682 format!("always_allow_pattern:{}:{}", tool_name, pattern),
683 format!("always_deny_pattern:{}:{}", tool_name, pattern),
684 acp::PermissionOptionKind::AllowAlways,
685 acp::PermissionOptionKind::RejectAlways,
686 );
687 }
688
689 push_choice(
690 "Only this time".to_string(),
691 "allow".to_string(),
692 "deny".to_string(),
693 acp::PermissionOptionKind::AllowOnce,
694 acp::PermissionOptionKind::RejectOnce,
695 );
696
697 acp_thread::PermissionOptions::Dropdown(choices)
698 }
699}
700
701#[derive(Debug)]
702pub struct ToolCallAuthorization {
703 pub tool_call: acp::ToolCallUpdate,
704 pub options: acp_thread::PermissionOptions,
705 pub response: oneshot::Sender<acp::PermissionOptionId>,
706 pub context: Option<ToolPermissionContext>,
707}
708
709#[derive(Debug, thiserror::Error)]
710enum CompletionError {
711 #[error("max tokens")]
712 MaxTokens,
713 #[error("refusal")]
714 Refusal,
715 #[error(transparent)]
716 Other(#[from] anyhow::Error),
717}
718
719pub struct Thread {
720 id: acp::SessionId,
721 prompt_id: PromptId,
722 updated_at: DateTime<Utc>,
723 title: Option<SharedString>,
724 pending_title_generation: Option<Task<()>>,
725 pending_summary_generation: Option<Shared<Task<Option<SharedString>>>>,
726 summary: Option<SharedString>,
727 messages: Vec<Message>,
728 user_store: Entity<UserStore>,
    /// Holds the task that handles agent interaction until the end of the turn.
    /// It survives across multiple requests while the model requests tool calls
    /// and we run those tools and report their results back.
732 running_turn: Option<RunningTurn>,
733 /// Flag indicating the UI has a queued message waiting to be sent.
734 /// Used to signal that the turn should end at the next message boundary.
735 has_queued_message: bool,
736 pending_message: Option<AgentMessage>,
737 tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
738 request_token_usage: HashMap<UserMessageId, language_model::TokenUsage>,
739 #[allow(unused)]
740 cumulative_token_usage: TokenUsage,
741 #[allow(unused)]
742 initial_project_snapshot: Shared<Task<Option<Arc<ProjectSnapshot>>>>,
743 context_server_registry: Entity<ContextServerRegistry>,
744 profile_id: AgentProfileId,
745 project_context: Entity<ProjectContext>,
746 templates: Arc<Templates>,
747 model: Option<Arc<dyn LanguageModel>>,
748 summarization_model: Option<Arc<dyn LanguageModel>>,
749 thinking_enabled: bool,
750 prompt_capabilities_tx: watch::Sender<acp::PromptCapabilities>,
751 pub(crate) prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
752 pub(crate) project: Entity<Project>,
753 pub(crate) action_log: Entity<ActionLog>,
754 /// Tracks the last time files were read by the agent, to detect external modifications
755 pub(crate) file_read_times: HashMap<PathBuf, fs::MTime>,
756 /// True if this thread was imported from a shared thread and can be synced.
757 imported: bool,
758 /// If this is a subagent thread, contains context about the parent
759 subagent_context: Option<SubagentContext>,
760 /// Weak references to running subagent threads for cancellation propagation
761 running_subagents: Vec<WeakEntity<Thread>>,
762}
763
764impl Thread {
765 fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities {
766 let image = model.map_or(true, |model| model.supports_images());
767 acp::PromptCapabilities::new()
768 .image(image)
769 .embedded_context(true)
770 }
771
772 pub fn new(
773 project: Entity<Project>,
774 project_context: Entity<ProjectContext>,
775 context_server_registry: Entity<ContextServerRegistry>,
776 templates: Arc<Templates>,
777 model: Option<Arc<dyn LanguageModel>>,
778 cx: &mut Context<Self>,
779 ) -> Self {
780 let profile_id = AgentSettings::get_global(cx).default_profile.clone();
781 let action_log = cx.new(|_cx| ActionLog::new(project.clone()));
782 let (prompt_capabilities_tx, prompt_capabilities_rx) =
783 watch::channel(Self::prompt_capabilities(model.as_deref()));
784 Self {
785 id: acp::SessionId::new(uuid::Uuid::new_v4().to_string()),
786 prompt_id: PromptId::new(),
787 updated_at: Utc::now(),
788 title: None,
789 pending_title_generation: None,
790 pending_summary_generation: None,
791 summary: None,
792 messages: Vec::new(),
793 user_store: project.read(cx).user_store(),
794 running_turn: None,
795 has_queued_message: false,
796 pending_message: None,
797 tools: BTreeMap::default(),
798 request_token_usage: HashMap::default(),
799 cumulative_token_usage: TokenUsage::default(),
800 initial_project_snapshot: {
801 let project_snapshot = Self::project_snapshot(project.clone(), cx);
802 cx.foreground_executor()
803 .spawn(async move { Some(project_snapshot.await) })
804 .shared()
805 },
806 context_server_registry,
807 profile_id,
808 project_context,
809 templates,
810 model,
811 summarization_model: None,
812 thinking_enabled: true,
813 prompt_capabilities_tx,
814 prompt_capabilities_rx,
815 project,
816 action_log,
817 file_read_times: HashMap::default(),
818 imported: false,
819 subagent_context: None,
820 running_subagents: Vec::new(),
821 }
822 }
823
824 pub fn new_subagent(
825 project: Entity<Project>,
826 project_context: Entity<ProjectContext>,
827 context_server_registry: Entity<ContextServerRegistry>,
828 templates: Arc<Templates>,
829 model: Arc<dyn LanguageModel>,
830 subagent_context: SubagentContext,
831 parent_tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
832 cx: &mut Context<Self>,
833 ) -> Self {
834 let profile_id = AgentSettings::get_global(cx).default_profile.clone();
835 let action_log = cx.new(|_cx| ActionLog::new(project.clone()));
836 let (prompt_capabilities_tx, prompt_capabilities_rx) =
837 watch::channel(Self::prompt_capabilities(Some(model.as_ref())));
838
839 // Rebind tools that hold thread references to use this subagent's thread
840 // instead of the parent's thread. This is critical for tools like EditFileTool
841 // that make model requests using the thread's ID.
842 let weak_self = cx.weak_entity();
843 let tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>> = parent_tools
844 .into_iter()
845 .map(|(name, tool)| {
846 let rebound = tool.rebind_thread(weak_self.clone()).unwrap_or(tool);
847 (name, rebound)
848 })
849 .collect();
850
851 Self {
852 id: acp::SessionId::new(uuid::Uuid::new_v4().to_string()),
853 prompt_id: PromptId::new(),
854 updated_at: Utc::now(),
855 title: None,
856 pending_title_generation: None,
857 pending_summary_generation: None,
858 summary: None,
859 messages: Vec::new(),
860 user_store: project.read(cx).user_store(),
861 running_turn: None,
862 has_queued_message: false,
863 pending_message: None,
864 tools,
865 request_token_usage: HashMap::default(),
866 cumulative_token_usage: TokenUsage::default(),
867 initial_project_snapshot: Task::ready(None).shared(),
868 context_server_registry,
869 profile_id,
870 project_context,
871 templates,
872 model: Some(model),
873 summarization_model: None,
874 thinking_enabled: true,
875 prompt_capabilities_tx,
876 prompt_capabilities_rx,
877 project,
878 action_log,
879 file_read_times: HashMap::default(),
880 imported: false,
881 subagent_context: Some(subagent_context),
882 running_subagents: Vec::new(),
883 }
884 }
885
886 pub fn id(&self) -> &acp::SessionId {
887 &self.id
888 }
889
890 /// Returns true if this thread was imported from a shared thread.
891 pub fn is_imported(&self) -> bool {
892 self.imported
893 }
894
895 pub fn replay(
896 &mut self,
897 cx: &mut Context<Self>,
898 ) -> mpsc::UnboundedReceiver<Result<ThreadEvent>> {
899 let (tx, rx) = mpsc::unbounded();
900 let stream = ThreadEventStream(tx);
901 for message in &self.messages {
902 match message {
903 Message::User(user_message) => stream.send_user_message(user_message),
904 Message::Agent(assistant_message) => {
905 for content in &assistant_message.content {
906 match content {
907 AgentMessageContent::Text(text) => stream.send_text(text),
908 AgentMessageContent::Thinking { text, .. } => {
909 stream.send_thinking(text)
910 }
911 AgentMessageContent::RedactedThinking(_) => {}
912 AgentMessageContent::ToolUse(tool_use) => {
913 self.replay_tool_call(
914 tool_use,
915 assistant_message.tool_results.get(&tool_use.id),
916 &stream,
917 cx,
918 );
919 }
920 }
921 }
922 }
923 Message::Resume => {}
924 }
925 }
926 rx
927 }
928
929 fn replay_tool_call(
930 &self,
931 tool_use: &LanguageModelToolUse,
932 tool_result: Option<&LanguageModelToolResult>,
933 stream: &ThreadEventStream,
934 cx: &mut Context<Self>,
935 ) {
        let tool = self.tools.get(tool_use.name.as_ref()).cloned().or_else(|| {
            self.context_server_registry
                .read(cx)
                .servers()
                .find_map(|(_, tools)| tools.get(tool_use.name.as_ref()).cloned())
        });
948
949 let Some(tool) = tool else {
950 stream
951 .0
952 .unbounded_send(Ok(ThreadEvent::ToolCall(
953 acp::ToolCall::new(tool_use.id.to_string(), tool_use.name.to_string())
954 .status(acp::ToolCallStatus::Failed)
955 .raw_input(tool_use.input.clone()),
956 )))
957 .ok();
958 return;
959 };
960
961 let title = tool.initial_title(tool_use.input.clone(), cx);
962 let kind = tool.kind();
963 stream.send_tool_call(
964 &tool_use.id,
965 &tool_use.name,
966 title,
967 kind,
968 tool_use.input.clone(),
969 );
970
971 let output = tool_result
972 .as_ref()
973 .and_then(|result| result.output.clone());
974 if let Some(output) = output.clone() {
975 // For replay, we use a dummy cancellation receiver since the tool already completed
976 let (_cancellation_tx, cancellation_rx) = watch::channel(false);
977 let tool_event_stream = ToolCallEventStream::new(
978 tool_use.id.clone(),
979 stream.clone(),
980 Some(self.project.read(cx).fs().clone()),
981 cancellation_rx,
982 );
983 tool.replay(tool_use.input.clone(), output, tool_event_stream, cx)
984 .log_err();
985 }
986
987 stream.update_tool_call_fields(
988 &tool_use.id,
989 acp::ToolCallUpdateFields::new()
990 .status(
991 tool_result
992 .as_ref()
993 .map_or(acp::ToolCallStatus::Failed, |result| {
994 if result.is_error {
995 acp::ToolCallStatus::Failed
996 } else {
997 acp::ToolCallStatus::Completed
998 }
999 }),
1000 )
1001 .raw_output(output),
1002 );
1003 }
1004
1005 pub fn from_db(
1006 id: acp::SessionId,
1007 db_thread: DbThread,
1008 project: Entity<Project>,
1009 project_context: Entity<ProjectContext>,
1010 context_server_registry: Entity<ContextServerRegistry>,
1011 templates: Arc<Templates>,
1012 cx: &mut Context<Self>,
1013 ) -> Self {
1014 let profile_id = db_thread
1015 .profile
1016 .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
1017
1018 let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
1019 db_thread
1020 .model
1021 .and_then(|model| {
1022 let model = SelectedModel {
1023 provider: model.provider.clone().into(),
1024 model: model.model.into(),
1025 };
1026 registry.select_model(&model, cx)
1027 })
1028 .or_else(|| registry.default_model())
1029 .map(|model| model.model)
1030 });
1031
1032 if model.is_none() {
1033 model = Self::resolve_profile_model(&profile_id, cx);
1034 }
1035 if model.is_none() {
1036 model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
1037 registry.default_model().map(|model| model.model)
1038 });
1039 }
1040
1041 let (prompt_capabilities_tx, prompt_capabilities_rx) =
1042 watch::channel(Self::prompt_capabilities(model.as_deref()));
1043
1044 let action_log = cx.new(|_| ActionLog::new(project.clone()));
1045
1046 Self {
1047 id,
1048 prompt_id: PromptId::new(),
1049 title: if db_thread.title.is_empty() {
1050 None
1051 } else {
1052 Some(db_thread.title.clone())
1053 },
1054 pending_title_generation: None,
1055 pending_summary_generation: None,
1056 summary: db_thread.detailed_summary,
1057 messages: db_thread.messages,
1058 user_store: project.read(cx).user_store(),
1059 running_turn: None,
1060 has_queued_message: false,
1061 pending_message: None,
1062 tools: BTreeMap::default(),
1063 request_token_usage: db_thread.request_token_usage.clone(),
1064 cumulative_token_usage: db_thread.cumulative_token_usage,
1065 initial_project_snapshot: Task::ready(db_thread.initial_project_snapshot).shared(),
1066 context_server_registry,
1067 profile_id,
1068 project_context,
1069 templates,
1070 model,
1071 summarization_model: None,
1072 // TODO: Persist this on the `DbThread`.
1073 thinking_enabled: true,
1074 project,
1075 action_log,
1076 updated_at: db_thread.updated_at,
1077 prompt_capabilities_tx,
1078 prompt_capabilities_rx,
1079 file_read_times: HashMap::default(),
1080 imported: db_thread.imported,
1081 subagent_context: None,
1082 running_subagents: Vec::new(),
1083 }
1084 }
1085
1086 pub fn to_db(&self, cx: &App) -> Task<DbThread> {
1087 let initial_project_snapshot = self.initial_project_snapshot.clone();
1088 let mut thread = DbThread {
1089 title: self.title(),
1090 messages: self.messages.clone(),
1091 updated_at: self.updated_at,
1092 detailed_summary: self.summary.clone(),
1093 initial_project_snapshot: None,
1094 cumulative_token_usage: self.cumulative_token_usage,
1095 request_token_usage: self.request_token_usage.clone(),
1096 model: self.model.as_ref().map(|model| DbLanguageModel {
1097 provider: model.provider_id().to_string(),
1098 model: model.name().0.to_string(),
1099 }),
1100 profile: Some(self.profile_id.clone()),
1101 imported: self.imported,
1102 };
1103
1104 cx.background_spawn(async move {
1105 let initial_project_snapshot = initial_project_snapshot.await;
1106 thread.initial_project_snapshot = initial_project_snapshot;
1107 thread
1108 })
1109 }
1110
1111 /// Create a snapshot of the current project state including git information and unsaved buffers.
1112 fn project_snapshot(
1113 project: Entity<Project>,
1114 cx: &mut Context<Self>,
1115 ) -> Task<Arc<ProjectSnapshot>> {
1116 let task = project::telemetry_snapshot::TelemetrySnapshot::new(&project, cx);
1117 cx.spawn(async move |_, _| {
1118 let snapshot = task.await;
1119
1120 Arc::new(ProjectSnapshot {
1121 worktree_snapshots: snapshot.worktree_snapshots,
1122 timestamp: Utc::now(),
1123 })
1124 })
1125 }
1126
1127 pub fn project_context(&self) -> &Entity<ProjectContext> {
1128 &self.project_context
1129 }
1130
1131 pub fn project(&self) -> &Entity<Project> {
1132 &self.project
1133 }
1134
1135 pub fn action_log(&self) -> &Entity<ActionLog> {
1136 &self.action_log
1137 }
1138
1139 pub fn is_empty(&self) -> bool {
1140 self.messages.is_empty() && self.title.is_none()
1141 }
1142
1143 pub fn model(&self) -> Option<&Arc<dyn LanguageModel>> {
1144 self.model.as_ref()
1145 }
1146
1147 pub fn set_model(&mut self, model: Arc<dyn LanguageModel>, cx: &mut Context<Self>) {
1148 let old_usage = self.latest_token_usage();
1149 self.model = Some(model);
1150 let new_caps = Self::prompt_capabilities(self.model.as_deref());
1151 let new_usage = self.latest_token_usage();
1152 if old_usage != new_usage {
1153 cx.emit(TokenUsageUpdated(new_usage));
1154 }
1155 self.prompt_capabilities_tx.send(new_caps).log_err();
1156 cx.notify()
1157 }
1158
1159 pub fn summarization_model(&self) -> Option<&Arc<dyn LanguageModel>> {
1160 self.summarization_model.as_ref()
1161 }
1162
1163 pub fn set_summarization_model(
1164 &mut self,
1165 model: Option<Arc<dyn LanguageModel>>,
1166 cx: &mut Context<Self>,
1167 ) {
1168 self.summarization_model = model;
1169 cx.notify()
1170 }
1171
1172 pub fn thinking_enabled(&self) -> bool {
1173 self.thinking_enabled
1174 }
1175
1176 pub fn set_thinking_enabled(&mut self, enabled: bool, cx: &mut Context<Self>) {
1177 self.thinking_enabled = enabled;
1178 cx.notify();
1179 }
1180
1181 pub fn last_message(&self) -> Option<Message> {
1182 if let Some(message) = self.pending_message.clone() {
1183 Some(Message::Agent(message))
1184 } else {
1185 self.messages.last().cloned()
1186 }
1187 }
1188
1189 pub fn add_default_tools(
1190 &mut self,
1191 environment: Rc<dyn ThreadEnvironment>,
1192 cx: &mut Context<Self>,
1193 ) {
1194 let language_registry = self.project.read(cx).languages().clone();
1195 self.add_tool(CopyPathTool::new(self.project.clone()));
1196 self.add_tool(CreateDirectoryTool::new(self.project.clone()));
1197 self.add_tool(DeletePathTool::new(
1198 self.project.clone(),
1199 self.action_log.clone(),
1200 ));
1201 self.add_tool(DiagnosticsTool::new(self.project.clone()));
1202 self.add_tool(EditFileTool::new(
1203 self.project.clone(),
1204 cx.weak_entity(),
1205 language_registry.clone(),
1206 Templates::new(),
1207 ));
1208 self.add_tool(StreamingEditFileTool::new(
1209 self.project.clone(),
1210 cx.weak_entity(),
1211 language_registry,
1212 Templates::new(),
1213 ));
1214 self.add_tool(FetchTool::new(self.project.read(cx).client().http_client()));
1215 self.add_tool(FindPathTool::new(self.project.clone()));
1216 self.add_tool(GrepTool::new(self.project.clone()));
1217 self.add_tool(ListDirectoryTool::new(self.project.clone()));
1218 self.add_tool(MovePathTool::new(self.project.clone()));
1219 self.add_tool(NowTool);
1220 self.add_tool(OpenTool::new(self.project.clone()));
1221 self.add_tool(ReadFileTool::new(
1222 cx.weak_entity(),
1223 self.project.clone(),
1224 self.action_log.clone(),
1225 ));
1226 self.add_tool(SaveFileTool::new(self.project.clone()));
1227 self.add_tool(RestoreFileFromDiskTool::new(self.project.clone()));
1228 self.add_tool(TerminalTool::new(self.project.clone(), environment));
1229 self.add_tool(ThinkingTool);
1230 self.add_tool(WebSearchTool);
1231
1232 if cx.has_flag::<SubagentsFeatureFlag>() && self.depth() < MAX_SUBAGENT_DEPTH {
1233 let parent_tools = self.tools.clone();
1234 self.add_tool(SubagentTool::new(
1235 cx.weak_entity(),
1236 self.project.clone(),
1237 self.project_context.clone(),
1238 self.context_server_registry.clone(),
1239 self.templates.clone(),
1240 self.depth(),
1241 parent_tools,
1242 ));
1243 }
1244 }
1245
1246 pub fn add_tool<T: AgentTool>(&mut self, tool: T) {
1247 self.tools.insert(T::name().into(), tool.erase());
1248 }
1249
1250 pub fn remove_tool(&mut self, name: &str) -> bool {
1251 self.tools.remove(name).is_some()
1252 }
1253
1254 pub fn restrict_tools(&mut self, allowed: &collections::HashSet<SharedString>) {
1255 self.tools.retain(|name, _| allowed.contains(name));
1256 }
1257
1258 pub fn profile(&self) -> &AgentProfileId {
1259 &self.profile_id
1260 }
1261
1262 pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
1263 if self.profile_id == profile_id {
1264 return;
1265 }
1266
1267 self.profile_id = profile_id;
1268
1269 // Swap to the profile's preferred model when available.
1270 if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
1271 self.set_model(model, cx);
1272 }
1273 }
1274
1275 pub fn cancel(&mut self, cx: &mut Context<Self>) -> Task<()> {
1276 for subagent in self.running_subagents.drain(..) {
1277 if let Some(subagent) = subagent.upgrade() {
1278 subagent.update(cx, |thread, cx| thread.cancel(cx)).detach();
1279 }
1280 }
1281
1282 let Some(running_turn) = self.running_turn.take() else {
1283 self.flush_pending_message(cx);
1284 return Task::ready(());
1285 };
1286
1287 let turn_task = running_turn.cancel();
1288
1289 cx.spawn(async move |this, cx| {
1290 turn_task.await;
1291 this.update(cx, |this, cx| {
1292 this.flush_pending_message(cx);
1293 })
1294 .ok();
1295 })
1296 }
1297
1298 pub fn set_has_queued_message(&mut self, has_queued: bool) {
1299 self.has_queued_message = has_queued;
1300 }
1301
1302 pub fn has_queued_message(&self) -> bool {
1303 self.has_queued_message
1304 }
1305
1306 fn update_token_usage(&mut self, update: language_model::TokenUsage, cx: &mut Context<Self>) {
1307 let Some(last_user_message) = self.last_user_message() else {
1308 return;
1309 };
1310
1311 self.request_token_usage
1312 .insert(last_user_message.id.clone(), update);
1313 cx.emit(TokenUsageUpdated(self.latest_token_usage()));
1314 cx.notify();
1315 }
1316
1317 pub fn truncate(&mut self, message_id: UserMessageId, cx: &mut Context<Self>) -> Result<()> {
1318 self.cancel(cx).detach();
1319 // Clear pending message since cancel will try to flush it asynchronously,
1320 // and we don't want that content to be added after we truncate
1321 self.pending_message.take();
1322 let Some(position) = self.messages.iter().position(
1323 |msg| matches!(msg, Message::User(UserMessage { id, .. }) if id == &message_id),
1324 ) else {
1325 return Err(anyhow!("Message not found"));
1326 };
1327
1328 for message in self.messages.drain(position..) {
1329 match message {
1330 Message::User(message) => {
1331 self.request_token_usage.remove(&message.id);
1332 }
1333 Message::Agent(_) | Message::Resume => {}
1334 }
1335 }
1336 self.clear_summary();
1337 cx.notify();
1338 Ok(())
1339 }
1340
1341 pub fn latest_request_token_usage(&self) -> Option<language_model::TokenUsage> {
1342 let last_user_message = self.last_user_message()?;
1343 let tokens = self.request_token_usage.get(&last_user_message.id)?;
1344 Some(*tokens)
1345 }
1346
1347 pub fn latest_token_usage(&self) -> Option<acp_thread::TokenUsage> {
1348 let usage = self.latest_request_token_usage()?;
1349 let model = self.model.clone()?;
1350 Some(acp_thread::TokenUsage {
1351 max_tokens: model.max_token_count(),
1352 used_tokens: usage.total_tokens(),
1353 input_tokens: usage.input_tokens,
1354 output_tokens: usage.output_tokens,
1355 })
1356 }
1357
1358 /// Get the total input token count as of the message before the given message.
1359 ///
1360 /// Returns `None` if:
1361 /// - `target_id` is the first message (no previous message)
1362 /// - The previous message hasn't received a response yet (no usage data)
1363 /// - `target_id` is not found in the messages
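    ///
    /// Illustrative sketch: with user messages `a` then `b` in the thread,
    /// `tokens_before_message(&b.id)` returns the `input_tokens` recorded for `a`'s
    /// request, while `tokens_before_message(&a.id)` returns `None`.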
1364 pub fn tokens_before_message(&self, target_id: &UserMessageId) -> Option<u64> {
1365 let mut previous_user_message_id: Option<&UserMessageId> = None;
1366
1367 for message in &self.messages {
1368 if let Message::User(user_msg) = message {
1369 if &user_msg.id == target_id {
1370 let prev_id = previous_user_message_id?;
1371 let usage = self.request_token_usage.get(prev_id)?;
1372 return Some(usage.input_tokens);
1373 }
1374 previous_user_message_id = Some(&user_msg.id);
1375 }
1376 }
1377 None
1378 }
1379
1380 /// Look up the active profile and resolve its preferred model if one is configured.
1381 fn resolve_profile_model(
1382 profile_id: &AgentProfileId,
1383 cx: &mut Context<Self>,
1384 ) -> Option<Arc<dyn LanguageModel>> {
1385 let selection = AgentSettings::get_global(cx)
1386 .profiles
1387 .get(profile_id)?
1388 .default_model
1389 .clone()?;
1390 Self::resolve_model_from_selection(&selection, cx)
1391 }
1392
1393 /// Translate a stored model selection into the configured model from the registry.
1394 fn resolve_model_from_selection(
1395 selection: &LanguageModelSelection,
1396 cx: &mut Context<Self>,
1397 ) -> Option<Arc<dyn LanguageModel>> {
1398 let selected = SelectedModel {
1399 provider: LanguageModelProviderId::from(selection.provider.0.clone()),
1400 model: LanguageModelId::from(selection.model.clone()),
1401 };
1402 LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
1403 registry
1404 .select_model(&selected, cx)
1405 .map(|configured| configured.model)
1406 })
1407 }
1408
1409 pub fn resume(
1410 &mut self,
1411 cx: &mut Context<Self>,
1412 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
1413 self.messages.push(Message::Resume);
1414 cx.notify();
1415
1416 log::debug!("Total messages in thread: {}", self.messages.len());
1417 self.run_turn(cx)
1418 }
1419
    /// Sending a message results in the model streaming a response, which may include tool calls.
    /// After requesting tools, the model stops and waits for any outstanding tool calls to complete and for their results to be sent back.
    /// The returned channel reports each time the model stops, up until it errors or ends its turn.
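    ///
    /// Illustrative usage sketch (entity and executor plumbing elided; `message_id`
    /// is a `UserMessageId` supplied by the caller):
    ///
    /// ```ignore
    /// let mut events = thread.update(cx, |thread, cx| {
    ///     thread.send(
    ///         message_id,
    ///         [UserMessageContent::Text("Summarize the open buffer".into())],
    ///         cx,
    ///     )
    /// })?;
    /// while let Some(event) = events.next().await {
    ///     // Handle ThreadEvent::AgentText, ToolCall, Stop, ...
    /// }
    /// ```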
1423 pub fn send<T>(
1424 &mut self,
1425 id: UserMessageId,
1426 content: impl IntoIterator<Item = T>,
1427 cx: &mut Context<Self>,
1428 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>>
1429 where
1430 T: Into<UserMessageContent>,
1431 {
1432 let content = content.into_iter().map(Into::into).collect::<Vec<_>>();
1433 log::debug!("Thread::send content: {:?}", content);
1434
1435 self.messages
1436 .push(Message::User(UserMessage { id, content }));
1437 cx.notify();
1438
1439 self.send_existing(cx)
1440 }
1441
1442 pub fn send_existing(
1443 &mut self,
1444 cx: &mut Context<Self>,
1445 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
1446 let model = self.model().context("No language model configured")?;
1447
1448 log::info!("Thread::send called with model: {}", model.name().0);
1449 self.advance_prompt_id();
1450
1451 log::debug!("Total messages in thread: {}", self.messages.len());
1452 self.run_turn(cx)
1453 }
1454
1455 pub fn push_acp_user_block(
1456 &mut self,
1457 id: UserMessageId,
1458 blocks: impl IntoIterator<Item = acp::ContentBlock>,
1459 path_style: PathStyle,
1460 cx: &mut Context<Self>,
1461 ) {
1462 let content = blocks
1463 .into_iter()
1464 .map(|block| UserMessageContent::from_content_block(block, path_style))
1465 .collect::<Vec<_>>();
1466 self.messages
1467 .push(Message::User(UserMessage { id, content }));
1468 cx.notify();
1469 }
1470
1471 pub fn push_acp_agent_block(&mut self, block: acp::ContentBlock, cx: &mut Context<Self>) {
1472 let text = match block {
1473 acp::ContentBlock::Text(text_content) => text_content.text,
1474 acp::ContentBlock::Image(_) => "[image]".to_string(),
1475 acp::ContentBlock::Audio(_) => "[audio]".to_string(),
1476 acp::ContentBlock::ResourceLink(resource_link) => resource_link.uri,
1477 acp::ContentBlock::Resource(resource) => match resource.resource {
1478 acp::EmbeddedResourceResource::TextResourceContents(resource) => resource.uri,
1479 acp::EmbeddedResourceResource::BlobResourceContents(resource) => resource.uri,
1480 _ => "[resource]".to_string(),
1481 },
1482 _ => "[unknown]".to_string(),
1483 };
1484
1485 self.messages.push(Message::Agent(AgentMessage {
1486 content: vec![AgentMessageContent::Text(text)],
1487 ..Default::default()
1488 }));
1489 cx.notify();
1490 }
1491
1492 #[cfg(feature = "eval")]
1493 pub fn proceed(
1494 &mut self,
1495 cx: &mut Context<Self>,
1496 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
1497 self.run_turn(cx)
1498 }
1499
1500 fn run_turn(
1501 &mut self,
1502 cx: &mut Context<Self>,
1503 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
1504 // Flush the old pending message synchronously before cancelling,
1505 // to avoid a race where the detached cancel task might flush the NEW
1506 // turn's pending message instead of the old one.
1507 self.flush_pending_message(cx);
1508 self.cancel(cx).detach();
1509
1510 let model = self.model.clone().context("No language model configured")?;
1511 let profile = AgentSettings::get_global(cx)
1512 .profiles
1513 .get(&self.profile_id)
1514 .context("Profile not found")?;
1515 let (events_tx, events_rx) = mpsc::unbounded::<Result<ThreadEvent>>();
1516 let event_stream = ThreadEventStream(events_tx);
1517 let message_ix = self.messages.len().saturating_sub(1);
1518 self.clear_summary();
1519 let (cancellation_tx, mut cancellation_rx) = watch::channel(false);
1520 self.running_turn = Some(RunningTurn {
1521 event_stream: event_stream.clone(),
1522 tools: self.enabled_tools(profile, &model, cx),
1523 cancellation_tx,
1524 _task: cx.spawn(async move |this, cx| {
1525 log::debug!("Starting agent turn execution");
1526
1527 let turn_result = Self::run_turn_internal(
1528 &this,
1529 model,
1530 &event_stream,
1531 cancellation_rx.clone(),
1532 cx,
1533 )
1534 .await;
1535
1536 // Check if we were cancelled - if so, cancel() already took running_turn
1537 // and we shouldn't touch it (it might be a NEW turn now)
1538 let was_cancelled = *cancellation_rx.borrow();
1539 if was_cancelled {
1540 log::debug!("Turn was cancelled, skipping cleanup");
1541 return;
1542 }
1543
1544 _ = this.update(cx, |this, cx| this.flush_pending_message(cx));
1545
1546 match turn_result {
1547 Ok(()) => {
1548 log::debug!("Turn execution completed");
1549 event_stream.send_stop(acp::StopReason::EndTurn);
1550 }
1551 Err(error) => {
1552 log::error!("Turn execution failed: {:?}", error);
1553 match error.downcast::<CompletionError>() {
1554 Ok(CompletionError::Refusal) => {
1555 event_stream.send_stop(acp::StopReason::Refusal);
1556 _ = this.update(cx, |this, _| this.messages.truncate(message_ix));
1557 }
1558 Ok(CompletionError::MaxTokens) => {
1559 event_stream.send_stop(acp::StopReason::MaxTokens);
1560 }
1561 Ok(CompletionError::Other(error)) | Err(error) => {
1562 event_stream.send_error(error);
1563 }
1564 }
1565 }
1566 }
1567
1568 _ = this.update(cx, |this, _| this.running_turn.take());
1569 }),
1570 });
1571 Ok(events_rx)
1572 }
1573
1574 async fn run_turn_internal(
1575 this: &WeakEntity<Self>,
1576 model: Arc<dyn LanguageModel>,
1577 event_stream: &ThreadEventStream,
1578 mut cancellation_rx: watch::Receiver<bool>,
1579 cx: &mut AsyncApp,
1580 ) -> Result<()> {
1581 let mut attempt = 0;
1582 let mut intent = CompletionIntent::UserPrompt;
1583 loop {
1584 let request =
1585 this.update(cx, |this, cx| this.build_completion_request(intent, cx))??;
1586
1587 telemetry::event!(
1588 "Agent Thread Completion",
1589 thread_id = this.read_with(cx, |this, _| this.id.to_string())?,
1590 prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?,
1591 model = model.telemetry_id(),
1592 model_provider = model.provider_id().to_string(),
1593 attempt
1594 );
1595
1596 log::debug!("Calling model.stream_completion, attempt {}", attempt);
1597
1598 let (mut events, mut error) = match model.stream_completion(request, cx).await {
1599 Ok(events) => (events.fuse(), None),
1600 Err(err) => (stream::empty().boxed().fuse(), Some(err)),
1601 };
1602 let mut tool_results = FuturesUnordered::new();
1603 let mut cancelled = false;
1604 loop {
1605 // Race between getting the first event and cancellation
1606 let first_event = futures::select! {
1607 event = events.next().fuse() => event,
1608 _ = cancellation_rx.changed().fuse() => {
1609 if *cancellation_rx.borrow() {
1610 cancelled = true;
1611 break;
1612 }
1613 continue;
1614 }
1615 };
1616 let Some(first_event) = first_event else {
1617 break;
1618 };
1619
1620 // Collect all immediately available events to process as a batch
1621 let mut batch = vec![first_event];
1622 while let Some(event) = events.next().now_or_never().flatten() {
1623 batch.push(event);
1624 }
1625
1626 // Process the batch in a single update
1627 let batch_result = this.update(cx, |this, cx| {
1628 let mut batch_tool_results = Vec::new();
1629 let mut batch_error = None;
1630
1631 for event in batch {
1632 log::trace!("Received completion event: {:?}", event);
1633 match event {
1634 Ok(event) => {
1635 match this.handle_completion_event(
1636 event,
1637 event_stream,
1638 cancellation_rx.clone(),
1639 cx,
1640 ) {
1641 Ok(Some(task)) => batch_tool_results.push(task),
1642 Ok(None) => {}
1643 Err(err) => {
1644 batch_error = Some(err);
1645 break;
1646 }
1647 }
1648 }
1649 Err(err) => {
1650 batch_error = Some(err.into());
1651 break;
1652 }
1653 }
1654 }
1655
1656 cx.notify();
1657 (batch_tool_results, batch_error)
1658 })?;
1659
1660 tool_results.extend(batch_result.0);
1661 if let Some(err) = batch_result.1 {
1662 error = Some(err.downcast()?);
1663 break;
1664 }
1665 }
1666
1667 // Drop the stream to release the rate limit permit before tool execution.
1668 // The stream holds a semaphore guard that limits concurrent requests.
1669 // Without this, the permit would be held during potentially long-running
1670 // tool execution, which could cause deadlocks when tools spawn subagents
1671 // that need their own permits.
1672 drop(events);
1673
1674 let end_turn = tool_results.is_empty();
1675 while let Some(tool_result) = tool_results.next().await {
1676 log::debug!("Tool finished {:?}", tool_result);
1677
1678 event_stream.update_tool_call_fields(
1679 &tool_result.tool_use_id,
1680 acp::ToolCallUpdateFields::new()
1681 .status(if tool_result.is_error {
1682 acp::ToolCallStatus::Failed
1683 } else {
1684 acp::ToolCallStatus::Completed
1685 })
1686 .raw_output(tool_result.output.clone()),
1687 );
1688 this.update(cx, |this, _cx| {
1689 this.pending_message()
1690 .tool_results
1691 .insert(tool_result.tool_use_id.clone(), tool_result);
1692 })?;
1693 }
1694
1695 this.update(cx, |this, cx| {
1696 this.flush_pending_message(cx);
1697 if this.title.is_none() && this.pending_title_generation.is_none() {
1698 this.generate_title(cx);
1699 }
1700 })?;
1701
1702 if cancelled {
1703 log::debug!("Turn cancelled by user, exiting");
1704 return Ok(());
1705 }
1706
1707 if let Some(error) = error {
1708 attempt += 1;
1709 let retry = this.update(cx, |this, cx| {
1710 let user_store = this.user_store.read(cx);
1711 this.handle_completion_error(error, attempt, user_store.plan())
1712 })??;
1713 let timer = cx.background_executor().timer(retry.duration);
1714 event_stream.send_retry(retry);
1715 timer.await;
1716 this.update(cx, |this, _cx| {
1717 if let Some(Message::Agent(message)) = this.messages.last() {
1718 if message.tool_results.is_empty() {
1719 intent = CompletionIntent::UserPrompt;
1720 this.messages.push(Message::Resume);
1721 }
1722 }
1723 })?;
1724 } else if end_turn {
1725 return Ok(());
1726 } else {
1727 let has_queued = this.update(cx, |this, _| this.has_queued_message())?;
1728 if has_queued {
1729 log::debug!("Queued message found, ending turn at message boundary");
1730 return Ok(());
1731 }
1732 intent = CompletionIntent::ToolResults;
1733 attempt = 0;
1734 }
1735 }
1736 }
1737
1738 fn handle_completion_error(
1739 &mut self,
1740 error: LanguageModelCompletionError,
1741 attempt: u8,
1742 plan: Option<Plan>,
1743 ) -> Result<acp_thread::RetryStatus> {
1744 let Some(model) = self.model.as_ref() else {
1745 return Err(anyhow!(error));
1746 };
1747
1748 let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID {
1749 plan.is_some()
1750 } else {
1751 true
1752 };
1753
1754 if !auto_retry {
1755 return Err(anyhow!(error));
1756 }
1757
1758 let Some(strategy) = Self::retry_strategy_for(&error) else {
1759 return Err(anyhow!(error));
1760 };
1761
1762 let max_attempts = match &strategy {
1763 RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts,
1764 RetryStrategy::Fixed { max_attempts, .. } => *max_attempts,
1765 };
1766
1767 if attempt > max_attempts {
1768 return Err(anyhow!(error));
1769 }
1770
1771 let delay = match &strategy {
1772 RetryStrategy::ExponentialBackoff { initial_delay, .. } => {
1773 let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32);
1774 Duration::from_secs(delay_secs)
1775 }
1776 RetryStrategy::Fixed { delay, .. } => *delay,
1777 };
1778 log::debug!("Retry attempt {attempt} with delay {delay:?}");
1779
1780 Ok(acp_thread::RetryStatus {
1781 last_error: error.to_string().into(),
1782 attempt: attempt as usize,
1783 max_attempts: max_attempts as usize,
1784 started_at: Instant::now(),
1785 duration: delay,
1786 })
1787 }
1788
1789 /// A helper method that's called on every streamed completion event.
1790 /// Returns an optional tool result task, which the main agentic loop will
1791 /// send back to the model when it resolves.
1792 fn handle_completion_event(
1793 &mut self,
1794 event: LanguageModelCompletionEvent,
1795 event_stream: &ThreadEventStream,
1796 cancellation_rx: watch::Receiver<bool>,
1797 cx: &mut Context<Self>,
1798 ) -> Result<Option<Task<LanguageModelToolResult>>> {
1799 log::trace!("Handling streamed completion event: {:?}", event);
1800 use LanguageModelCompletionEvent::*;
1801
1802 match event {
1803 StartMessage { .. } => {
1804 self.flush_pending_message(cx);
1805 self.pending_message = Some(AgentMessage::default());
1806 }
1807 Text(new_text) => self.handle_text_event(new_text, event_stream),
1808 Thinking { text, signature } => {
1809 self.handle_thinking_event(text, signature, event_stream)
1810 }
1811 RedactedThinking { data } => self.handle_redacted_thinking_event(data),
1812 ReasoningDetails(details) => {
1813 let last_message = self.pending_message();
                // Store the last non-empty reasoning_details (overwriting earlier ones)
                // so we keep the encrypted reasoning with signatures rather than the
                // earlier text-only reasoning.
1816 if let serde_json::Value::Array(ref arr) = details {
1817 if !arr.is_empty() {
1818 last_message.reasoning_details = Some(details);
1819 }
1820 } else {
1821 last_message.reasoning_details = Some(details);
1822 }
1823 }
1824 ToolUse(tool_use) => {
1825 return Ok(self.handle_tool_use_event(tool_use, event_stream, cancellation_rx, cx));
1826 }
1827 ToolUseJsonParseError {
1828 id,
1829 tool_name,
1830 raw_input,
1831 json_parse_error,
1832 } => {
1833 return Ok(Some(Task::ready(
1834 self.handle_tool_use_json_parse_error_event(
1835 id,
1836 tool_name,
1837 raw_input,
1838 json_parse_error,
1839 ),
1840 )));
1841 }
1842 UsageUpdate(usage) => {
1843 telemetry::event!(
1844 "Agent Thread Completion Usage Updated",
1845 thread_id = self.id.to_string(),
1846 prompt_id = self.prompt_id.to_string(),
1847 model = self.model.as_ref().map(|m| m.telemetry_id()),
1848 model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()),
1849 input_tokens = usage.input_tokens,
1850 output_tokens = usage.output_tokens,
1851 cache_creation_input_tokens = usage.cache_creation_input_tokens,
1852 cache_read_input_tokens = usage.cache_read_input_tokens,
1853 );
1854 self.update_token_usage(usage, cx);
1855 }
1856 Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()),
1857 Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()),
1858 Stop(StopReason::ToolUse | StopReason::EndTurn) => {}
1859 Started | Queued { .. } => {}
1860 }
1861
1862 Ok(None)
1863 }
1864
1865 fn handle_text_event(&mut self, new_text: String, event_stream: &ThreadEventStream) {
1866 event_stream.send_text(&new_text);
1867
1868 let last_message = self.pending_message();
1869 if let Some(AgentMessageContent::Text(text)) = last_message.content.last_mut() {
1870 text.push_str(&new_text);
1871 } else {
1872 last_message
1873 .content
1874 .push(AgentMessageContent::Text(new_text));
1875 }
1876 }
1877
1878 fn handle_thinking_event(
1879 &mut self,
1880 new_text: String,
1881 new_signature: Option<String>,
1882 event_stream: &ThreadEventStream,
1883 ) {
1884 event_stream.send_thinking(&new_text);
1885
1886 let last_message = self.pending_message();
1887 if let Some(AgentMessageContent::Thinking { text, signature }) =
1888 last_message.content.last_mut()
1889 {
1890 text.push_str(&new_text);
1891 *signature = new_signature.or(signature.take());
1892 } else {
1893 last_message.content.push(AgentMessageContent::Thinking {
1894 text: new_text,
1895 signature: new_signature,
1896 });
1897 }
1898 }
1899
1900 fn handle_redacted_thinking_event(&mut self, data: String) {
1901 let last_message = self.pending_message();
1902 last_message
1903 .content
1904 .push(AgentMessageContent::RedactedThinking(data));
1905 }
1906
1907 fn handle_tool_use_event(
1908 &mut self,
1909 tool_use: LanguageModelToolUse,
1910 event_stream: &ThreadEventStream,
1911 cancellation_rx: watch::Receiver<bool>,
1912 cx: &mut Context<Self>,
1913 ) -> Option<Task<LanguageModelToolResult>> {
1914 cx.notify();
1915
1916 let tool = self.tool(tool_use.name.as_ref());
1917 let mut title = SharedString::from(&tool_use.name);
1918 let mut kind = acp::ToolKind::Other;
1919 if let Some(tool) = tool.as_ref() {
1920 title = tool.initial_title(tool_use.input.clone(), cx);
1921 kind = tool.kind();
1922 }
1923
1924 // Ensure the last message ends in the current tool use
1925 let last_message = self.pending_message();
1926 let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| {
1927 if let AgentMessageContent::ToolUse(last_tool_use) = content {
1928 if last_tool_use.id == tool_use.id {
1929 *last_tool_use = tool_use.clone();
1930 false
1931 } else {
1932 true
1933 }
1934 } else {
1935 true
1936 }
1937 });
1938
1939 if push_new_tool_use {
1940 event_stream.send_tool_call(
1941 &tool_use.id,
1942 &tool_use.name,
1943 title,
1944 kind,
1945 tool_use.input.clone(),
1946 );
1947 last_message
1948 .content
1949 .push(AgentMessageContent::ToolUse(tool_use.clone()));
1950 } else {
1951 event_stream.update_tool_call_fields(
1952 &tool_use.id,
1953 acp::ToolCallUpdateFields::new()
1954 .title(title.as_str())
1955 .kind(kind)
1956 .raw_input(tool_use.input.clone()),
1957 );
1958 }
1959
1960 if !tool_use.is_input_complete {
1961 return None;
1962 }
1963
1964 let Some(tool) = tool else {
1965 let content = format!("No tool named {} exists", tool_use.name);
1966 return Some(Task::ready(LanguageModelToolResult {
1967 content: LanguageModelToolResultContent::Text(Arc::from(content)),
1968 tool_use_id: tool_use.id,
1969 tool_name: tool_use.name,
1970 is_error: true,
1971 output: None,
1972 }));
1973 };
1974
1975 let fs = self.project.read(cx).fs().clone();
1976 let tool_event_stream = ToolCallEventStream::new(
1977 tool_use.id.clone(),
1978 event_stream.clone(),
1979 Some(fs),
1980 cancellation_rx,
1981 );
1982 tool_event_stream.update_fields(
1983 acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress),
1984 );
1985 let supports_images = self.model().is_some_and(|model| model.supports_images());
1986 let tool_result = tool.run(tool_use.input, tool_event_stream, cx);
1987 log::debug!("Running tool {}", tool_use.name);
1988 Some(cx.foreground_executor().spawn(async move {
1989 let tool_result = tool_result.await.and_then(|output| {
1990 if let LanguageModelToolResultContent::Image(_) = &output.llm_output
1991 && !supports_images
1992 {
1993 return Err(anyhow!(
1994 "Attempted to read an image, but this model doesn't support it.",
1995 ));
1996 }
1997 Ok(output)
1998 });
1999
2000 match tool_result {
2001 Ok(output) => LanguageModelToolResult {
2002 tool_use_id: tool_use.id,
2003 tool_name: tool_use.name,
2004 is_error: false,
2005 content: output.llm_output,
2006 output: Some(output.raw_output),
2007 },
2008 Err(error) => LanguageModelToolResult {
2009 tool_use_id: tool_use.id,
2010 tool_name: tool_use.name,
2011 is_error: true,
2012 content: LanguageModelToolResultContent::Text(Arc::from(error.to_string())),
2013 output: Some(error.to_string().into()),
2014 },
2015 }
2016 }))
2017 }
2018
2019 fn handle_tool_use_json_parse_error_event(
2020 &mut self,
2021 tool_use_id: LanguageModelToolUseId,
2022 tool_name: Arc<str>,
2023 raw_input: Arc<str>,
2024 json_parse_error: String,
2025 ) -> LanguageModelToolResult {
2026 let tool_output = format!("Error parsing input JSON: {json_parse_error}");
2027 LanguageModelToolResult {
2028 tool_use_id,
2029 tool_name,
2030 is_error: true,
2031 content: LanguageModelToolResultContent::Text(tool_output.into()),
2032 output: Some(serde_json::Value::String(raw_input.to_string())),
2033 }
2034 }
2035
2036 pub fn title(&self) -> SharedString {
2037 self.title.clone().unwrap_or("New Thread".into())
2038 }
2039
2040 pub fn is_generating_summary(&self) -> bool {
2041 self.pending_summary_generation.is_some()
2042 }
2043
2044 pub fn is_generating_title(&self) -> bool {
2045 self.pending_title_generation.is_some()
2046 }
2047
2048 pub fn summary(&mut self, cx: &mut Context<Self>) -> Shared<Task<Option<SharedString>>> {
2049 if let Some(summary) = self.summary.as_ref() {
2050 return Task::ready(Some(summary.clone())).shared();
2051 }
2052 if let Some(task) = self.pending_summary_generation.clone() {
2053 return task;
2054 }
2055 let Some(model) = self.summarization_model.clone() else {
2056 log::error!("No summarization model available");
2057 return Task::ready(None).shared();
2058 };
2059 let mut request = LanguageModelRequest {
2060 intent: Some(CompletionIntent::ThreadContextSummarization),
2061 temperature: AgentSettings::temperature_for_model(&model, cx),
2062 ..Default::default()
2063 };
2064
2065 for message in &self.messages {
2066 request.messages.extend(message.to_request());
2067 }
2068
2069 request.messages.push(LanguageModelRequestMessage {
2070 role: Role::User,
2071 content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()],
2072 cache: false,
2073 reasoning_details: None,
2074 });
2075
2076 let task = cx
2077 .spawn(async move |this, cx| {
2078 let mut summary = String::new();
2079 let mut messages = model.stream_completion(request, cx).await.log_err()?;
2080 while let Some(event) = messages.next().await {
2081 let event = event.log_err()?;
2082 let text = match event {
2083 LanguageModelCompletionEvent::Text(text) => text,
2084 _ => continue,
2085 };
2086
2087 let mut lines = text.lines();
2088 summary.extend(lines.next());
2089 }
2090
2091 log::debug!("Setting summary: {}", summary);
2092 let summary = SharedString::from(summary);
2093
2094 this.update(cx, |this, cx| {
2095 this.summary = Some(summary.clone());
2096 this.pending_summary_generation = None;
2097 cx.notify()
2098 })
2099 .ok()?;
2100
2101 Some(summary)
2102 })
2103 .shared();
2104 self.pending_summary_generation = Some(task.clone());
2105 task
2106 }
2107
2108 pub fn generate_title(&mut self, cx: &mut Context<Self>) {
2109 let Some(model) = self.summarization_model.clone() else {
2110 return;
2111 };
2112
2113 log::debug!(
2114 "Generating title with model: {:?}",
2115 self.summarization_model.as_ref().map(|model| model.name())
2116 );
2117 let mut request = LanguageModelRequest {
2118 intent: Some(CompletionIntent::ThreadSummarization),
2119 temperature: AgentSettings::temperature_for_model(&model, cx),
2120 ..Default::default()
2121 };
2122
2123 for message in &self.messages {
2124 request.messages.extend(message.to_request());
2125 }
2126
2127 request.messages.push(LanguageModelRequestMessage {
2128 role: Role::User,
2129 content: vec![SUMMARIZE_THREAD_PROMPT.into()],
2130 cache: false,
2131 reasoning_details: None,
2132 });
2133 self.pending_title_generation = Some(cx.spawn(async move |this, cx| {
2134 let mut title = String::new();
2135
2136 let generate = async {
2137 let mut messages = model.stream_completion(request, cx).await?;
2138 while let Some(event) = messages.next().await {
2139 let event = event?;
2140 let text = match event {
2141 LanguageModelCompletionEvent::Text(text) => text,
2142 _ => continue,
2143 };
2144
2145 let mut lines = text.lines();
2146 title.extend(lines.next());
2147
2148 // Stop if the LLM generated multiple lines.
2149 if lines.next().is_some() {
2150 break;
2151 }
2152 }
2153 anyhow::Ok(())
2154 };
2155
            if generate.await.context("failed to generate title").log_err().is_some() {
2157 _ = this.update(cx, |this, cx| this.set_title(title.into(), cx));
2158 }
2159 _ = this.update(cx, |this, _| this.pending_title_generation = None);
2160 }));
2161 }
2162
2163 pub fn set_title(&mut self, title: SharedString, cx: &mut Context<Self>) {
2164 self.pending_title_generation = None;
2165 if Some(&title) != self.title.as_ref() {
2166 self.title = Some(title);
2167 cx.emit(TitleUpdated);
2168 cx.notify();
2169 }
2170 }
2171
2172 fn clear_summary(&mut self) {
2173 self.summary = None;
2174 self.pending_summary_generation = None;
2175 }
2176
2177 fn last_user_message(&self) -> Option<&UserMessage> {
2178 self.messages
2179 .iter()
2180 .rev()
2181 .find_map(|message| match message {
2182 Message::User(user_message) => Some(user_message),
2183 Message::Agent(_) => None,
2184 Message::Resume => None,
2185 })
2186 }
2187
2188 fn pending_message(&mut self) -> &mut AgentMessage {
2189 self.pending_message.get_or_insert_default()
2190 }
2191
2192 fn flush_pending_message(&mut self, cx: &mut Context<Self>) {
2193 let Some(mut message) = self.pending_message.take() else {
2194 return;
2195 };
2196
2197 if message.content.is_empty() {
2198 return;
2199 }
2200
2201 for content in &message.content {
2202 let AgentMessageContent::ToolUse(tool_use) = content else {
2203 continue;
2204 };
2205
2206 if !message.tool_results.contains_key(&tool_use.id) {
2207 message.tool_results.insert(
2208 tool_use.id.clone(),
2209 LanguageModelToolResult {
2210 tool_use_id: tool_use.id.clone(),
2211 tool_name: tool_use.name.clone(),
2212 is_error: true,
2213 content: LanguageModelToolResultContent::Text(TOOL_CANCELED_MESSAGE.into()),
2214 output: None,
2215 },
2216 );
2217 }
2218 }
2219
2220 self.messages.push(Message::Agent(message));
2221 self.updated_at = Utc::now();
2222 self.clear_summary();
2223 cx.notify()
2224 }
2225
2226 pub(crate) fn build_completion_request(
2227 &self,
2228 completion_intent: CompletionIntent,
2229 cx: &App,
2230 ) -> Result<LanguageModelRequest> {
2231 let model = self.model().context("No language model configured")?;
2232 let tools = if let Some(turn) = self.running_turn.as_ref() {
2233 turn.tools
2234 .iter()
2235 .filter_map(|(tool_name, tool)| {
2236 log::trace!("Including tool: {}", tool_name);
2237 Some(LanguageModelRequestTool {
2238 name: tool_name.to_string(),
2239 description: tool.description().to_string(),
2240 input_schema: tool.input_schema(model.tool_input_format()).log_err()?,
2241 })
2242 })
2243 .collect::<Vec<_>>()
2244 } else {
2245 Vec::new()
2246 };
2247
2248 log::debug!("Building completion request");
2249 log::debug!("Completion intent: {:?}", completion_intent);
2250
2251 let available_tools: Vec<_> = self
2252 .running_turn
2253 .as_ref()
2254 .map(|turn| turn.tools.keys().cloned().collect())
2255 .unwrap_or_default();
2256
2257 log::debug!("Request includes {} tools", available_tools.len());
2258 let messages = self.build_request_messages(available_tools, cx);
2259 log::debug!("Request will include {} messages", messages.len());
2260
2261 let request = LanguageModelRequest {
2262 thread_id: Some(self.id.to_string()),
2263 prompt_id: Some(self.prompt_id.to_string()),
2264 intent: Some(completion_intent),
2265 messages,
2266 tools,
2267 tool_choice: None,
2268 stop: Vec::new(),
2269 temperature: AgentSettings::temperature_for_model(model, cx),
2270 thinking_allowed: self.thinking_enabled,
2271 };
2272
2273 log::debug!("Completion request built successfully");
2274 Ok(request)
2275 }
2276
2277 fn enabled_tools(
2278 &self,
2279 profile: &AgentProfileSettings,
2280 model: &Arc<dyn LanguageModel>,
2281 cx: &App,
2282 ) -> BTreeMap<SharedString, Arc<dyn AnyAgentTool>> {
2283 fn truncate(tool_name: &SharedString) -> SharedString {
2284 if tool_name.len() > MAX_TOOL_NAME_LENGTH {
2285 let mut truncated = tool_name.to_string();
2286 truncated.truncate(MAX_TOOL_NAME_LENGTH);
2287 truncated.into()
2288 } else {
2289 tool_name.clone()
2290 }
2291 }
2292
2293 let use_streaming_edit_tool = false;
2294
2295 let mut tools = self
2296 .tools
2297 .iter()
2298 .filter_map(|(tool_name, tool)| {
2299 // For streaming_edit_file, check profile against "edit_file" since that's what users configure
2300 let profile_tool_name = if tool_name == "streaming_edit_file" {
2301 "edit_file"
2302 } else {
2303 tool_name.as_ref()
2304 };
2305
2306 if tool.supports_provider(&model.provider_id())
2307 && profile.is_tool_enabled(profile_tool_name)
2308 {
2309 match (tool_name.as_ref(), use_streaming_edit_tool) {
2310 ("streaming_edit_file", false) | ("edit_file", true) => None,
2311 ("streaming_edit_file", true) => {
2312 // Expose streaming tool as "edit_file"
2313 Some((SharedString::from("edit_file"), tool.clone()))
2314 }
2315 _ => Some((truncate(tool_name), tool.clone())),
2316 }
2317 } else {
2318 None
2319 }
2320 })
2321 .collect::<BTreeMap<_, _>>();
2322
2323 let mut context_server_tools = Vec::new();
2324 let mut seen_tools = tools.keys().cloned().collect::<HashSet<_>>();
2325 let mut duplicate_tool_names = HashSet::default();
2326 for (server_id, server_tools) in self.context_server_registry.read(cx).servers() {
2327 for (tool_name, tool) in server_tools {
2328 if profile.is_context_server_tool_enabled(&server_id.0, &tool_name) {
2329 let tool_name = truncate(tool_name);
2330 if !seen_tools.insert(tool_name.clone()) {
2331 duplicate_tool_names.insert(tool_name.clone());
2332 }
2333 context_server_tools.push((server_id.clone(), tool_name, tool.clone()));
2334 }
2335 }
2336 }
2337
2338 // When there are duplicate tool names, disambiguate by prefixing them
2339 // with the server ID. In the rare case there isn't enough space for the
2340 // disambiguated tool name, keep only the last tool with this name.
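        // For example, two servers `github` and `gitlab` that both expose a
        // `search` tool end up registered as `github_search` and `gitlab_search`.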
2341 for (server_id, tool_name, tool) in context_server_tools {
2342 if duplicate_tool_names.contains(&tool_name) {
2343 let available = MAX_TOOL_NAME_LENGTH.saturating_sub(tool_name.len());
2344 if available >= 2 {
2345 let mut disambiguated = server_id.0.to_string();
2346 disambiguated.truncate(available - 1);
2347 disambiguated.push('_');
2348 disambiguated.push_str(&tool_name);
2349 tools.insert(disambiguated.into(), tool.clone());
2350 } else {
2351 tools.insert(tool_name, tool.clone());
2352 }
2353 } else {
2354 tools.insert(tool_name, tool.clone());
2355 }
2356 }
2357
2358 tools
2359 }
2360
2361 fn tool(&self, name: &str) -> Option<Arc<dyn AnyAgentTool>> {
2362 self.running_turn.as_ref()?.tools.get(name).cloned()
2363 }
2364
2365 pub fn has_tool(&self, name: &str) -> bool {
2366 self.running_turn
2367 .as_ref()
2368 .is_some_and(|turn| turn.tools.contains_key(name))
2369 }
2370
2371 #[cfg(any(test, feature = "test-support"))]
2372 pub fn has_registered_tool(&self, name: &str) -> bool {
2373 self.tools.contains_key(name)
2374 }
2375
2376 pub fn registered_tool_names(&self) -> Vec<SharedString> {
2377 self.tools.keys().cloned().collect()
2378 }
2379
2380 pub fn register_running_subagent(&mut self, subagent: WeakEntity<Thread>) {
2381 self.running_subagents.push(subagent);
2382 }
2383
2384 pub fn unregister_running_subagent(&mut self, subagent: &WeakEntity<Thread>) {
2385 self.running_subagents
2386 .retain(|s| s.entity_id() != subagent.entity_id());
2387 }
2388
2389 pub fn running_subagent_count(&self) -> usize {
2390 self.running_subagents
2391 .iter()
2392 .filter(|s| s.upgrade().is_some())
2393 .count()
2394 }
2395
2396 pub fn is_subagent(&self) -> bool {
2397 self.subagent_context.is_some()
2398 }
2399
2400 pub fn depth(&self) -> u8 {
2401 self.subagent_context.as_ref().map(|c| c.depth).unwrap_or(0)
2402 }
2403
2404 pub fn is_turn_complete(&self) -> bool {
2405 self.running_turn.is_none()
2406 }
2407
2408 pub fn submit_user_message(
2409 &mut self,
2410 content: impl Into<String>,
2411 cx: &mut Context<Self>,
2412 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
2413 let content = content.into();
2414 self.messages.push(Message::User(UserMessage {
2415 id: UserMessageId::new(),
2416 content: vec![UserMessageContent::Text(content)],
2417 }));
2418 cx.notify();
2419 self.send_existing(cx)
2420 }
2421
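    /// Cancels the in-progress turn and submits this subagent's
    /// `context_low_prompt` as a new user message, asking it to wrap up.
    /// Errors if this thread is not a subagent.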
2422 pub fn interrupt_for_summary(
2423 &mut self,
2424 cx: &mut Context<Self>,
2425 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
2426 let context = self
2427 .subagent_context
2428 .as_ref()
2429 .context("Not a subagent thread")?;
2430 let prompt = context.context_low_prompt.clone();
2431 self.cancel(cx).detach();
2432 self.submit_user_message(prompt, cx)
2433 }
2434
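    /// Submits this subagent's `summary_prompt` as a new user message, asking
    /// it to produce its final summary. Errors if this thread is not a subagent.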
2435 pub fn request_final_summary(
2436 &mut self,
2437 cx: &mut Context<Self>,
2438 ) -> Result<mpsc::UnboundedReceiver<Result<ThreadEvent>>> {
2439 let context = self
2440 .subagent_context
2441 .as_ref()
2442 .context("Not a subagent thread")?;
2443 let prompt = context.summary_prompt.clone();
2444 self.submit_user_message(prompt, cx)
2445 }
2446
2447 fn build_request_messages(
2448 &self,
2449 available_tools: Vec<SharedString>,
2450 cx: &App,
2451 ) -> Vec<LanguageModelRequestMessage> {
2452 log::trace!(
2453 "Building request messages from {} thread messages",
2454 self.messages.len()
2455 );
2456
2457 let system_prompt = SystemPromptTemplate {
2458 project: self.project_context.read(cx),
2459 available_tools,
2460 model_name: self.model.as_ref().map(|m| m.name().0.to_string()),
2461 }
2462 .render(&self.templates)
2463 .context("failed to build system prompt")
2464 .expect("Invalid template");
2465 let mut messages = vec![LanguageModelRequestMessage {
2466 role: Role::System,
2467 content: vec![system_prompt.into()],
2468 cache: false,
2469 reasoning_details: None,
2470 }];
2471 for message in &self.messages {
2472 messages.extend(message.to_request());
2473 }
2474
2475 if let Some(last_message) = messages.last_mut() {
2476 last_message.cache = true;
2477 }
2478
2479 if let Some(message) = self.pending_message.as_ref() {
2480 messages.extend(message.to_request());
2481 }
2482
2483 messages
2484 }
2485
2486 pub fn to_markdown(&self) -> String {
2487 let mut markdown = String::new();
2488 for (ix, message) in self.messages.iter().enumerate() {
2489 if ix > 0 {
2490 markdown.push('\n');
2491 }
2492 markdown.push_str(&message.to_markdown());
2493 }
2494
2495 if let Some(message) = self.pending_message.as_ref() {
2496 markdown.push('\n');
2497 markdown.push_str(&message.to_markdown());
2498 }
2499
2500 markdown
2501 }
2502
2503 fn advance_prompt_id(&mut self) {
2504 self.prompt_id = PromptId::new();
2505 }
2506
2507 fn retry_strategy_for(error: &LanguageModelCompletionError) -> Option<RetryStrategy> {
2508 use LanguageModelCompletionError::*;
2509 use http_client::StatusCode;
2510
2511 // General strategy here:
2512 // - If retrying won't help (e.g. invalid API key or payload too large), return None so we don't retry at all.
        // - If it's a time-based issue (e.g. server overloaded, rate limit exceeded), retry up to MAX_RETRY_ATTEMPTS times, honoring any server-provided retry-after delay.
2514 // - If it's an issue that *might* be fixed by retrying (e.g. internal server error), retry up to 3 times.
2515 match error {
2516 HttpResponseError {
2517 status_code: StatusCode::TOO_MANY_REQUESTS,
2518 ..
2519 } => Some(RetryStrategy::ExponentialBackoff {
2520 initial_delay: BASE_RETRY_DELAY,
2521 max_attempts: MAX_RETRY_ATTEMPTS,
2522 }),
2523 ServerOverloaded { retry_after, .. } | RateLimitExceeded { retry_after, .. } => {
2524 Some(RetryStrategy::Fixed {
2525 delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
2526 max_attempts: MAX_RETRY_ATTEMPTS,
2527 })
2528 }
2529 UpstreamProviderError {
2530 status,
2531 retry_after,
2532 ..
2533 } => match *status {
2534 StatusCode::TOO_MANY_REQUESTS | StatusCode::SERVICE_UNAVAILABLE => {
2535 Some(RetryStrategy::Fixed {
2536 delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
2537 max_attempts: MAX_RETRY_ATTEMPTS,
2538 })
2539 }
2540 StatusCode::INTERNAL_SERVER_ERROR => Some(RetryStrategy::Fixed {
2541 delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
2542 // Internal Server Error could be anything, retry up to 3 times.
2543 max_attempts: 3,
2544 }),
2545 status => {
2546 // There is no StatusCode variant for the unofficial HTTP 529 ("The service is overloaded"),
2547 // but we frequently get them in practice. See https://http.dev/529
2548 if status.as_u16() == 529 {
2549 Some(RetryStrategy::Fixed {
2550 delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
2551 max_attempts: MAX_RETRY_ATTEMPTS,
2552 })
2553 } else {
2554 Some(RetryStrategy::Fixed {
2555 delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
2556 max_attempts: 2,
2557 })
2558 }
2559 }
2560 },
2561 ApiInternalServerError { .. } => Some(RetryStrategy::Fixed {
2562 delay: BASE_RETRY_DELAY,
2563 max_attempts: 3,
2564 }),
2565 ApiReadResponseError { .. }
2566 | HttpSend { .. }
2567 | DeserializeResponse { .. }
2568 | BadRequestFormat { .. } => Some(RetryStrategy::Fixed {
2569 delay: BASE_RETRY_DELAY,
2570 max_attempts: 3,
2571 }),
2572 // Retrying these errors definitely shouldn't help.
2573 HttpResponseError {
2574 status_code:
2575 StatusCode::PAYLOAD_TOO_LARGE | StatusCode::FORBIDDEN | StatusCode::UNAUTHORIZED,
2576 ..
2577 }
2578 | AuthenticationError { .. }
2579 | PermissionError { .. }
2580 | NoApiKey { .. }
2581 | ApiEndpointNotFound { .. }
2582 | PromptTooLarge { .. } => None,
            // These errors might be transient, so retry them once
2584 SerializeRequest { .. } | BuildRequestBody { .. } => Some(RetryStrategy::Fixed {
2585 delay: BASE_RETRY_DELAY,
2586 max_attempts: 1,
2587 }),
            // Retry all other 4xx and 5xx errors up to 3 times.
2589 HttpResponseError { status_code, .. }
2590 if status_code.is_client_error() || status_code.is_server_error() =>
2591 {
2592 Some(RetryStrategy::Fixed {
2593 delay: BASE_RETRY_DELAY,
2594 max_attempts: 3,
2595 })
2596 }
2597 Other(err) if err.is::<language_model::PaymentRequiredError>() => {
2598 // Retrying won't help for Payment Required errors.
2599 None
2600 }
            // For any other errors, retry up to 2 times in case they're transient.
2602 HttpResponseError { .. } | Other(..) => Some(RetryStrategy::Fixed {
2603 delay: BASE_RETRY_DELAY,
2604 max_attempts: 2,
2605 }),
2606 }
2607 }
2608}
2609
2610struct RunningTurn {
    /// Holds the task that handles agent interaction until the end of the turn.
    /// It survives across multiple requests as the model performs tool calls and
    /// we run those tools and report their results back to the model.
2614 _task: Task<()>,
2615 /// The current event stream for the running turn. Used to report a final
2616 /// cancellation event if we cancel the turn.
2617 event_stream: ThreadEventStream,
2618 /// The tools that were enabled for this turn.
2619 tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
2620 /// Sender to signal tool cancellation. When cancel is called, this is
2621 /// set to true so all tools can detect user-initiated cancellation.
2622 cancellation_tx: watch::Sender<bool>,
2623}
2624
2625impl RunningTurn {
2626 fn cancel(mut self) -> Task<()> {
2627 log::debug!("Cancelling in progress turn");
2628 self.cancellation_tx.send(true).ok();
2629 self.event_stream.send_canceled();
2630 self._task
2631 }
2632}
2633
2634pub struct TokenUsageUpdated(pub Option<acp_thread::TokenUsage>);
2635
2636impl EventEmitter<TokenUsageUpdated> for Thread {}
2637
2638pub struct TitleUpdated;
2639
2640impl EventEmitter<TitleUpdated> for Thread {}
2641
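/// A statically typed tool that the agent can invoke during a turn. Implementors
/// provide a JSON-schema'd `Input`, an `Output` that converts into
/// [`LanguageModelToolResultContent`], and a `run` method; [`AgentTool::erase`]
/// then produces the type-erased [`AnyAgentTool`] that gets registered on a thread.
///
/// A minimal sketch of an implementation. `EchoTool` and its input type are
/// hypothetical examples, not part of this crate:
///
/// ```ignore
/// /// Echoes the provided text back to the model.
/// #[derive(Serialize, Deserialize, JsonSchema)]
/// struct EchoToolInput {
///     /// The text to echo.
///     text: String,
/// }
///
/// struct EchoTool;
///
/// impl AgentTool for EchoTool {
///     type Input = EchoToolInput;
///     type Output = LanguageModelToolResultContent;
///
///     fn name() -> &'static str {
///         "echo"
///     }
///
///     fn kind() -> acp::ToolKind {
///         acp::ToolKind::Other
///     }
///
///     fn initial_title(
///         &self,
///         _input: Result<Self::Input, serde_json::Value>,
///         _cx: &mut App,
///     ) -> SharedString {
///         "Echo".into()
///     }
///
///     fn run(
///         self: Arc<Self>,
///         input: Self::Input,
///         _event_stream: ToolCallEventStream,
///         _cx: &mut App,
///     ) -> Task<Result<Self::Output>> {
///         Task::ready(Ok(LanguageModelToolResultContent::Text(input.text.into())))
///     }
/// }
/// ```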
2642pub trait AgentTool
2643where
2644 Self: 'static + Sized,
2645{
2646 type Input: for<'de> Deserialize<'de> + Serialize + JsonSchema;
2647 type Output: for<'de> Deserialize<'de> + Serialize + Into<LanguageModelToolResultContent>;
2648
2649 fn name() -> &'static str;
2650
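    /// The description of the tool that is sent to the model. By default this
    /// is taken from the `description` field of the input type's JSON schema,
    /// which `schemars` populates from the doc comment on [`Self::Input`].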
2651 fn description() -> SharedString {
2652 let schema = schemars::schema_for!(Self::Input);
2653 SharedString::new(
2654 schema
2655 .get("description")
2656 .and_then(|description| description.as_str())
2657 .unwrap_or_default(),
2658 )
2659 }
2660
2661 fn kind() -> acp::ToolKind;
2662
2663 /// The initial tool title to display. Can be updated during the tool run.
2664 fn initial_title(
2665 &self,
2666 input: Result<Self::Input, serde_json::Value>,
2667 cx: &mut App,
2668 ) -> SharedString;
2669
2670 /// Returns the JSON schema that describes the tool's input.
2671 fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema {
2672 language_model::tool_schema::root_schema_for::<Self::Input>(format)
2673 }
2674
    /// Some tools only work with certain providers (e.g. for billing or other reasons).
    /// This lets a tool declare whether it's compatible with the given provider or
    /// whether it should be filtered out.
2677 fn supports_provider(_provider: &LanguageModelProviderId) -> bool {
2678 true
2679 }
2680
2681 /// Runs the tool with the provided input.
2682 fn run(
2683 self: Arc<Self>,
2684 input: Self::Input,
2685 event_stream: ToolCallEventStream,
2686 cx: &mut App,
2687 ) -> Task<Result<Self::Output>>;
2688
2689 /// Emits events for a previous execution of the tool.
2690 fn replay(
2691 &self,
2692 _input: Self::Input,
2693 _output: Self::Output,
2694 _event_stream: ToolCallEventStream,
2695 _cx: &mut App,
2696 ) -> Result<()> {
2697 Ok(())
2698 }
2699
2700 fn erase(self) -> Arc<dyn AnyAgentTool> {
2701 Arc::new(Erased(Arc::new(self)))
2702 }
2703
2704 /// Create a new instance of this tool bound to a different thread.
2705 /// This is used when creating subagents, so that tools like EditFileTool
2706 /// that hold a thread reference will use the subagent's thread instead
2707 /// of the parent's thread.
2708 /// Returns None if the tool doesn't need rebinding (most tools).
2709 fn rebind_thread(&self, _new_thread: WeakEntity<Thread>) -> Option<Arc<dyn AnyAgentTool>> {
2710 None
2711 }
2712}
2713
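/// Wrapper produced by [`AgentTool::erase`], adapting a statically typed
/// [`AgentTool`] to the object-safe [`AnyAgentTool`] interface.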
2714pub struct Erased<T>(T);
2715
2716pub struct AgentToolOutput {
2717 pub llm_output: LanguageModelToolResultContent,
2718 pub raw_output: serde_json::Value,
2719}
2720
2721pub trait AnyAgentTool {
2722 fn name(&self) -> SharedString;
2723 fn description(&self) -> SharedString;
2724 fn kind(&self) -> acp::ToolKind;
2725 fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString;
2726 fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
2727 fn supports_provider(&self, _provider: &LanguageModelProviderId) -> bool {
2728 true
2729 }
2730 fn run(
2731 self: Arc<Self>,
2732 input: serde_json::Value,
2733 event_stream: ToolCallEventStream,
2734 cx: &mut App,
2735 ) -> Task<Result<AgentToolOutput>>;
2736 fn replay(
2737 &self,
2738 input: serde_json::Value,
2739 output: serde_json::Value,
2740 event_stream: ToolCallEventStream,
2741 cx: &mut App,
2742 ) -> Result<()>;
2743 /// Create a new instance of this tool bound to a different thread.
2744 /// This is used when creating subagents, so that tools like EditFileTool
2745 /// that hold a thread reference will use the subagent's thread instead
2746 /// of the parent's thread.
2747 /// Returns None if the tool doesn't need rebinding (most tools).
2748 fn rebind_thread(&self, _new_thread: WeakEntity<Thread>) -> Option<Arc<dyn AnyAgentTool>> {
2749 None
2750 }
2751}
2752
2753impl<T> AnyAgentTool for Erased<Arc<T>>
2754where
2755 T: AgentTool,
2756{
2757 fn name(&self) -> SharedString {
2758 T::name().into()
2759 }
2760
2761 fn description(&self) -> SharedString {
2762 T::description()
2763 }
2764
2765 fn kind(&self) -> agent_client_protocol::ToolKind {
2766 T::kind()
2767 }
2768
    fn initial_title(&self, input: serde_json::Value, cx: &mut App) -> SharedString {
        let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input);
        self.0.initial_title(parsed_input, cx)
    }
2772 }
2773
2774 fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
2775 let mut json = serde_json::to_value(T::input_schema(format))?;
2776 language_model::tool_schema::adapt_schema_to_format(&mut json, format)?;
2777 Ok(json)
2778 }
2779
2780 fn supports_provider(&self, provider: &LanguageModelProviderId) -> bool {
2781 T::supports_provider(provider)
2782 }
2783
2784 fn run(
2785 self: Arc<Self>,
2786 input: serde_json::Value,
2787 event_stream: ToolCallEventStream,
2788 cx: &mut App,
2789 ) -> Task<Result<AgentToolOutput>> {
2790 cx.spawn(async move |cx| {
2791 let input = serde_json::from_value(input)?;
2792 let output = cx
2793 .update(|cx| self.0.clone().run(input, event_stream, cx))
2794 .await?;
2795 let raw_output = serde_json::to_value(&output)?;
2796 Ok(AgentToolOutput {
2797 llm_output: output.into(),
2798 raw_output,
2799 })
2800 })
2801 }
2802
2803 fn replay(
2804 &self,
2805 input: serde_json::Value,
2806 output: serde_json::Value,
2807 event_stream: ToolCallEventStream,
2808 cx: &mut App,
2809 ) -> Result<()> {
2810 let input = serde_json::from_value(input)?;
2811 let output = serde_json::from_value(output)?;
2812 self.0.replay(input, output, event_stream, cx)
2813 }
2814
2815 fn rebind_thread(&self, new_thread: WeakEntity<Thread>) -> Option<Arc<dyn AnyAgentTool>> {
2816 self.0.rebind_thread(new_thread)
2817 }
2818}
2819
2820#[derive(Clone)]
2821struct ThreadEventStream(mpsc::UnboundedSender<Result<ThreadEvent>>);
2822
2823impl ThreadEventStream {
2824 fn send_user_message(&self, message: &UserMessage) {
2825 self.0
2826 .unbounded_send(Ok(ThreadEvent::UserMessage(message.clone())))
2827 .ok();
2828 }
2829
2830 fn send_text(&self, text: &str) {
2831 self.0
2832 .unbounded_send(Ok(ThreadEvent::AgentText(text.to_string())))
2833 .ok();
2834 }
2835
2836 fn send_thinking(&self, text: &str) {
2837 self.0
2838 .unbounded_send(Ok(ThreadEvent::AgentThinking(text.to_string())))
2839 .ok();
2840 }
2841
2842 fn send_tool_call(
2843 &self,
2844 id: &LanguageModelToolUseId,
2845 tool_name: &str,
2846 title: SharedString,
2847 kind: acp::ToolKind,
2848 input: serde_json::Value,
2849 ) {
2850 self.0
2851 .unbounded_send(Ok(ThreadEvent::ToolCall(Self::initial_tool_call(
2852 id,
2853 tool_name,
2854 title.to_string(),
2855 kind,
2856 input,
2857 ))))
2858 .ok();
2859 }
2860
2861 fn initial_tool_call(
2862 id: &LanguageModelToolUseId,
2863 tool_name: &str,
2864 title: String,
2865 kind: acp::ToolKind,
2866 input: serde_json::Value,
2867 ) -> acp::ToolCall {
2868 acp::ToolCall::new(id.to_string(), title)
2869 .kind(kind)
2870 .raw_input(input)
2871 .meta(acp_thread::meta_with_tool_name(tool_name))
2872 }
2873
2874 fn update_tool_call_fields(
2875 &self,
2876 tool_use_id: &LanguageModelToolUseId,
2877 fields: acp::ToolCallUpdateFields,
2878 ) {
2879 self.0
2880 .unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
2881 acp::ToolCallUpdate::new(tool_use_id.to_string(), fields).into(),
2882 )))
2883 .ok();
2884 }
2885
2886 fn send_retry(&self, status: acp_thread::RetryStatus) {
2887 self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok();
2888 }
2889
2890 fn send_stop(&self, reason: acp::StopReason) {
2891 self.0.unbounded_send(Ok(ThreadEvent::Stop(reason))).ok();
2892 }
2893
2894 fn send_canceled(&self) {
2895 self.0
2896 .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled)))
2897 .ok();
2898 }
2899
2900 fn send_error(&self, error: impl Into<anyhow::Error>) {
2901 self.0.unbounded_send(Err(error.into())).ok();
2902 }
2903}
2904
2905#[derive(Clone)]
2906pub struct ToolCallEventStream {
2907 tool_use_id: LanguageModelToolUseId,
2908 stream: ThreadEventStream,
2909 fs: Option<Arc<dyn Fs>>,
2910 cancellation_rx: watch::Receiver<bool>,
2911}
2912
2913impl ToolCallEventStream {
2914 #[cfg(any(test, feature = "test-support"))]
2915 pub fn test() -> (Self, ToolCallEventStreamReceiver) {
2916 let (stream, receiver, _cancellation_tx) = Self::test_with_cancellation();
2917 (stream, receiver)
2918 }
2919
2920 #[cfg(any(test, feature = "test-support"))]
2921 pub fn test_with_cancellation() -> (Self, ToolCallEventStreamReceiver, watch::Sender<bool>) {
2922 let (events_tx, events_rx) = mpsc::unbounded::<Result<ThreadEvent>>();
2923 let (cancellation_tx, cancellation_rx) = watch::channel(false);
2924
2925 let stream = ToolCallEventStream::new(
2926 "test_id".into(),
2927 ThreadEventStream(events_tx),
2928 None,
2929 cancellation_rx,
2930 );
2931
2932 (
2933 stream,
2934 ToolCallEventStreamReceiver(events_rx),
2935 cancellation_tx,
2936 )
2937 }
2938
    /// Signal cancellation through the given sender. Only available in tests.
2940 #[cfg(any(test, feature = "test-support"))]
2941 pub fn signal_cancellation_with_sender(cancellation_tx: &mut watch::Sender<bool>) {
2942 cancellation_tx.send(true).ok();
2943 }
2944
2945 fn new(
2946 tool_use_id: LanguageModelToolUseId,
2947 stream: ThreadEventStream,
2948 fs: Option<Arc<dyn Fs>>,
2949 cancellation_rx: watch::Receiver<bool>,
2950 ) -> Self {
2951 Self {
2952 tool_use_id,
2953 stream,
2954 fs,
2955 cancellation_rx,
2956 }
2957 }
2958
2959 /// Returns a future that resolves when the user cancels the tool call.
2960 /// Tools should select on this alongside their main work to detect user cancellation.
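    ///
    /// A hedged sketch of a tool selecting on cancellation alongside its work
    /// (the `do_work` future is illustrative):
    ///
    /// ```ignore
    /// futures::select_biased! {
    ///     _ = event_stream.cancelled_by_user().fuse() => Err(anyhow!("canceled by user")),
    ///     result = do_work().fuse() => result,
    /// }
    /// ```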
2961 pub fn cancelled_by_user(&self) -> impl std::future::Future<Output = ()> + '_ {
2962 let mut rx = self.cancellation_rx.clone();
2963 async move {
2964 loop {
2965 if *rx.borrow() {
2966 return;
2967 }
2968 if rx.changed().await.is_err() {
2969 // Sender dropped, will never be cancelled
2970 std::future::pending::<()>().await;
2971 }
2972 }
2973 }
2974 }
2975
2976 /// Returns true if the user has cancelled this tool call.
2977 /// This is useful for checking cancellation state after an operation completes,
2978 /// to determine if the completion was due to user cancellation.
2979 pub fn was_cancelled_by_user(&self) -> bool {
2980 *self.cancellation_rx.clone().borrow()
2981 }
2982
2983 pub fn tool_use_id(&self) -> &LanguageModelToolUseId {
2984 &self.tool_use_id
2985 }
2986
2987 pub fn update_fields(&self, fields: acp::ToolCallUpdateFields) {
2988 self.stream
2989 .update_tool_call_fields(&self.tool_use_id, fields);
2990 }
2991
2992 pub fn update_diff(&self, diff: Entity<acp_thread::Diff>) {
2993 self.stream
2994 .0
2995 .unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
2996 acp_thread::ToolCallUpdateDiff {
2997 id: acp::ToolCallId::new(self.tool_use_id.to_string()),
2998 diff,
2999 }
3000 .into(),
3001 )))
3002 .ok();
3003 }
3004
3005 pub fn update_subagent_thread(&self, thread: Entity<acp_thread::AcpThread>) {
3006 self.stream
3007 .0
3008 .unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
3009 acp_thread::ToolCallUpdateSubagentThread {
3010 id: acp::ToolCallId::new(self.tool_use_id.to_string()),
3011 thread,
3012 }
3013 .into(),
3014 )))
3015 .ok();
3016 }
3017
3018 /// Authorize a third-party tool (e.g., MCP tool from a context server).
3019 ///
3020 /// Unlike built-in tools, third-party tools don't support pattern-based permissions.
3021 /// They only support `default_mode` (allow/deny/confirm) per tool.
3022 ///
3023 /// Uses the dropdown authorization flow with two granularities:
3024 /// - "Always for <display_name> MCP tool" → sets `tools.<tool_id>.default_mode = "allow"` or "deny"
3025 /// - "Only this time" → allow/deny once
3026 pub fn authorize_third_party_tool(
3027 &self,
3028 title: impl Into<String>,
3029 tool_id: String,
3030 display_name: String,
3031 cx: &mut App,
3032 ) -> Task<Result<()>> {
3033 let settings = agent_settings::AgentSettings::get_global(cx);
3034
3035 let decision = decide_permission_from_settings(&tool_id, "", &settings);
3036
3037 match decision {
3038 ToolPermissionDecision::Allow => return Task::ready(Ok(())),
3039 ToolPermissionDecision::Deny(reason) => return Task::ready(Err(anyhow!(reason))),
3040 ToolPermissionDecision::Confirm => {}
3041 }
3042
3043 let (response_tx, response_rx) = oneshot::channel();
3044 self.stream
3045 .0
3046 .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization(
3047 ToolCallAuthorization {
3048 tool_call: acp::ToolCallUpdate::new(
3049 self.tool_use_id.to_string(),
3050 acp::ToolCallUpdateFields::new().title(title.into()),
3051 ),
3052 options: acp_thread::PermissionOptions::Dropdown(vec![
3053 acp_thread::PermissionOptionChoice {
3054 allow: acp::PermissionOption::new(
3055 acp::PermissionOptionId::new(format!(
3056 "always_allow_mcp:{}",
3057 tool_id
3058 )),
3059 format!("Always for {} MCP tool", display_name),
3060 acp::PermissionOptionKind::AllowAlways,
3061 ),
3062 deny: acp::PermissionOption::new(
3063 acp::PermissionOptionId::new(format!(
3064 "always_deny_mcp:{}",
3065 tool_id
3066 )),
3067 format!("Always for {} MCP tool", display_name),
3068 acp::PermissionOptionKind::RejectAlways,
3069 ),
3070 },
3071 acp_thread::PermissionOptionChoice {
3072 allow: acp::PermissionOption::new(
3073 acp::PermissionOptionId::new("allow"),
3074 "Only this time",
3075 acp::PermissionOptionKind::AllowOnce,
3076 ),
3077 deny: acp::PermissionOption::new(
3078 acp::PermissionOptionId::new("deny"),
3079 "Only this time",
3080 acp::PermissionOptionKind::RejectOnce,
3081 ),
3082 },
3083 ]),
3084 response: response_tx,
3085 context: None,
3086 },
3087 )))
3088 .ok();
3089
3090 let fs = self.fs.clone();
3091 cx.spawn(async move |cx| {
3092 let response_str = response_rx.await?.0.to_string();
3093
3094 if response_str == format!("always_allow_mcp:{}", tool_id) {
3095 if let Some(fs) = fs.clone() {
3096 cx.update(|cx| {
3097 update_settings_file(fs, cx, move |settings, _| {
3098 settings
3099 .agent
3100 .get_or_insert_default()
3101 .set_tool_default_mode(&tool_id, ToolPermissionMode::Allow);
3102 });
3103 });
3104 }
3105 return Ok(());
3106 }
3107 if response_str == format!("always_deny_mcp:{}", tool_id) {
3108 if let Some(fs) = fs.clone() {
3109 cx.update(|cx| {
3110 update_settings_file(fs, cx, move |settings, _| {
3111 settings
3112 .agent
3113 .get_or_insert_default()
3114 .set_tool_default_mode(&tool_id, ToolPermissionMode::Deny);
3115 });
3116 });
3117 }
3118 return Err(anyhow!("Permission to run tool denied by user"));
3119 }
3120
3121 if response_str == "allow" {
3122 return Ok(());
3123 }
3124
3125 Err(anyhow!("Permission to run tool denied by user"))
3126 })
3127 }
3128
3129 pub fn authorize(
3130 &self,
3131 title: impl Into<String>,
3132 context: ToolPermissionContext,
3133 cx: &mut App,
3134 ) -> Task<Result<()>> {
3137 let options = context.build_permission_options();
3138
3139 let (response_tx, response_rx) = oneshot::channel();
3140 self.stream
3141 .0
3142 .unbounded_send(Ok(ThreadEvent::ToolCallAuthorization(
3143 ToolCallAuthorization {
3144 tool_call: acp::ToolCallUpdate::new(
3145 self.tool_use_id.to_string(),
3146 acp::ToolCallUpdateFields::new().title(title.into()),
3147 ),
3148 options,
3149 response: response_tx,
3150 context: Some(context),
3151 },
3152 )))
3153 .ok();
3154
3155 let fs = self.fs.clone();
3156 cx.spawn(async move |cx| {
3157 let response_str = response_rx.await?.0.to_string();
3158
3159 // Handle "always allow tool" - e.g., "always_allow:terminal"
3160 if let Some(tool) = response_str.strip_prefix("always_allow:") {
3161 if let Some(fs) = fs.clone() {
3162 let tool = tool.to_string();
3163 cx.update(|cx| {
3164 update_settings_file(fs, cx, move |settings, _| {
3165 settings
3166 .agent
3167 .get_or_insert_default()
3168 .set_tool_default_mode(&tool, ToolPermissionMode::Allow);
3169 });
3170 });
3171 }
3172 return Ok(());
3173 }
3174
3175 // Handle "always deny tool" - e.g., "always_deny:terminal"
3176 if let Some(tool) = response_str.strip_prefix("always_deny:") {
3177 if let Some(fs) = fs.clone() {
3178 let tool = tool.to_string();
3179 cx.update(|cx| {
3180 update_settings_file(fs, cx, move |settings, _| {
3181 settings
3182 .agent
3183 .get_or_insert_default()
3184 .set_tool_default_mode(&tool, ToolPermissionMode::Deny);
3185 });
3186 });
3187 }
3188 return Err(anyhow!("Permission to run tool denied by user"));
3189 }
3190
3191 // Handle "always allow pattern" - e.g., "always_allow_pattern:terminal:^cargo\s"
3192 if response_str.starts_with("always_allow_pattern:") {
3193 let parts: Vec<&str> = response_str.splitn(3, ':').collect();
3194 if parts.len() == 3 {
3195 let pattern_tool_name = parts[1].to_string();
3196 let pattern = parts[2].to_string();
3197 if let Some(fs) = fs.clone() {
3198 cx.update(|cx| {
3199 update_settings_file(fs, cx, move |settings, _| {
3200 settings
3201 .agent
3202 .get_or_insert_default()
3203 .add_tool_allow_pattern(&pattern_tool_name, pattern);
3204 });
3205 });
3206 }
3207 }
3208 return Ok(());
3209 }
3210
3211 // Handle "always deny pattern" - e.g., "always_deny_pattern:terminal:^cargo\s"
3212 if response_str.starts_with("always_deny_pattern:") {
3213 let parts: Vec<&str> = response_str.splitn(3, ':').collect();
3214 if parts.len() == 3 {
3215 let pattern_tool_name = parts[1].to_string();
3216 let pattern = parts[2].to_string();
3217 if let Some(fs) = fs.clone() {
3218 cx.update(|cx| {
3219 update_settings_file(fs, cx, move |settings, _| {
3220 settings
3221 .agent
3222 .get_or_insert_default()
3223 .add_tool_deny_pattern(&pattern_tool_name, pattern);
3224 });
3225 });
3226 }
3227 }
3228 return Err(anyhow!("Permission to run tool denied by user"));
3229 }
3230
3231 // Handle simple "allow" (allow once)
3232 if response_str == "allow" {
3233 return Ok(());
3234 }
3235
3236 // Handle simple "deny" (deny once)
3237 Err(anyhow!("Permission to run tool denied by user"))
3238 })
3239 }
3240}
3241
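/// Test-only receiver for the events emitted through a [`ToolCallEventStream`],
/// with helpers that panic unless the next event has the expected shape.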
3242#[cfg(any(test, feature = "test-support"))]
3243pub struct ToolCallEventStreamReceiver(mpsc::UnboundedReceiver<Result<ThreadEvent>>);
3244
3245#[cfg(any(test, feature = "test-support"))]
3246impl ToolCallEventStreamReceiver {
3247 pub async fn expect_authorization(&mut self) -> ToolCallAuthorization {
3248 let event = self.0.next().await;
3249 if let Some(Ok(ThreadEvent::ToolCallAuthorization(auth))) = event {
3250 auth
3251 } else {
3252 panic!("Expected ToolCallAuthorization but got: {:?}", event);
3253 }
3254 }
3255
3256 pub async fn expect_update_fields(&mut self) -> acp::ToolCallUpdateFields {
3257 let event = self.0.next().await;
3258 if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
3259 update,
3260 )))) = event
3261 {
3262 update.fields
3263 } else {
3264 panic!("Expected update fields but got: {:?}", event);
3265 }
3266 }
3267
3268 pub async fn expect_diff(&mut self) -> Entity<acp_thread::Diff> {
3269 let event = self.0.next().await;
3270 if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateDiff(
3271 update,
3272 )))) = event
3273 {
3274 update.diff
3275 } else {
3276 panic!("Expected diff but got: {:?}", event);
3277 }
3278 }
3279
3280 pub async fn expect_terminal(&mut self) -> Entity<acp_thread::Terminal> {
3281 let event = self.0.next().await;
3282 if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateTerminal(
3283 update,
3284 )))) = event
3285 {
3286 update.terminal
3287 } else {
3288 panic!("Expected terminal but got: {:?}", event);
3289 }
3290 }
3291}
3292
3293#[cfg(any(test, feature = "test-support"))]
3294impl std::ops::Deref for ToolCallEventStreamReceiver {
3295 type Target = mpsc::UnboundedReceiver<Result<ThreadEvent>>;
3296
3297 fn deref(&self) -> &Self::Target {
3298 &self.0
3299 }
3300}
3301
3302#[cfg(any(test, feature = "test-support"))]
3303impl std::ops::DerefMut for ToolCallEventStreamReceiver {
3304 fn deref_mut(&mut self) -> &mut Self::Target {
3305 &mut self.0
3306 }
3307}
3308
3309impl From<&str> for UserMessageContent {
3310 fn from(text: &str) -> Self {
3311 Self::Text(text.into())
3312 }
3313}
3314
3315impl UserMessageContent {
3316 pub fn from_content_block(value: acp::ContentBlock, path_style: PathStyle) -> Self {
3317 match value {
3318 acp::ContentBlock::Text(text_content) => Self::Text(text_content.text),
3319 acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)),
3320 acp::ContentBlock::Audio(_) => {
3321 // TODO
3322 Self::Text("[audio]".to_string())
3323 }
3324 acp::ContentBlock::ResourceLink(resource_link) => {
3325 match MentionUri::parse(&resource_link.uri, path_style) {
3326 Ok(uri) => Self::Mention {
3327 uri,
3328 content: String::new(),
3329 },
3330 Err(err) => {
3331 log::error!("Failed to parse mention link: {}", err);
3332 Self::Text(format!("[{}]({})", resource_link.name, resource_link.uri))
3333 }
3334 }
3335 }
3336 acp::ContentBlock::Resource(resource) => match resource.resource {
3337 acp::EmbeddedResourceResource::TextResourceContents(resource) => {
3338 match MentionUri::parse(&resource.uri, path_style) {
3339 Ok(uri) => Self::Mention {
3340 uri,
3341 content: resource.text,
3342 },
3343 Err(err) => {
3344 log::error!("Failed to parse mention link: {}", err);
3345 Self::Text(
3346 MarkdownCodeBlock {
3347 tag: &resource.uri,
3348 text: &resource.text,
3349 }
3350 .to_string(),
3351 )
3352 }
3353 }
3354 }
3355 acp::EmbeddedResourceResource::BlobResourceContents(_) => {
3356 // TODO
3357 Self::Text("[blob]".to_string())
3358 }
3359 other => {
3360 log::warn!("Unexpected content type: {:?}", other);
3361 Self::Text("[unknown]".to_string())
3362 }
3363 },
3364 other => {
3365 log::warn!("Unexpected content type: {:?}", other);
3366 Self::Text("[unknown]".to_string())
3367 }
3368 }
3369 }
3370}
3371
3372impl From<UserMessageContent> for acp::ContentBlock {
3373 fn from(content: UserMessageContent) -> Self {
3374 match content {
3375 UserMessageContent::Text(text) => text.into(),
3376 UserMessageContent::Image(image) => {
3377 acp::ContentBlock::Image(acp::ImageContent::new(image.source, "image/png"))
3378 }
3379 UserMessageContent::Mention { uri, content } => acp::ContentBlock::Resource(
3380 acp::EmbeddedResource::new(acp::EmbeddedResourceResource::TextResourceContents(
3381 acp::TextResourceContents::new(content, uri.to_uri().to_string()),
3382 )),
3383 ),
3384 }
3385 }
3386}
3387
3388fn convert_image(image_content: acp::ImageContent) -> LanguageModelImage {
3389 LanguageModelImage {
3390 source: image_content.data.into(),
3391 size: None,
3392 }
3393}