1use std::cell::{Ref, RefCell};
2use std::path::{Path, PathBuf};
3use std::rc::Rc;
4use std::sync::{Arc, Mutex};
5
6use agent_settings::{AgentProfileId, CompletionMode};
7use anyhow::{Context as _, Result, anyhow};
8use assistant_tool::{ToolId, ToolWorkingSet};
9use chrono::{DateTime, Utc};
10use collections::HashMap;
11use context_server::ContextServerId;
12use futures::channel::{mpsc, oneshot};
13use futures::future::{self, BoxFuture, Shared};
14use futures::{FutureExt as _, StreamExt as _};
15use gpui::{
16 App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString,
17 Subscription, Task, prelude::*,
18};
19
20use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
21use project::context_server_store::{ContextServerStatus, ContextServerStore};
22use project::{Project, ProjectItem, ProjectPath, Worktree};
23use prompt_store::{
24 ProjectContext, PromptBuilder, PromptId, PromptStore, PromptsUpdatedEvent, RulesFileContext,
25 UserRulesContext, WorktreeContext,
26};
27use serde::{Deserialize, Serialize};
28use ui::Window;
29use util::ResultExt as _;
30
31use crate::context_server_tool::ContextServerTool;
32use crate::thread::{
33 DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
34};
35use indoc::indoc;
36use sqlez::{
37 bindable::{Bind, Column},
38 connection::Connection,
39 statement::Statement,
40};
41
/// On-disk encoding of the `data` column in the `threads` table.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum DataType {
    /// Plain UTF-8 JSON.
    #[serde(rename = "json")]
    Json,
    /// zstd-compressed JSON.
    #[serde(rename = "zstd")]
    Zstd,
}
49
50impl Bind for DataType {
51 fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
52 let value = match self {
53 DataType::Json => "json",
54 DataType::Zstd => "zstd",
55 };
56 value.bind(statement, start_index)
57 }
58}
59
60impl Column for DataType {
61 fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
62 let (value, next_index) = String::column(statement, start_index)?;
63 let data_type = match value.as_str() {
64 "json" => DataType::Json,
65 "zstd" => DataType::Zstd,
66 _ => anyhow::bail!("Unknown data type: {}", value),
67 };
68 Ok((data_type, next_index))
69 }
70}
71
/// Rules-file names recognized in a worktree root, in priority order: the
/// first one found (see `load_worktree_rules_file`) wins.
// `&str` instead of `&'static str`: the 'static lifetime is implied for
// references in a `const` item (clippy::redundant_static_lifetimes).
const RULES_FILE_NAMES: [&str; 8] = [
    ".rules",
    ".cursorrules",
    ".windsurfrules",
    ".clinerules",
    ".github/copilot-instructions.md",
    "CLAUDE.md",
    "AGENT.md",
    "AGENTS.md",
];
82
/// Initializes module-level global state; currently just the threads database.
pub fn init(cx: &mut App) {
    ThreadsDatabase::init(cx);
}
86
/// A system prompt shared by all threads created by this ThreadStore
#[derive(Clone, Default)]
pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);

impl SharedProjectContext {
    /// Borrows the current project context. `None` until the first
    /// system-prompt load completes (see `ThreadStore::reload_system_prompt`).
    pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
        self.0.borrow()
    }
}
96
/// Alias for the context-editor-based store of "text threads".
pub type TextThreadStore = assistant_context_editor::ContextStore;
98
/// Loads, saves, and tracks agent threads for one project, and keeps the
/// shared system-prompt context up to date as worktrees and prompts change.
pub struct ThreadStore {
    project: Entity<Project>,
    tools: Entity<ToolWorkingSet>,
    prompt_builder: Arc<PromptBuilder>,
    prompt_store: Option<Entity<PromptStore>>,
    // Tool ids registered per context server, so they can be unregistered
    // when that server stops or errors.
    context_server_tool_ids: HashMap<ContextServerId, Vec<ToolId>>,
    // Metadata of all persisted threads, in no particular order.
    threads: Vec<SerializedThreadMetadata>,
    project_context: SharedProjectContext,
    // Nudges `_reload_system_prompt_task` to rebuild the system prompt.
    reload_system_prompt_tx: mpsc::Sender<()>,
    _reload_system_prompt_task: Task<()>,
    _subscriptions: Vec<Subscription>,
}
111
/// Event emitted when a worktree rules file or a default user rule fails to
/// load; the system prompt is still built without the failed item.
pub struct RulesLoadingError {
    pub message: SharedString,
}

impl EventEmitter<RulesLoadingError> for ThreadStore {}
117
118impl ThreadStore {
    /// Creates a `ThreadStore` and resolves only after the initial system
    /// prompt load has finished, so callers see a populated project context.
    pub fn load(
        project: Entity<Project>,
        tools: Entity<ToolWorkingSet>,
        prompt_store: Option<Entity<PromptStore>>,
        prompt_builder: Arc<PromptBuilder>,
        cx: &mut App,
    ) -> Task<Result<Entity<Self>>> {
        cx.spawn(async move |cx| {
            let (thread_store, ready_rx) = cx.update(|cx| {
                // `cx.new` only returns the entity, so smuggle the readiness
                // receiver out of the constructor through this local.
                let mut option_ready_rx = None;
                let thread_store = cx.new(|cx| {
                    let (thread_store, ready_rx) =
                        Self::new(project, tools, prompt_builder, prompt_store, cx);
                    option_ready_rx = Some(ready_rx);
                    thread_store
                });
                (thread_store, option_ready_rx.take().unwrap())
            })?;
            ready_rx.await?;
            Ok(thread_store)
        })
    }
141
    /// Builds the store, wires subscriptions, and spawns the task that
    /// serializes system-prompt reloads. Returns the store plus a receiver
    /// that fires once the first prompt load completes.
    fn new(
        project: Entity<Project>,
        tools: Entity<ToolWorkingSet>,
        prompt_builder: Arc<PromptBuilder>,
        prompt_store: Option<Entity<PromptStore>>,
        cx: &mut Context<Self>,
    ) -> (Self, oneshot::Receiver<()>) {
        let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)];

        if let Some(prompt_store) = prompt_store.as_ref() {
            // Any change to the stored prompts invalidates the system prompt.
            subscriptions.push(cx.subscribe(
                prompt_store,
                |this, _prompt_store, PromptsUpdatedEvent, _cx| {
                    this.enqueue_system_prompt_reload();
                },
            ))
        }

        // This channel and task prevent concurrent and redundant loading of the system prompt.
        let (reload_system_prompt_tx, mut reload_system_prompt_rx) = mpsc::channel(1);
        let (ready_tx, ready_rx) = oneshot::channel();
        let mut ready_tx = Some(ready_tx);
        let reload_system_prompt_task = cx.spawn({
            let prompt_store = prompt_store.clone();
            async move |thread_store, cx| {
                loop {
                    // Store dropped: stop the reload loop.
                    let Some(reload_task) = thread_store
                        .update(cx, |thread_store, cx| {
                            thread_store.reload_system_prompt(prompt_store.clone(), cx)
                        })
                        .ok()
                    else {
                        return;
                    };
                    reload_task.await;
                    // Signal readiness exactly once, after the first load.
                    if let Some(ready_tx) = ready_tx.take() {
                        ready_tx.send(()).ok();
                    }
                    // Park until another reload is requested.
                    reload_system_prompt_rx.next().await;
                }
            }
        });

        let this = Self {
            project,
            tools,
            prompt_builder,
            prompt_store,
            context_server_tool_ids: HashMap::default(),
            threads: Vec::new(),
            project_context: SharedProjectContext::default(),
            reload_system_prompt_tx,
            _reload_system_prompt_task: reload_system_prompt_task,
            _subscriptions: subscriptions,
        };
        this.register_context_server_handlers(cx);
        this.reload(cx).detach_and_log_err(cx);
        (this, ready_rx)
    }
201
202 fn handle_project_event(
203 &mut self,
204 _project: Entity<Project>,
205 event: &project::Event,
206 _cx: &mut Context<Self>,
207 ) {
208 match event {
209 project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
210 self.enqueue_system_prompt_reload();
211 }
212 project::Event::WorktreeUpdatedEntries(_, items) => {
213 if items.iter().any(|(path, _, _)| {
214 RULES_FILE_NAMES
215 .iter()
216 .any(|name| path.as_ref() == Path::new(name))
217 }) {
218 self.enqueue_system_prompt_reload();
219 }
220 }
221 _ => {}
222 }
223 }
224
    /// Requests a system prompt reload. The channel has capacity 1, so a full
    /// channel means a reload is already pending and the send can be dropped.
    fn enqueue_system_prompt_reload(&mut self) {
        self.reload_system_prompt_tx.try_send(()).ok();
    }
228
    // Note that this should only be called from `reload_system_prompt_task`.
    /// Gathers per-worktree rules files plus default user rules from the
    /// prompt store, then replaces the shared `project_context`. Load failures
    /// are surfaced as `RulesLoadingError` events instead of aborting.
    fn reload_system_prompt(
        &self,
        prompt_store: Option<Entity<PromptStore>>,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        // Kick off rules-file loads for every visible worktree.
        let worktrees = self
            .project
            .read(cx)
            .visible_worktrees(cx)
            .collect::<Vec<_>>();
        let worktree_tasks = worktrees
            .into_iter()
            .map(|worktree| {
                Self::load_worktree_info_for_system_prompt(worktree, self.project.clone(), cx)
            })
            .collect::<Vec<_>>();
        // Load the contents of every default prompt from the prompt store.
        let default_user_rules_task = match prompt_store {
            None => Task::ready(vec![]),
            Some(prompt_store) => prompt_store.read_with(cx, |prompt_store, cx| {
                let prompts = prompt_store.default_prompt_metadata();
                let load_tasks = prompts.into_iter().map(|prompt_metadata| {
                    let contents = prompt_store.load(prompt_metadata.id, cx);
                    async move { (contents.await, prompt_metadata) }
                });
                cx.background_spawn(future::join_all(load_tasks))
            }),
        };

        cx.spawn(async move |this, cx| {
            let (worktrees, default_user_rules) =
                future::join(future::join_all(worktree_tasks), default_user_rules_task).await;

            // Emit per-worktree rules-loading errors as events, keeping the
            // worktree context itself.
            let worktrees = worktrees
                .into_iter()
                .map(|(worktree, rules_error)| {
                    if let Some(rules_error) = rules_error {
                        this.update(cx, |_, cx| cx.emit(rules_error)).ok();
                    }
                    worktree
                })
                .collect::<Vec<_>>();

            let default_user_rules = default_user_rules
                .into_iter()
                .flat_map(|(contents, prompt_metadata)| match contents {
                    Ok(contents) => Some(UserRulesContext {
                        uuid: match prompt_metadata.id {
                            PromptId::User { uuid } => uuid,
                            // Built-in workflow prompts are not user rules.
                            PromptId::EditWorkflow => return None,
                        },
                        title: prompt_metadata.title.map(|title| title.to_string()),
                        contents,
                    }),
                    Err(err) => {
                        this.update(cx, |_, cx| {
                            cx.emit(RulesLoadingError {
                                message: format!("{err:?}").into(),
                            });
                        })
                        .ok();
                        None
                    }
                })
                .collect::<Vec<_>>();

            // Publish the rebuilt context for all threads sharing it.
            this.update(cx, |this, _cx| {
                *this.project_context.0.borrow_mut() =
                    Some(ProjectContext::new(worktrees, default_user_rules));
            })
            .ok();
        })
    }
302
    /// Builds the `WorktreeContext` for one worktree. A missing rules file is
    /// not an error; a rules file that fails to load yields an error alongside
    /// a context with `rules_file: None`.
    fn load_worktree_info_for_system_prompt(
        worktree: Entity<Worktree>,
        project: Entity<Project>,
        cx: &mut App,
    ) -> Task<(WorktreeContext, Option<RulesLoadingError>)> {
        let root_name = worktree.read(cx).root_name().into();

        let rules_task = Self::load_worktree_rules_file(worktree, project, cx);
        let Some(rules_task) = rules_task else {
            // No rules file exists in this worktree.
            return Task::ready((
                WorktreeContext {
                    root_name,
                    rules_file: None,
                },
                None,
            ));
        };

        cx.spawn(async move |_| {
            let (rules_file, rules_file_error) = match rules_task.await {
                Ok(rules_file) => (Some(rules_file), None),
                Err(err) => (
                    None,
                    Some(RulesLoadingError {
                        message: format!("{err}").into(),
                    }),
                ),
            };
            let worktree_info = WorktreeContext {
                root_name,
                rules_file,
            };
            (worktree_info, rules_file_error)
        })
    }
338
339 fn load_worktree_rules_file(
340 worktree: Entity<Worktree>,
341 project: Entity<Project>,
342 cx: &mut App,
343 ) -> Option<Task<Result<RulesFileContext>>> {
344 let worktree_ref = worktree.read(cx);
345 let worktree_id = worktree_ref.id();
346 let selected_rules_file = RULES_FILE_NAMES
347 .into_iter()
348 .filter_map(|name| {
349 worktree_ref
350 .entry_for_path(name)
351 .filter(|entry| entry.is_file())
352 .map(|entry| entry.path.clone())
353 })
354 .next();
355
356 // Note that Cline supports `.clinerules` being a directory, but that is not currently
357 // supported. This doesn't seem to occur often in GitHub repositories.
358 selected_rules_file.map(|path_in_worktree| {
359 let project_path = ProjectPath {
360 worktree_id,
361 path: path_in_worktree.clone(),
362 };
363 let buffer_task =
364 project.update(cx, |project, cx| project.open_buffer(project_path, cx));
365 let rope_task = cx.spawn(async move |cx| {
366 buffer_task.await?.read_with(cx, |buffer, cx| {
367 let project_entry_id = buffer.entry_id(cx).context("buffer has no file")?;
368 anyhow::Ok((project_entry_id, buffer.as_rope().clone()))
369 })?
370 });
371 // Build a string from the rope on a background thread.
372 cx.background_spawn(async move {
373 let (project_entry_id, rope) = rope_task.await?;
374 anyhow::Ok(RulesFileContext {
375 path_in_worktree,
376 text: rope.to_string().trim().to_string(),
377 project_entry_id: project_entry_id.to_usize(),
378 })
379 })
380 })
381 }
382
    /// The prompt store backing default user rules, if one is available.
    pub fn prompt_store(&self) -> &Option<Entity<PromptStore>> {
        &self.prompt_store
    }

    /// The working set of tools available to threads from this store.
    pub fn tools(&self) -> Entity<ToolWorkingSet> {
        self.tools.clone()
    }
390
    /// Returns the number of threads.
    pub fn thread_count(&self) -> usize {
        self.threads.len()
    }

    /// Iterates over thread metadata in storage order (no particular order);
    /// use `reverse_chronological_threads` for a sorted list.
    pub fn unordered_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
        self.threads.iter()
    }
399
400 pub fn reverse_chronological_threads(&self) -> Vec<SerializedThreadMetadata> {
401 let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
402 threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
403 threads
404 }
405
    /// Creates a brand-new, empty thread backed by this store's project,
    /// tools, and shared project context.
    pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
        cx.new(|cx| {
            Thread::new(
                self.project.clone(),
                self.tools.clone(),
                self.prompt_builder.clone(),
                self.project_context.clone(),
                cx,
            )
        })
    }
417
    /// Creates a thread from serialized data, assigning it a fresh
    /// `ThreadId` — effectively a copy rather than a reopen.
    pub fn create_thread_from_serialized(
        &mut self,
        serialized: SerializedThread,
        cx: &mut Context<Self>,
    ) -> Entity<Thread> {
        cx.new(|cx| {
            Thread::deserialize(
                ThreadId::new(),
                serialized,
                self.project.clone(),
                self.tools.clone(),
                self.prompt_builder.clone(),
                self.project_context.clone(),
                // No window available in this path.
                None,
                cx,
            )
        })
    }
436
    /// Loads the thread with `id` from the database and deserializes it into
    /// a live `Thread` entity. Fails if no such thread exists.
    pub fn open_thread(
        &self,
        id: &ThreadId,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Thread>>> {
        let id = id.clone();
        let database_future = ThreadsDatabase::global_future(cx);
        let this = cx.weak_entity();
        window.spawn(cx, async move |cx| {
            let database = database_future.await.map_err(|err| anyhow!(err))?;
            let thread = database
                .try_find_thread(id.clone())
                .await?
                .with_context(|| format!("no thread found with ID: {id:?}"))?;

            let thread = this.update_in(cx, |this, window, cx| {
                cx.new(|cx| {
                    Thread::deserialize(
                        id.clone(),
                        thread,
                        this.project.clone(),
                        this.tools.clone(),
                        this.prompt_builder.clone(),
                        this.project_context.clone(),
                        Some(window),
                        cx,
                    )
                })
            })?;

            Ok(thread)
        })
    }
471
472 pub fn save_thread(&self, thread: &Entity<Thread>, cx: &mut Context<Self>) -> Task<Result<()>> {
473 let (metadata, serialized_thread) =
474 thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
475
476 let database_future = ThreadsDatabase::global_future(cx);
477 cx.spawn(async move |this, cx| {
478 let serialized_thread = serialized_thread.await?;
479 let database = database_future.await.map_err(|err| anyhow!(err))?;
480 database.save_thread(metadata, serialized_thread).await?;
481
482 this.update(cx, |this, cx| this.reload(cx))?.await
483 })
484 }
485
    /// Deletes the thread with `id` from the database and drops its metadata
    /// from the in-memory list.
    pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
        let id = id.clone();
        let database_future = ThreadsDatabase::global_future(cx);
        cx.spawn(async move |this, cx| {
            let database = database_future.await.map_err(|err| anyhow!(err))?;
            database.delete_thread(id.clone()).await?;

            this.update(cx, |this, cx| {
                this.threads.retain(|thread| thread.id != id);
                cx.notify();
            })
        })
    }
499
    /// Refreshes the in-memory thread metadata list from the database.
    pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
        let database_future = ThreadsDatabase::global_future(cx);
        cx.spawn(async move |this, cx| {
            let threads = database_future
                .await
                .map_err(|err| anyhow!(err))?
                .list_threads()
                .await?;

            this.update(cx, |this, cx| {
                this.threads = threads;
                cx.notify();
            })
        })
    }
515
    /// Subscribes to context server status changes and loads tools for any
    /// servers that were already running before the handler was registered.
    fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
        let context_server_store = self.project.read(cx).context_server_store();
        cx.subscribe(&context_server_store, Self::handle_context_server_event)
            .detach();

        // Check for any servers that were already running before the handler was registered
        for server in context_server_store.read(cx).running_servers() {
            self.load_context_server_tools(server.id(), context_server_store.clone(), cx);
        }
    }
526
527 fn handle_context_server_event(
528 &mut self,
529 context_server_store: Entity<ContextServerStore>,
530 event: &project::context_server_store::Event,
531 cx: &mut Context<Self>,
532 ) {
533 let tool_working_set = self.tools.clone();
534 match event {
535 project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
536 match status {
537 ContextServerStatus::Starting => {}
538 ContextServerStatus::Running => {
539 self.load_context_server_tools(server_id.clone(), context_server_store, cx);
540 }
541 ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
542 if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
543 tool_working_set.update(cx, |tool_working_set, _| {
544 tool_working_set.remove(&tool_ids);
545 });
546 }
547 }
548 }
549 }
550 }
551 }
552
    /// Fetches the tool list from a running context server (when it reports
    /// the Tools capability) and registers each as a `ContextServerTool`,
    /// remembering the resulting ids so they can be removed on server stop.
    fn load_context_server_tools(
        &self,
        server_id: ContextServerId,
        context_server_store: Entity<ContextServerStore>,
        cx: &mut Context<Self>,
    ) {
        // Server may have stopped between the event and this call.
        let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
            return;
        };
        let tool_working_set = self.tools.clone();
        cx.spawn(async move |this, cx| {
            let Some(protocol) = server.client() else {
                return;
            };

            if protocol.capable(context_server::protocol::ServerCapability::Tools) {
                if let Some(tools) = protocol.list_tools().await.log_err() {
                    let tool_ids = tool_working_set
                        .update(cx, |tool_working_set, _| {
                            tools
                                .tools
                                .into_iter()
                                .map(|tool| {
                                    log::info!("registering context server tool: {:?}", tool.name);
                                    tool_working_set.insert(Arc::new(ContextServerTool::new(
                                        context_server_store.clone(),
                                        server.id(),
                                        tool,
                                    )))
                                })
                                .collect::<Vec<_>>()
                        })
                        .log_err();

                    // Record ids for later removal in handle_context_server_event.
                    if let Some(tool_ids) = tool_ids {
                        this.update(cx, |this, _| {
                            this.context_server_tool_ids.insert(server_id, tool_ids);
                        })
                        .log_err();
                    }
                }
            }
        })
        .detach();
    }
598}
599
/// Lightweight per-thread info listed by `ThreadsDatabase::list_threads`;
/// the full payload is loaded separately via `try_find_thread`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedThreadMetadata {
    pub id: ThreadId,
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
}
606
/// Persisted form of a thread (current format version: see `VERSION`).
/// All `#[serde(default)]` fields keep older payloads deserializable.
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedThread {
    pub version: String,
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<SerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
    #[serde(default)]
    pub cumulative_token_usage: TokenUsage,
    #[serde(default)]
    pub request_token_usage: Vec<TokenUsage>,
    #[serde(default)]
    pub detailed_summary_state: DetailedSummaryState,
    #[serde(default)]
    pub exceeded_window_error: Option<ExceededWindowError>,
    #[serde(default)]
    pub model: Option<SerializedLanguageModel>,
    #[serde(default)]
    pub completion_mode: Option<CompletionMode>,
    #[serde(default)]
    pub tool_use_limit_reached: bool,
    #[serde(default)]
    pub profile: Option<AgentProfileId>,
}
632
/// The provider/model pair a thread was last using, stored as plain strings.
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedLanguageModel {
    pub provider: String,
    pub model: String,
}
638
impl SerializedThread {
    pub const VERSION: &'static str = "0.2.0";

    /// Deserializes a thread from JSON, upgrading older formats:
    /// - version "0.1.0" is upgraded via `SerializedThreadV0_1_0::upgrade`
    /// - a payload with no "version" field is treated as the legacy format
    /// Any other (or non-string) version value is an error.
    pub fn from_json(json: &[u8]) -> Result<Self> {
        let saved_thread_json = serde_json::from_slice::<serde_json::Value>(json)?;
        match saved_thread_json.get("version") {
            Some(serde_json::Value::String(version)) => match version.as_str() {
                SerializedThreadV0_1_0::VERSION => {
                    let saved_thread =
                        serde_json::from_value::<SerializedThreadV0_1_0>(saved_thread_json)?;
                    Ok(saved_thread.upgrade())
                }
                SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
                    saved_thread_json,
                )?),
                _ => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
            },
            None => {
                let saved_thread =
                    serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
                Ok(saved_thread.upgrade())
            }
            // "version" present but not a string.
            version => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
        }
    }
}
665
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedThreadV0_1_0(
    // The structure did not change, so we are reusing the latest SerializedThread.
    // When making the next version, make sure this points to SerializedThreadV0_2_0
    SerializedThread,
);

impl SerializedThreadV0_1_0 {
    pub const VERSION: &'static str = "0.1.0";

    /// Upgrades to the 0.2.0 shape: in 0.1.0, tool results arrived in a
    /// follow-up `User` message; 0.2.0 stores them on the preceding
    /// `Assistant` message instead.
    pub fn upgrade(self) -> SerializedThread {
        debug_assert_eq!(SerializedThread::VERSION, "0.2.0");

        let mut messages: Vec<SerializedMessage> = Vec::with_capacity(self.0.messages.len());

        for message in self.0.messages {
            // Fold a tool-result-bearing user message into the previous
            // assistant message, dropping the user message itself.
            if message.role == Role::User && !message.tool_results.is_empty() {
                if let Some(last_message) = messages.last_mut() {
                    debug_assert!(last_message.role == Role::Assistant);

                    last_message.tool_results = message.tool_results;
                    continue;
                }
            }

            messages.push(message);
        }

        SerializedThread { messages, ..self.0 }
    }
}
697
/// Persisted form of a single message in a thread.
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedMessage {
    pub id: MessageId,
    pub role: Role,
    #[serde(default)]
    pub segments: Vec<SerializedMessageSegment>,
    #[serde(default)]
    pub tool_uses: Vec<SerializedToolUse>,
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
    #[serde(default)]
    pub context: String,
    #[serde(default)]
    pub creases: Vec<SerializedCrease>,
    #[serde(default)]
    pub is_hidden: bool,
}
715
/// One segment of a message body, tagged by "type" in the JSON.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SerializedMessageSegment {
    #[serde(rename = "text")]
    Text {
        text: String,
    },
    #[serde(rename = "thinking")]
    Thinking {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        signature: Option<String>,
    },
    // NOTE(review): unlike the variants above, this one has no lowercase
    // `#[serde(rename)]`, so its tag serializes as "RedactedThinking".
    // Presumably kept for compatibility with already-persisted data —
    // confirm before "fixing".
    RedactedThinking {
        data: Vec<u8>,
    },
}
733
/// A tool invocation requested by the model, persisted with its raw input.
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolUse {
    pub id: LanguageModelToolUseId,
    pub name: SharedString,
    pub input: serde_json::Value,
}

/// The outcome of a tool invocation, keyed back to the use by `tool_use_id`.
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolResult {
    pub tool_use_id: LanguageModelToolUseId,
    pub is_error: bool,
    pub content: LanguageModelToolResultContent,
    pub output: Option<serde_json::Value>,
}
748
/// Pre-versioning thread format (no "version" field in the JSON).
#[derive(Serialize, Deserialize)]
struct LegacySerializedThread {
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<LegacySerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}

impl LegacySerializedThread {
    /// Converts to the current format, defaulting every field the legacy
    /// format did not carry.
    pub fn upgrade(self) -> SerializedThread {
        SerializedThread {
            version: SerializedThread::VERSION.to_string(),
            summary: self.summary,
            updated_at: self.updated_at,
            messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
            initial_project_snapshot: self.initial_project_snapshot,
            cumulative_token_usage: TokenUsage::default(),
            request_token_usage: Vec::new(),
            detailed_summary_state: DetailedSummaryState::default(),
            exceeded_window_error: None,
            model: None,
            completion_mode: None,
            tool_use_limit_reached: false,
            profile: None,
        }
    }
}
777
/// Pre-versioning message format: a single flat `text` body instead of
/// segments.
#[derive(Debug, Serialize, Deserialize)]
struct LegacySerializedMessage {
    pub id: MessageId,
    pub role: Role,
    pub text: String,
    #[serde(default)]
    pub tool_uses: Vec<SerializedToolUse>,
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
}

impl LegacySerializedMessage {
    /// Converts to the current format, wrapping the flat text in a single
    /// `Text` segment.
    fn upgrade(self) -> SerializedMessage {
        SerializedMessage {
            id: self.id,
            role: self.role,
            segments: vec![SerializedMessageSegment::Text { text: self.text }],
            tool_uses: self.tool_uses,
            tool_results: self.tool_results,
            context: String::new(),
            creases: Vec::new(),
            is_hidden: false,
        }
    }
}
803
/// A folded (creased) range in the message editor, persisted by offsets.
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedCrease {
    pub start: usize,
    pub end: usize,
    pub icon_path: SharedString,
    pub label: SharedString,
}
811
/// App-global holding the shared future that resolves to the opened threads
/// database; every caller awaits the same initialization.
struct GlobalThreadsDatabase(
    Shared<BoxFuture<'static, Result<Arc<ThreadsDatabase>, Arc<anyhow::Error>>>>,
);

impl Global for GlobalThreadsDatabase {}
817
/// Sqlite-backed persistence for agent threads; all queries run on the
/// background executor.
pub(crate) struct ThreadsDatabase {
    executor: BackgroundExecutor,
    connection: Arc<Mutex<Connection>>,
}

impl ThreadsDatabase {
    /// Clones the shared connection handle.
    fn connection(&self) -> Arc<Mutex<Connection>> {
        self.connection.clone()
    }

    // zstd compression level used when writing thread payloads.
    const COMPRESSION_LEVEL: i32 = 3;
}
830
impl Bind for ThreadId {
    /// Binds a thread id as its string form.
    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
        self.to_string().bind(statement, start_index)
    }
}

impl Column for ThreadId {
    /// Reads a thread id back from its TEXT column.
    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
        let (id_str, next_index) = String::column(statement, start_index)?;
        Ok((ThreadId::from(id_str.as_str()), next_index))
    }
}
843
844impl ThreadsDatabase {
    /// The process-wide shared future resolving to the database handle;
    /// requires `init` to have been called.
    fn global_future(
        cx: &mut App,
    ) -> Shared<BoxFuture<'static, Result<Arc<ThreadsDatabase>, Arc<anyhow::Error>>>> {
        GlobalThreadsDatabase::global(cx).0.clone()
    }
850
    /// Spawns database open/migration on the background executor and stores
    /// the resulting shared future as a global, so initialization happens at
    /// most once and all callers await the same result.
    fn init(cx: &mut App) {
        let executor = cx.background_executor().clone();
        let database_future = executor
            .spawn({
                let executor = executor.clone();
                let threads_dir = paths::data_dir().join("threads");
                async move { ThreadsDatabase::new(threads_dir, executor) }
            })
            // Wrap both sides in Arc so the Shared future's output is Clone.
            .then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
            .boxed()
            .shared();

        cx.set_global(GlobalThreadsDatabase(database_future));
    }
865
    /// Opens (creating if necessary) the sqlite database at
    /// `threads_dir/threads.db` and ensures the `threads` table exists. If a
    /// legacy heed/LMDB directory is present, spawns a background task that
    /// migrates its threads and then deletes the old directory.
    pub fn new(threads_dir: PathBuf, executor: BackgroundExecutor) -> Result<Self> {
        std::fs::create_dir_all(&threads_dir)?;

        let sqlite_path = threads_dir.join("threads.db");
        let mdb_path = threads_dir.join("threads-db.1.mdb");

        // Presence of the old LMDB directory means migration is still needed.
        let needs_migration_from_heed = mdb_path.exists();

        let connection = Connection::open_file(&sqlite_path.to_string_lossy());

        connection.exec(indoc! {"
            CREATE TABLE IF NOT EXISTS threads (
                id TEXT PRIMARY KEY,
                summary TEXT NOT NULL,
                updated_at TEXT NOT NULL,
                data_type TEXT NOT NULL,
                data BLOB NOT NULL
            )
        "})?()
        .map_err(|e| anyhow!("Failed to create threads table: {}", e))?;

        let db = Self {
            executor: executor.clone(),
            connection: Arc::new(Mutex::new(connection)),
        };

        if needs_migration_from_heed {
            let db_connection = db.connection();
            let executor_clone = executor.clone();
            executor
                .spawn(async move {
                    log::info!("Starting threads.db migration");
                    Self::migrate_from_heed(&mdb_path, db_connection, executor_clone)?;
                    // Only remove the legacy database after migration succeeds.
                    std::fs::remove_dir_all(mdb_path)?;
                    log::info!("threads.db migrated to sqlite");
                    Ok::<(), anyhow::Error>(())
                })
                .detach();
        }

        Ok(db)
    }
908
    // Remove this migration after 2025-09-01
    /// Imports every thread from the legacy heed/LMDB database at `mdb_path`
    /// into sqlite. Values are decoded through `SerializedThread::from_json`,
    /// so older on-disk formats are upgraded along the way.
    fn migrate_from_heed(
        mdb_path: &Path,
        connection: Arc<Mutex<Connection>>,
        _executor: BackgroundExecutor,
    ) -> Result<()> {
        use heed::types::SerdeBincode;
        // Adapter giving `SerializedThread` heed encode/decode impls backed
        // by its JSON representation.
        struct SerializedThreadHeed(SerializedThread);

        impl heed::BytesEncode<'_> for SerializedThreadHeed {
            type EItem = SerializedThreadHeed;

            fn bytes_encode(
                item: &Self::EItem,
            ) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
                serde_json::to_vec(&item.0)
                    .map(std::borrow::Cow::Owned)
                    .map_err(Into::into)
            }
        }

        impl<'a> heed::BytesDecode<'a> for SerializedThreadHeed {
            type DItem = SerializedThreadHeed;

            fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
                SerializedThread::from_json(bytes)
                    .map(SerializedThreadHeed)
                    .map_err(Into::into)
            }
        }

        const ONE_GB_IN_BYTES: usize = 1024 * 1024 * 1024;

        // heed's env open is `unsafe` (memory-mapped file); assumes no other
        // process has this database mapped — NOTE(review): confirm.
        let env = unsafe {
            heed::EnvOpenOptions::new()
                .map_size(ONE_GB_IN_BYTES)
                .max_dbs(1)
                .open(mdb_path)?
        };

        let txn = env.write_txn()?;
        let threads: heed::Database<SerdeBincode<ThreadId>, SerializedThreadHeed> = env
            .open_database(&txn, Some("threads"))?
            .ok_or_else(|| anyhow!("threads database not found"))?;

        // Copy every thread across; any decode/save error aborts migration so
        // the legacy directory is kept for a retry.
        for result in threads.iter(&txn)? {
            let (thread_id, thread_heed) = result?;
            Self::save_thread_sync(&connection, thread_id, thread_heed.0)?;
        }

        Ok(())
    }
961
962 fn save_thread_sync(
963 connection: &Arc<Mutex<Connection>>,
964 id: ThreadId,
965 thread: SerializedThread,
966 ) -> Result<()> {
967 let json_data = serde_json::to_string(&thread)?;
968 let summary = thread.summary.to_string();
969 let updated_at = thread.updated_at.to_rfc3339();
970
971 let connection = connection.lock().unwrap();
972
973 let compressed = zstd::encode_all(json_data.as_bytes(), Self::COMPRESSION_LEVEL)?;
974 let data_type = DataType::Zstd;
975 let data = compressed;
976
977 let mut insert = connection.exec_bound::<(ThreadId, String, String, DataType, Vec<u8>)>(indoc! {"
978 INSERT OR REPLACE INTO threads (id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?)
979 "})?;
980
981 insert((id, summary, updated_at, data_type, data))?;
982
983 Ok(())
984 }
985
    /// Lists metadata for all stored threads, most recently updated first
    /// (RFC 3339 strings in `updated_at` sort chronologically).
    pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
        let connection = self.connection.clone();

        self.executor.spawn(async move {
            let connection = connection.lock().unwrap();
            let mut select =
                connection.select_bound::<(), (ThreadId, String, String)>(indoc! {"
                SELECT id, summary, updated_at FROM threads ORDER BY updated_at DESC
            "})?;

            let rows = select(())?;
            let mut threads = Vec::new();

            for (id, summary, updated_at) in rows {
                threads.push(SerializedThreadMetadata {
                    id,
                    summary: summary.into(),
                    // Stored as RFC 3339 text; parse back to a UTC timestamp.
                    updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
                });
            }

            Ok(threads)
        })
    }
1010
    /// Loads a single thread by id, decompressing the stored payload per its
    /// `data_type`. Returns `Ok(None)` when no row matches.
    pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
        let connection = self.connection.clone();

        self.executor.spawn(async move {
            let connection = connection.lock().unwrap();
            let mut select = connection.select_bound::<ThreadId, (DataType, Vec<u8>)>(indoc! {"
                SELECT data_type, data FROM threads WHERE id = ? LIMIT 1
            "})?;

            let rows = select(id)?;
            if let Some((data_type, data)) = rows.into_iter().next() {
                let json_data = match data_type {
                    DataType::Zstd => {
                        let decompressed = zstd::decode_all(&data[..])?;
                        String::from_utf8(decompressed)?
                    }
                    DataType::Json => String::from_utf8(data)?,
                };

                // `from_json` also upgrades older serialized formats.
                let thread = SerializedThread::from_json(json_data.as_bytes())?;
                Ok(Some(thread))
            } else {
                Ok(None)
            }
        })
    }
1037
    /// Saves (inserting or replacing) a thread on the background executor.
    pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
        let connection = self.connection.clone();

        self.executor
            .spawn(async move { Self::save_thread_sync(&connection, id, thread) })
    }
1044
    /// Deletes the row for `id`, if present; succeeds even when absent.
    pub fn delete_thread(&self, id: ThreadId) -> Task<Result<()>> {
        let connection = self.connection.clone();

        self.executor.spawn(async move {
            let connection = connection.lock().unwrap();

            let mut delete = connection.exec_bound::<ThreadId>(indoc! {"
                DELETE FROM threads WHERE id = ?
            "})?;

            delete(id)?;

            Ok(())
        })
    }
1060}