Cargo.lock 🔗
@@ -15870,6 +15870,7 @@ dependencies = [
"git",
"gpui",
"language_model",
+ "log",
"menu",
"platform_title_bar",
"pretty_assertions",
Created by Richard Feldman.
When the last agent thread associated with a linked git worktree is
archived, creates two WIP commits to preserve staging state, anchors
them with a git ref, and deletes the worktree directory.
When a thread is unarchived, recreates the worktree from the WIP
commits, restores staging state via two resets, and cleans up.
Uses a join table (thread_archived_worktrees) to associate threads
with archived worktree records, allowing multiple archives of the
same path without data loss.
Cargo.lock | 1
crates/agent_ui/src/thread_metadata_store.rs | 215 ++++
crates/sidebar/Cargo.toml | 1
crates/sidebar/src/sidebar.rs | 1135 ++++++++++++++++++++-
crates/sidebar/src/sidebar_tests.rs | 932 ++++++++++++++---
5 files changed, 2016 insertions(+), 268 deletions(-)
@@ -15870,6 +15870,7 @@ dependencies = [
"git",
"gpui",
"language_model",
+ "log",
"menu",
"platform_title_bar",
"pretty_assertions",
@@ -1,4 +1,7 @@
-use std::{path::Path, sync::Arc};
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
use acp_thread::AcpThreadEvent;
use agent::{ThreadStore, ZED_AGENT_ID};
@@ -144,6 +147,16 @@ impl From<&ThreadMetadata> for acp_thread::AgentSessionInfo {
}
}
+/// Record of a git worktree that was archived (deleted from disk) when its last thread was archived.
+pub struct ArchivedGitWorktree {
+ /// Row id in `archived_git_worktrees`; also used to derive the anchoring git ref name.
+ pub id: i64,
+ /// Absolute path the worktree occupied on disk when it was archived.
+ pub worktree_path: PathBuf,
+ /// Absolute path of the main (non-linked) repository the worktree belongs to.
+ pub main_repo_path: PathBuf,
+ /// Branch checked out in the worktree at archive time, if any.
+ pub branch_name: Option<String>,
+ /// Hash of the WIP commit that preserves the worktree's state.
+ pub commit_hash: String,
+ /// True once the worktree has been recreated on disk from the WIP commits.
+ pub restored: bool,
+}
+
/// The store holds all metadata needed to show threads in the sidebar/the archive.
///
/// Automatically listens to AcpThread events and updates metadata if it has changed.
@@ -388,6 +401,71 @@ impl ThreadMetadataStore {
self.update_archived(session_id, false, cx);
}
+ /// Insert a new archived-worktree record on the background executor and
+ /// return the new row's database id.
+ pub fn create_archived_worktree(
+ &self,
+ worktree_path: String,
+ main_repo_path: String,
+ branch_name: Option<String>,
+ commit_hash: String,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<i64>> {
+ let db = self.db.clone();
+ cx.background_spawn(async move {
+ db.create_archived_worktree(
+ &worktree_path,
+ &main_repo_path,
+ branch_name.as_deref(),
+ &commit_hash,
+ )
+ .await
+ })
+ }
+
+ /// Associate a thread (by session id) with an archived worktree record
+ /// via the `thread_archived_worktrees` join table.
+ pub fn link_thread_to_archived_worktree(
+ &self,
+ session_id: String,
+ archived_worktree_id: i64,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<()>> {
+ let db = self.db.clone();
+ cx.background_spawn(async move {
+ db.link_thread_to_archived_worktree(&session_id, archived_worktree_id)
+ .await
+ })
+ }
+
+ /// Fetch all archived-worktree records linked to the given thread,
+ /// resolved on the background executor.
+ pub fn get_archived_worktrees_for_thread(
+ &self,
+ session_id: String,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<Vec<ArchivedGitWorktree>>> {
+ let db = self.db.clone();
+ cx.background_spawn(async move { db.get_archived_worktrees_for_thread(&session_id).await })
+ }
+
+ /// Delete an archived-worktree record (and its join-table links) by id.
+ pub fn delete_archived_worktree(
+ &self,
+ id: i64,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<()>> {
+ let db = self.db.clone();
+ cx.background_spawn(async move { db.delete_archived_worktree(id).await })
+ }
+
+ /// Mark an archived worktree as restored, recording the path and branch
+ /// it was actually restored to (which may differ from the archived ones
+ /// if there was a collision).
+ pub fn set_archived_worktree_restored(
+ &self,
+ id: i64,
+ worktree_path: String,
+ branch_name: Option<String>,
+ cx: &mut Context<Self>,
+ ) -> Task<anyhow::Result<()>> {
+ let db = self.db.clone();
+ cx.background_spawn(async move {
+ db.set_archived_worktree_restored(id, &worktree_path, branch_name.as_deref())
+ .await
+ })
+ }
+
fn update_archived(
&mut self,
session_id: &acp::SessionId,
@@ -634,6 +712,21 @@ impl Domain for ThreadMetadataDb {
sql!(ALTER TABLE sidebar_threads ADD COLUMN archived INTEGER DEFAULT 0),
sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths TEXT),
sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths_order TEXT),
+ sql!(
+ CREATE TABLE IF NOT EXISTS archived_git_worktrees(
+ id INTEGER PRIMARY KEY,
+ worktree_path TEXT NOT NULL,
+ main_repo_path TEXT NOT NULL,
+ branch_name TEXT,
+ commit_hash TEXT NOT NULL,
+ restored INTEGER NOT NULL DEFAULT 0
+ ) STRICT;
+ CREATE TABLE IF NOT EXISTS thread_archived_worktrees(
+ session_id TEXT NOT NULL,
+ archived_worktree_id INTEGER NOT NULL REFERENCES archived_git_worktrees(id),
+ PRIMARY KEY (session_id, archived_worktree_id)
+ ) STRICT;
+ ),
];
}
@@ -711,6 +804,104 @@ impl ThreadMetadataDb {
.await
}
+ /// Insert a row into `archived_git_worktrees` and return the new row id
+ /// via SQLite's `RETURNING` clause.
+ ///
+ /// Borrowed arguments are copied to owned values so the closure passed
+ /// to `write` can be moved to the DB thread.
+ pub async fn create_archived_worktree(
+ &self,
+ worktree_path: &str,
+ main_repo_path: &str,
+ branch_name: Option<&str>,
+ commit_hash: &str,
+ ) -> anyhow::Result<i64> {
+ let worktree_path = worktree_path.to_string();
+ let main_repo_path = main_repo_path.to_string();
+ let branch_name = branch_name.map(|s| s.to_string());
+ let commit_hash = commit_hash.to_string();
+ self.write(move |conn| {
+ let id: i64 =
+ conn.select_row_bound::<_, i64>(sql!(
+ INSERT INTO archived_git_worktrees(
+ worktree_path, main_repo_path, branch_name, commit_hash
+ ) VALUES (?1, ?2, ?3, ?4)
+ RETURNING id
+ ))?((worktree_path, main_repo_path, branch_name, commit_hash))?
+ .context("Could not retrieve inserted archived worktree id")?;
+ Ok(id)
+ })
+ .await
+ }
+
+ /// Insert a join-table row associating `session_id` with an archived
+ /// worktree record.
+ ///
+ /// NOTE(review): this uses a raw SQL string + manual `Statement::bind`
+ /// while the sibling insert above uses the `sql!` macro — confirm both
+ /// styles are intentional, or unify on one.
+ pub async fn link_thread_to_archived_worktree(
+ &self,
+ session_id: &str,
+ archived_worktree_id: i64,
+ ) -> anyhow::Result<()> {
+ let session_id = session_id.to_string();
+ self.write(move |conn| {
+ let mut stmt = Statement::prepare(
+ conn,
+ "INSERT INTO thread_archived_worktrees(\
+ session_id, archived_worktree_id\
+ ) VALUES (?, ?)",
+ )?;
+ let i = stmt.bind(&session_id, 1)?;
+ stmt.bind(&archived_worktree_id, i)?;
+ stmt.exec()
+ })
+ .await
+ }
+
+ /// Fetch all archived-worktree rows joined to the given session id.
+ ///
+ /// The SELECT column order must match `impl Column for
+ /// ArchivedGitWorktree` below.
+ ///
+ /// NOTE(review): unlike the other methods here, this never awaits and
+ /// does not go through `self.write`/a background executor — `select_bound`
+ /// appears to run synchronously on the calling thread despite the
+ /// `async fn` signature. Confirm this is intended.
+ pub async fn get_archived_worktrees_for_thread(
+ &self,
+ session_id: &str,
+ ) -> anyhow::Result<Vec<ArchivedGitWorktree>> {
+ let session_id = session_id.to_string();
+ self.select_bound(
+ "SELECT aw.id, aw.worktree_path, aw.main_repo_path, aw.branch_name, aw.commit_hash, aw.restored \
+ FROM archived_git_worktrees aw \
+ JOIN thread_archived_worktrees taw ON taw.archived_worktree_id = aw.id \
+ WHERE taw.session_id = ?",
+ )?(session_id)
+ }
+
+ /// Delete an archived-worktree record together with its join-table rows.
+ /// Join rows are removed first because they hold a REFERENCES constraint
+ /// on `archived_git_worktrees.id`.
+ ///
+ /// NOTE(review): presumably `self.write` runs both statements in one
+ /// transaction so a crash between them cannot orphan rows — confirm.
+ pub async fn delete_archived_worktree(&self, id: i64) -> anyhow::Result<()> {
+ self.write(move |conn| {
+ let mut stmt = Statement::prepare(
+ conn,
+ "DELETE FROM thread_archived_worktrees WHERE archived_worktree_id = ?",
+ )?;
+ stmt.bind(&id, 1)?;
+ stmt.exec()?;
+
+ let mut stmt =
+ Statement::prepare(conn, "DELETE FROM archived_git_worktrees WHERE id = ?")?;
+ stmt.bind(&id, 1)?;
+ stmt.exec()
+ })
+ .await
+ }
+
+ /// Flip `restored` to 1 and record the final worktree path and branch,
+ /// which may differ from the originally archived values when the restore
+ /// had to pick a new path or fell back to detached HEAD.
+ pub async fn set_archived_worktree_restored(
+ &self,
+ id: i64,
+ worktree_path: &str,
+ branch_name: Option<&str>,
+ ) -> anyhow::Result<()> {
+ let worktree_path = worktree_path.to_string();
+ let branch_name = branch_name.map(|s| s.to_string());
+ self.write(move |conn| {
+ let mut stmt = Statement::prepare(
+ conn,
+ "UPDATE archived_git_worktrees \
+ SET restored = 1, worktree_path = ?, branch_name = ? \
+ WHERE id = ?",
+ )?;
+ // Bind positionally: 1 = worktree_path, 2 = branch_name, 3 = id.
+ let mut i = stmt.bind(&worktree_path, 1)?;
+ i = stmt.bind(&branch_name, i)?;
+ stmt.bind(&id, i)?;
+ stmt.exec()
+ })
+ .await
+ }
+
/// Delete metadata for a single thread.
pub async fn delete(&self, session_id: acp::SessionId) -> anyhow::Result<()> {
let id = session_id.0.clone();
@@ -785,6 +976,28 @@ impl Column for ThreadMetadata {
}
}
+/// Decode an `ArchivedGitWorktree` from a result row.
+///
+/// The field order here is coupled to the SELECT column list in
+/// `get_archived_worktrees_for_thread`; keep the two in sync.
+impl Column for ArchivedGitWorktree {
+ fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
+ let (id, next): (i64, i32) = Column::column(statement, start_index)?;
+ let (worktree_path_str, next): (String, i32) = Column::column(statement, next)?;
+ let (main_repo_path_str, next): (String, i32) = Column::column(statement, next)?;
+ let (branch_name, next): (Option<String>, i32) = Column::column(statement, next)?;
+ let (commit_hash, next): (String, i32) = Column::column(statement, next)?;
+ // `restored` is stored as INTEGER 0/1 in a STRICT table.
+ let (restored_int, next): (i64, i32) = Column::column(statement, next)?;
+ Ok((
+ ArchivedGitWorktree {
+ id,
+ worktree_path: PathBuf::from(worktree_path_str),
+ main_repo_path: PathBuf::from(main_repo_path_str),
+ branch_name,
+ commit_hash,
+ restored: restored_int != 0,
+ },
+ next,
+ ))
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -29,6 +29,7 @@ feature_flags.workspace = true
fs.workspace = true
git.workspace = true
gpui.workspace = true
+log.workspace = true
menu.workspace = true
platform_title_bar.workspace = true
project.workspace = true
@@ -4,7 +4,7 @@ use acp_thread::ThreadStatus;
use action_log::DiffStats;
use agent_client_protocol::{self as acp};
use agent_settings::AgentSettings;
-use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore};
+use agent_ui::thread_metadata_store::{ArchivedGitWorktree, ThreadMetadata, ThreadMetadataStore};
use agent_ui::threads_archive_view::{
ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp,
};
@@ -15,14 +15,16 @@ use agent_ui::{
use chrono::{DateTime, Utc};
use editor::Editor;
use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _};
+use git::repository::{AskPassDelegate, CommitOptions, ResetMode};
use gpui::{
- Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, KeyContext, ListState,
- Pixels, Render, SharedString, WeakEntity, Window, WindowHandle, linear_color_stop,
- linear_gradient, list, prelude::*, px,
+ Action as _, AnyElement, App, AsyncWindowContext, Context, Entity, FocusHandle, Focusable,
+ KeyContext, ListState, Pixels, PromptLevel, Render, SharedString, Task, WeakEntity, Window,
+ WindowHandle, linear_color_stop, linear_gradient, list, prelude::*, px,
};
use menu::{
Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious,
};
+use project::git_store;
use project::{AgentId, AgentRegistryStore, Event as ProjectEvent, linked_worktree_short_name};
use recent_projects::sidebar_recent_projects::SidebarRecentProjects;
use remote::RemoteConnectionOptions;
@@ -32,6 +34,7 @@ use serde::{Deserialize, Serialize};
use settings::Settings as _;
use std::collections::{HashMap, HashSet};
use std::mem;
+use std::path::PathBuf;
use std::rc::Rc;
use theme::ActiveTheme;
use ui::{
@@ -381,6 +384,34 @@ pub struct Sidebar {
project_header_menu_ix: Option<usize>,
_subscriptions: Vec<gpui::Subscription>,
_draft_observation: Option<gpui::Subscription>,
+ pending_worktree_archives: HashMap<PathBuf, Task<anyhow::Result<()>>>,
+}
+
+/// Name of the git ref that anchors the WIP commit for the archived
+/// worktree with this database id, keeping it alive against GC until
+/// `maybe_cleanup_archived_worktree` deletes it.
+fn archived_worktree_ref_name(id: i64) -> String {
+ format!("refs/archived-worktrees/{}", id)
+}
+
+/// Finds the main (non-linked-worktree) repository entity for a given path
+/// by searching across all open workspaces. This cross-workspace search is
+/// needed because the restore flow starts from a DB record (not an open repo
+/// entity), so the main repo could be in any open workspace window.
+///
+/// Returns the first match; `None` if the repo is not open anywhere.
+fn find_main_repo_in_workspaces(
+ workspaces: &[Entity<Workspace>],
+ main_repo_path: &std::path::Path,
+ cx: &App,
+) -> Option<Entity<git_store::Repository>> {
+ workspaces.iter().find_map(|workspace| {
+ let project = workspace.read(cx).project().clone();
+ project
+ .read(cx)
+ .repositories(cx)
+ .values()
+ .find_map(|repo_entity| {
+ let repo = repo_entity.read(cx);
+ // Match only the main checkout, by absolute work directory.
+ (repo.is_main_worktree() && *repo.work_directory_abs_path == *main_repo_path)
+ .then(|| repo_entity.clone())
+ })
+ })
}
impl Sidebar {
@@ -472,6 +503,7 @@ impl Sidebar {
project_header_menu_ix: None,
_subscriptions: Vec::new(),
_draft_observation: None,
+ pending_worktree_archives: HashMap::default(),
}
}
@@ -709,25 +741,19 @@ impl Sidebar {
// Derive active_entry from the active workspace's agent panel.
// Draft is checked first because a conversation can have a session_id
// before any messages are sent. However, a thread that's still loading
- // also appears as a "draft" (no messages yet).
+ // also appears as a "draft" (no messages yet), so when we already have
+ // an eager Thread write for this workspace we preserve it. A session_id
+ // on a non-draft is a positive Thread signal. The remaining case
+ // (conversation exists, not draft, no session_id) is a genuine
+ // mid-load — keep the previous value.
if let Some(active_ws) = &active_workspace {
if let Some(panel) = active_ws.read(cx).panel::<AgentPanel>(cx) {
if panel.read(cx).active_thread_is_draft(cx)
|| panel.read(cx).active_conversation_view().is_none()
{
- let conversation_parent_id = panel
- .read(cx)
- .active_conversation_view()
- .and_then(|cv| cv.read(cx).parent_id(cx));
let preserving_thread =
- if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry {
- self.active_entry_workspace() == Some(active_ws)
- && conversation_parent_id
- .as_ref()
- .is_some_and(|id| id == session_id)
- } else {
- false
- };
+ matches!(&self.active_entry, Some(ActiveEntry::Thread { .. }))
+ && self.active_entry_workspace() == Some(active_ws);
if !preserving_thread {
self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
}
@@ -906,51 +932,6 @@ impl Sidebar {
}
}
- // Load threads from main worktrees when a workspace in this
- // group is itself a linked worktree checkout.
- let main_repo_queries: Vec<PathList> = group
- .workspaces
- .iter()
- .flat_map(|ws| root_repository_snapshots(ws, cx))
- .filter(|snapshot| snapshot.is_linked_worktree())
- .map(|snapshot| {
- PathList::new(std::slice::from_ref(&snapshot.original_repo_abs_path))
- })
- .collect();
-
- for main_repo_path_list in main_repo_queries {
- let folder_path_matches = thread_store
- .read(cx)
- .entries_for_path(&main_repo_path_list)
- .cloned();
- let main_worktree_path_matches = thread_store
- .read(cx)
- .entries_for_main_worktree_path(&main_repo_path_list)
- .cloned();
-
- for row in folder_path_matches.chain(main_worktree_path_matches) {
- if !seen_session_ids.insert(row.session_id.clone()) {
- continue;
- }
- let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
- let worktrees =
- worktree_info_from_thread_paths(&row.folder_paths, &project_groups);
- threads.push(ThreadEntry {
- metadata: row,
- icon,
- icon_from_external_svg,
- status: AgentThreadStatus::default(),
- workspace: ThreadEntryWorkspace::Closed(main_repo_path_list.clone()),
- is_live: false,
- is_background: false,
- is_title_generating: false,
- highlight_positions: Vec::new(),
- worktrees,
- diff_stats: DiffStats::default(),
- });
- }
- }
-
// Build a lookup from live_infos and compute running/waiting
// counts in a single pass.
let mut live_info_by_session: HashMap<&acp::SessionId, &ActiveThreadInfo> =
@@ -2218,16 +2199,38 @@ impl Sidebar {
ThreadMetadataStore::global(cx)
.update(cx, |store, cx| store.unarchive(&metadata.session_id, cx));
+ if metadata.folder_paths.paths().is_empty() {
+ self.activate_unarchived_thread_in_workspace(&metadata, window, cx);
+ return;
+ }
+
+ // Check all paths for archived worktrees that may need restoration
+ // before opening the workspace.
+ let paths = metadata.folder_paths.paths().to_vec();
+ self.maybe_restore_git_worktrees(paths, metadata, window, cx);
+ }
+
+ fn activate_unarchived_thread_in_workspace(
+ &mut self,
+ metadata: &ThreadMetadata,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
if !metadata.folder_paths.paths().is_empty() {
let path_list = metadata.folder_paths.clone();
if let Some(workspace) = self.find_current_workspace_for_path_list(&path_list, cx) {
- self.activate_thread_locally(&metadata, &workspace, window, cx);
+ self.activate_thread_locally(metadata, &workspace, window, cx);
} else if let Some((target_window, workspace)) =
self.find_open_workspace_for_path_list(&path_list, cx)
{
- self.activate_thread_in_other_window(metadata, workspace, target_window, cx);
+ self.activate_thread_in_other_window(
+ metadata.clone(),
+ workspace,
+ target_window,
+ cx,
+ );
} else {
- self.open_workspace_and_activate_thread(metadata, path_list, window, cx);
+ self.open_workspace_and_activate_thread(metadata.clone(), path_list, window, cx);
}
return;
}
@@ -2240,10 +2243,443 @@ impl Sidebar {
});
if let Some(workspace) = active_workspace {
- self.activate_thread_locally(&metadata, &workspace, window, cx);
+ self.activate_thread_locally(metadata, &workspace, window, cx);
}
}
+ /// Resolve archived-worktree records for `paths`, restore any worktrees
+ /// that were deleted from disk, then re-activate the thread with the
+ /// (possibly rewritten) folder paths. A restore failure falls back to
+ /// the original path so unarchiving still proceeds.
+ fn maybe_restore_git_worktrees(
+ &mut self,
+ paths: Vec<std::path::PathBuf>,
+ metadata: ThreadMetadata,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Cancel any in-flight archive tasks for the paths we're about to
+ // restore, so a slow archive cannot delete a worktree we are restoring.
+ let canceled_paths: Vec<_> = paths
+ .iter()
+ .filter(|path| self.pending_worktree_archives.remove(*path).is_some())
+ .cloned()
+ .collect();
+
+ let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+ return;
+ };
+ let workspaces = multi_workspace.read(cx).workspaces().to_vec();
+ let session_id = metadata.session_id.0.to_string();
+
+ cx.spawn_in(window, async move |this, cx| {
+ let store = cx.update(|_window, cx| ThreadMetadataStore::global(cx))?;
+
+ // Look up all archived worktrees linked to this thread.
+ // NOTE(review): `.unwrap_or_default()` silently treats a DB error
+ // as "no archived worktrees" — consider logging the error.
+ let archived_worktrees = store
+ .update(cx, |store, cx| {
+ store.get_archived_worktrees_for_thread(session_id, cx)
+ })
+ .await
+ .unwrap_or_default();
+
+ // Build a map from worktree_path → ArchivedGitWorktree for quick lookup.
+ let archived_by_path: HashMap<PathBuf, ArchivedGitWorktree> = archived_worktrees
+ .into_iter()
+ .map(|row| (row.worktree_path.clone(), row))
+ .collect();
+
+ // Clean up any canceled in-flight archives that have DB records.
+ for canceled_path in &canceled_paths {
+ if let Some(row) = archived_by_path.get(canceled_path) {
+ Self::maybe_cleanup_archived_worktree(row, &store, &workspaces, cx).await;
+ }
+ }
+
+ // For each thread path, substitute the restored location (which may
+ // differ from the archived one) or keep the path unchanged.
+ let mut final_paths = Vec::with_capacity(paths.len());
+
+ for path in &paths {
+ match archived_by_path.get(path) {
+ None => {
+ final_paths.push(path.clone());
+ }
+ Some(row) => {
+ match Self::restore_archived_worktree(row, &workspaces, cx).await {
+ Ok(restored_path) => {
+ final_paths.push(restored_path);
+ Self::maybe_cleanup_archived_worktree(row, &store, &workspaces, cx)
+ .await;
+ }
+ Err(err) => {
+ log::error!(
+ "Failed to restore archived worktree for {}: {err}",
+ path.display()
+ );
+ final_paths.push(path.clone());
+ }
+ }
+ }
+ }
+ }
+
+ let mut updated_metadata = metadata;
+ updated_metadata.folder_paths = PathList::new(&final_paths);
+
+ this.update_in(cx, |this, window, cx| {
+ this.activate_unarchived_thread_in_workspace(&updated_metadata, window, cx);
+ })?;
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ /// Recreate an archived worktree from its WIP commit and restore its
+ /// staging state, returning the path the worktree ended up at (which may
+ /// differ from the archived path on collision).
+ ///
+ /// Steps: find the main repo → pick a final path → create the worktree
+ /// detached at the WIP commit → register it with the project → undo the
+ /// two WIP commits (mixed reset drops the unstaged-changes commit, soft
+ /// reset drops the staged-changes commit, leaving those changes staged)
+ /// → re-attach the original branch if possible → mark restored in the DB.
+ /// Falls back to `create_fresh_worktree` when the main repo is missing or
+ /// worktree creation fails.
+ async fn restore_archived_worktree(
+ row: &ArchivedGitWorktree,
+ workspaces: &[Entity<Workspace>],
+ cx: &mut AsyncWindowContext,
+ ) -> anyhow::Result<PathBuf> {
+ let commit_hash = row.commit_hash.clone();
+
+ // Find the main repo entity.
+ let main_repo = cx.update(|_window, cx| {
+ find_main_repo_in_workspaces(workspaces, &row.main_repo_path, cx)
+ })?;
+
+ let Some(main_repo) = main_repo else {
+ // Main repo not found — fall back to fresh worktree.
+ return Self::create_fresh_worktree(row, workspaces, cx).await;
+ };
+
+ // Check if the original worktree path is already in use.
+ let worktree_path = &row.worktree_path;
+ let fs = cx.update(|_window, cx| <dyn fs::Fs>::global(cx))?;
+ let already_exists = fs.metadata(worktree_path).await?.is_some();
+
+ // A previous restore counts as valid only if the path exists, the DB
+ // row says restored, AND some open project recognizes it as a repo.
+ let is_restored_and_valid = already_exists
+ && row.restored
+ && cx.update(|_window, cx| {
+ workspaces.iter().any(|workspace| {
+ let project = workspace.read(cx).project().clone();
+ project
+ .read(cx)
+ .repositories(cx)
+ .values()
+ .any(|repo_entity| {
+ *repo_entity.read(cx).snapshot().work_directory_abs_path
+ == *worktree_path
+ })
+ })
+ })?;
+
+ let final_worktree_path = if !already_exists {
+ worktree_path.clone()
+ } else if is_restored_and_valid {
+ // Another thread already restored this worktree and it's
+ // registered as a git worktree in the project — reuse it.
+ worktree_path.clone()
+ } else {
+ // Collision — use a different path. Generate a name based on
+ // the archived worktree ID to keep it deterministic.
+ let suffix = row.id.to_string();
+ let new_name = format!(
+ "{}-restored-{suffix}",
+ row.branch_name.as_deref().unwrap_or("worktree"),
+ );
+ let path = main_repo.update(cx, |repo, _cx| {
+ let setting = git_store::worktrees_directory_for_repo(
+ &repo.snapshot().original_repo_abs_path,
+ git::repository::DEFAULT_WORKTREE_DIRECTORY,
+ )
+ .ok()
+ .map(|p| p.to_string_lossy().to_string())
+ .unwrap_or_default();
+ repo.path_for_new_linked_worktree(&new_name, &setting)
+ })?;
+ path
+ };
+
+ // We need to create the worktree if it doesn't already exist at
+ // the final path (which may differ from the original due to a
+ // collision). If another thread already restored it and it's a
+ // recognized worktree, we skip creation.
+ let final_path_exists = if final_worktree_path == *worktree_path {
+ already_exists
+ } else {
+ fs.metadata(&final_worktree_path).await?.is_some()
+ };
+
+ if !final_path_exists && !is_restored_and_valid {
+ // Create the worktree in detached HEAD mode at the WIP commit.
+ let create_receiver = main_repo.update(cx, |repo, _cx| {
+ repo.create_worktree_detached(final_worktree_path.clone(), commit_hash.clone())
+ });
+ match create_receiver.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ // Another concurrent restore may have already created
+ // this worktree. Re-check before falling back.
+ if fs.metadata(&final_worktree_path).await?.is_some() {
+ log::info!("Worktree creation failed ({err}) but path exists — reusing it");
+ } else {
+ log::error!("Failed to create worktree: {err}");
+ return Self::create_fresh_worktree(row, workspaces, cx).await;
+ }
+ }
+ Err(_) => {
+ anyhow::bail!("Worktree creation was canceled");
+ }
+ }
+
+ // Tell the project about the new worktree and wait for it
+ // to finish scanning so the GitStore creates a Repository.
+ let project = cx.update(|_window, cx| {
+ workspaces.iter().find_map(|workspace| {
+ let project = workspace.read(cx).project().clone();
+ let has_main_repo = project.read(cx).repositories(cx).values().any(|repo| {
+ let repo = repo.read(cx);
+ repo.is_main_worktree()
+ && *repo.work_directory_abs_path == *row.main_repo_path
+ });
+ has_main_repo.then_some(project)
+ })
+ })?;
+
+ if let Some(project) = project {
+ let path_for_register = final_worktree_path.clone();
+ let worktree_result = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(path_for_register, true, cx)
+ })
+ .await;
+ if let Ok((worktree, _)) = worktree_result {
+ let scan_complete = cx.update(|_window, cx| {
+ worktree
+ .read(cx)
+ .as_local()
+ .map(project::LocalWorktree::scan_complete)
+ })?;
+ if let Some(future) = scan_complete {
+ future.await;
+ }
+ }
+ }
+
+ // Find the new worktree's repo entity.
+ let worktree_repo = cx.update(|_window, cx| {
+ workspaces.iter().find_map(|workspace| {
+ let project = workspace.read(cx).project().clone();
+ project
+ .read(cx)
+ .repositories(cx)
+ .values()
+ .find_map(|repo_entity| {
+ let snapshot = repo_entity.read(cx).snapshot();
+ (*snapshot.work_directory_abs_path == *final_worktree_path)
+ .then(|| repo_entity.clone())
+ })
+ })
+ })?;
+
+ if let Some(worktree_repo) = worktree_repo {
+ // Unwind the two WIP commits. Assumes exactly two commits were
+ // stacked on top of the original HEAD at archive time.
+ let resets_ok = 'resets: {
+ let mixed_reset = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~".to_string(), ResetMode::Mixed, cx)
+ });
+ match mixed_reset.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ log::warn!("Failed to mixed-reset WIP unstaged commit: {err}");
+ break 'resets false;
+ }
+ Err(_) => {
+ log::warn!("Mixed reset was canceled");
+ break 'resets false;
+ }
+ }
+
+ let soft_reset = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~".to_string(), ResetMode::Soft, cx)
+ });
+ match soft_reset.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ log::warn!("Failed to soft-reset WIP staged commit: {err}");
+ // Attempt to undo the mixed reset to return to the WIP commit.
+ let undo = worktree_repo.update(cx, |repo, cx| {
+ repo.reset(commit_hash.clone(), ResetMode::Mixed, cx)
+ });
+ match undo.await {
+ Ok(Ok(())) => {
+ log::info!("Undid mixed reset after soft-reset failure")
+ }
+ Ok(Err(undo_err)) => {
+ log::warn!("Could not undo mixed reset: {undo_err}")
+ }
+ Err(_) => log::warn!("Undo of mixed reset was canceled"),
+ }
+ break 'resets false;
+ }
+ Err(_) => {
+ log::warn!("Soft reset was canceled");
+ // Attempt to undo the mixed reset to return to the WIP commit.
+ let undo = worktree_repo.update(cx, |repo, cx| {
+ repo.reset(commit_hash.clone(), ResetMode::Mixed, cx)
+ });
+ match undo.await {
+ Ok(Ok(())) => {
+ log::info!("Undid mixed reset after soft-reset cancellation")
+ }
+ Ok(Err(undo_err)) => {
+ log::warn!("Could not undo mixed reset: {undo_err}")
+ }
+ Err(_) => log::warn!("Undo of mixed reset was canceled"),
+ }
+ break 'resets false;
+ }
+ }
+
+ true
+ };
+
+ if !resets_ok {
+ log::warn!(
+ "Staging state could not be fully restored for worktree; proceeding to mark as restored"
+ );
+ } else if let Some(original_branch) = &row.branch_name {
+ let switch_receiver = worktree_repo
+ .update(cx, |repo, _cx| repo.change_branch(original_branch.clone()));
+ let switch_ok = matches!(switch_receiver.await, Ok(Ok(())));
+
+ if !switch_ok {
+ // The branch may already exist but be checked out in
+ // another worktree. Attempt to create it in case it
+ // was deleted; if it already exists, just accept the
+ // detached HEAD and warn.
+ let create_receiver = worktree_repo.update(cx, |repo, _cx| {
+ repo.create_branch(original_branch.clone(), None)
+ });
+ match create_receiver.await {
+ Ok(Ok(())) => {}
+ Ok(Err(_)) => {
+ log::warn!(
+ "Could not switch to branch '{original_branch}' — \
+ it may be checked out in another worktree. \
+ The restored worktree is in detached HEAD state."
+ );
+ }
+ Err(_) => {
+ log::warn!(
+ "Branch creation for '{original_branch}' was canceled; \
+ the restored worktree is in detached HEAD state."
+ );
+ }
+ }
+ }
+ }
+ }
+
+ // Mark the archived worktree as restored in the database.
+ let store = cx.update(|_window, cx| ThreadMetadataStore::global(cx))?;
+ store
+ .update(cx, |store, cx| {
+ store.set_archived_worktree_restored(
+ row.id,
+ final_worktree_path.to_string_lossy().to_string(),
+ row.branch_name.clone(),
+ cx,
+ )
+ })
+ .await?;
+ }
+
+ Ok(final_worktree_path)
+ }
+
+ /// Last-resort fallback: create a brand-new worktree on a new branch
+ /// (named from a hash of the archived path, so retries are
+ /// deterministic) when the archived state cannot be restored. The WIP
+ /// commits are not applied here; a warning is logged so the user can
+ /// tell the original state was not recovered.
+ async fn create_fresh_worktree(
+ row: &ArchivedGitWorktree,
+ workspaces: &[Entity<Workspace>],
+ cx: &mut AsyncWindowContext,
+ ) -> anyhow::Result<PathBuf> {
+ // Find the main repo entity.
+ let main_repo = cx.update(|_window, cx| {
+ find_main_repo_in_workspaces(workspaces, &row.main_repo_path, cx)
+ })?;
+
+ let Some(main_repo) = main_repo else {
+ anyhow::bail!(
+ "Main repository at {} not found in any open workspace",
+ row.main_repo_path.display()
+ );
+ };
+
+ // Generate a new branch name for the fresh worktree.
+ let branch_name = {
+ use std::hash::{Hash, Hasher};
+ let mut hasher = std::collections::hash_map::DefaultHasher::new();
+ row.worktree_path.hash(&mut hasher);
+ let suffix = format!("{:x}", hasher.finish())
+ .chars()
+ .take(8)
+ .collect::<String>();
+ format!("restored-{suffix}")
+ };
+ let worktree_path = main_repo.update(cx, |repo, _cx| {
+ let setting = git_store::worktrees_directory_for_repo(
+ &repo.snapshot().original_repo_abs_path,
+ git::repository::DEFAULT_WORKTREE_DIRECTORY,
+ )
+ .ok()
+ .map(|p| p.to_string_lossy().to_string())
+ .unwrap_or_default();
+ repo.path_for_new_linked_worktree(&branch_name, &setting)
+ })?;
+
+ // Create the fresh worktree.
+ let create_receiver = main_repo.update(cx, |repo, _cx| {
+ repo.create_worktree(branch_name, worktree_path.clone(), None)
+ });
+ match create_receiver.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ anyhow::bail!("Failed to create fresh worktree: {err}");
+ }
+ Err(_) => {
+ anyhow::bail!("Fresh worktree creation was canceled");
+ }
+ }
+
+ log::warn!(
+ "Unable to restore the original git worktree. Created a fresh worktree instead."
+ );
+
+ Ok(worktree_path)
+ }
+
+ /// Best-effort cleanup after a restore (or canceled archive): delete the
+ /// anchoring git ref from the main repo, then remove the DB record and
+ /// its join-table links. All failures are logged and ignored.
+ ///
+ /// NOTE(review): if the main repo isn't open, the ref deletion is
+ /// skipped but the DB record is still deleted, leaving an orphaned
+ /// `refs/archived-worktrees/<id>` ref behind — confirm this is acceptable.
+ async fn maybe_cleanup_archived_worktree(
+ row: &ArchivedGitWorktree,
+ store: &Entity<ThreadMetadataStore>,
+ workspaces: &[Entity<Workspace>],
+ cx: &mut AsyncWindowContext,
+ ) {
+ // Delete the git ref from the main repo.
+ let Ok(main_repo) = cx.update(|_window, cx| {
+ find_main_repo_in_workspaces(workspaces, &row.main_repo_path, cx)
+ }) else {
+ return;
+ };
+
+ if let Some(main_repo) = main_repo {
+ let ref_name = archived_worktree_ref_name(row.id);
+ let receiver = main_repo.update(cx, |repo, _cx| repo.delete_ref(ref_name));
+ if let Ok(result) = receiver.await {
+ result.log_err();
+ }
+ }
+
+ // Delete the archived worktree record (and join table entries).
+ store
+ .update(cx, |store, cx| store.delete_archived_worktree(row.id, cx))
+ .await
+ .log_err();
+ }
+
fn expand_selected_entry(
&mut self,
_: &SelectChild,
@@ -2496,6 +2932,573 @@ impl Sidebar {
}
}
}
+
+ self.maybe_delete_git_worktree_for_archived_thread(session_id, window, cx);
+ }
+
+ /// If the thread being archived is associated with a linked git worktree,
+ /// link it to an archived worktree record. If this is the last thread on
+ /// that worktree, create a WIP commit, anchor it with a git ref, and
+ /// delete the worktree.
+ fn maybe_delete_git_worktree_for_archived_thread(
+ &mut self,
+ session_id: &acp::SessionId,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let folder_paths = self.contents.entries.iter().find_map(|entry| {
+ if let ListEntry::Thread(t) = entry {
+ if &t.metadata.session_id == session_id {
+ return Some(match &t.workspace {
+ ThreadEntryWorkspace::Open(ws) => workspace_path_list(ws, cx),
+ ThreadEntryWorkspace::Closed(path_list) => path_list.clone(),
+ });
+ }
+ }
+ None
+ });
+
+ let Some(folder_paths) = folder_paths else {
+ return;
+ };
+
+ let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+ return;
+ };
+ let workspaces = multi_workspace.read(cx).workspaces().to_vec();
+
+ let store_entity = ThreadMetadataStore::global(cx);
+ let is_last_thread = !store_entity
+ .read(cx)
+ .entries_for_path(&folder_paths)
+ .any(|entry| &entry.session_id != session_id);
+
+ // Collect info for each path that is a linked git worktree.
+ let mut linked_worktrees: Vec<(
+ Entity<git_store::Repository>,
+ PathBuf,
+ Option<String>,
+ std::sync::Arc<std::path::Path>,
+ Option<Entity<git_store::Repository>>,
+ )> = Vec::new();
+ for worktree_path in folder_paths.paths() {
+ if let Some(info) = workspaces.iter().find_map(|workspace| {
+ let project = workspace.read(cx).project().clone();
+ project
+ .read(cx)
+ .repositories(cx)
+ .values()
+ .find_map(|repo_entity| {
+ let snapshot = repo_entity.read(cx).snapshot();
+ if snapshot.is_linked_worktree()
+ && *snapshot.work_directory_abs_path == *worktree_path
+ {
+ let branch_name =
+ snapshot.branch.as_ref().map(|b| b.name().to_string());
+ let main_repo_path = snapshot.original_repo_abs_path;
+ let main_repo =
+ find_main_repo_in_workspaces(&workspaces, &main_repo_path, cx);
+ Some((
+ repo_entity.clone(),
+ worktree_path.clone(),
+ branch_name,
+ main_repo_path,
+ main_repo,
+ ))
+ } else {
+ None
+ }
+ })
+ }) {
+ linked_worktrees.push(info);
+ }
+ }
+
+ if linked_worktrees.is_empty() {
+ return;
+ }
+
+ let fs = <dyn fs::Fs>::global(cx);
+
+ for (worktree_repo, worktree_path, branch_name, main_repo_path, main_repo) in
+ linked_worktrees
+ {
+ let session_id = session_id.clone();
+ let folder_paths = folder_paths.clone();
+ let fs = fs.clone();
+ let worktree_path_key = worktree_path.clone();
+ let cleanup_key = worktree_path_key.clone();
+
+ let task = cx.spawn_in(window, async move |this, cx| {
+ let result = Self::archive_single_worktree(
+ worktree_repo,
+ worktree_path,
+ branch_name,
+ main_repo_path,
+ main_repo,
+ is_last_thread,
+ session_id,
+ folder_paths,
+ fs,
+ cx,
+ )
+ .await;
+ this.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.pending_worktree_archives.remove(&cleanup_key);
+ })
+ .log_err();
+ result
+ });
+ self.pending_worktree_archives
+ .insert(worktree_path_key, task);
+ }
+ }
+
+ async fn archive_single_worktree(
+ worktree_repo: Entity<git_store::Repository>,
+ worktree_path: PathBuf,
+ branch_name: Option<String>,
+ main_repo_path: std::sync::Arc<std::path::Path>,
+ main_repo: Option<Entity<git_store::Repository>>,
+ is_last_thread: bool,
+ session_id: acp::SessionId,
+ folder_paths: PathList,
+ fs: std::sync::Arc<dyn fs::Fs>,
+ cx: &mut AsyncWindowContext,
+ ) -> anyhow::Result<()> {
+ if !is_last_thread {
+ return anyhow::Ok(());
+ }
+
+ let store = cx.update(|_window, cx| ThreadMetadataStore::global(cx))?;
+
+ // Re-check inside the async block to narrow the TOCTOU window:
+ // another thread on the same worktree may have been un-archived
+ // (or a new one created) between the synchronous check and here.
+ // Note: this does not fully close the race — state can still change
+ // at subsequent await points during the commit/archive sequence.
+ let still_last_thread = store.update(cx, |store, _cx| {
+ !store
+ .entries_for_path(&folder_paths)
+ .any(|entry| &entry.session_id != &session_id)
+ });
+ if !still_last_thread {
+ return anyhow::Ok(());
+ }
+
+ // Helper: unarchive the thread so it reappears in the sidebar.
+ let unarchive = |cx: &mut AsyncWindowContext| {
+ store.update(cx, |store, cx| {
+ store.unarchive(&session_id, cx);
+ });
+ };
+
+ // Helper: undo both WIP commits on the worktree.
+ let undo_wip_commits = |cx: &mut AsyncWindowContext| {
+ let reset_receiver = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~2".to_string(), ResetMode::Mixed, cx)
+ });
+ async move {
+ match reset_receiver.await {
+ Ok(Ok(())) => true,
+ Ok(Err(err)) => {
+ log::error!("Failed to undo WIP commits: {err}");
+ false
+ }
+ Err(_) => {
+ log::error!("WIP commit undo was canceled");
+ false
+ }
+ }
+ }
+ };
+
+ // We create two commits to preserve the original staging state:
+ // 1. Commit whatever is currently staged (allow-empty).
+ // 2. Stage everything (including untracked), commit again (allow-empty).
+ //
+ // On restore, two resets undo this:
+ // 1. `git reset --mixed HEAD~` — undoes commit 2, puts
+ // previously-unstaged/untracked files back as unstaged.
+ // 2. `git reset --soft HEAD~` — undoes commit 1, leaves
+ // the index as-is so originally-staged files stay staged.
+ //
+ // If any step in this sequence fails, we undo everything and
+ // bail out.
+
+ // Step 1: commit whatever is currently staged.
+ let askpass = AskPassDelegate::new(cx, |_, _, _| {});
+ let first_commit_result = worktree_repo.update(cx, |repo, cx| {
+ repo.commit(
+ "WIP staged".into(),
+ None,
+ CommitOptions {
+ allow_empty: true,
+ ..Default::default()
+ },
+ askpass,
+ cx,
+ )
+ });
+ let first_commit_ok = match first_commit_result.await {
+ Ok(Ok(())) => true,
+ Ok(Err(err)) => {
+ log::error!("Failed to create first WIP commit (staged): {err}");
+ false
+ }
+ Err(_) => {
+ log::error!("First WIP commit was canceled");
+ false
+ }
+ };
+
+ // Step 2: stage everything including untracked, then commit.
+ // If anything fails after the first commit, undo it and bail.
+ let commit_ok = if first_commit_ok {
+ let stage_result =
+ worktree_repo.update(cx, |repo, _cx| repo.stage_all_including_untracked());
+ let stage_ok = match stage_result.await {
+ Ok(Ok(())) => true,
+ Ok(Err(err)) => {
+ log::error!("Failed to stage worktree files: {err}");
+ false
+ }
+ Err(_) => {
+ log::error!("Stage operation was canceled");
+ false
+ }
+ };
+
+ if !stage_ok {
+ let undo = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~".to_string(), ResetMode::Mixed, cx)
+ });
+ match undo.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => log::error!("Failed to undo first WIP commit: {err}"),
+ Err(_) => log::error!("Undo of first WIP commit was canceled"),
+ }
+ false
+ } else {
+ let askpass = AskPassDelegate::new(cx, |_, _, _| {});
+ let second_commit_result = worktree_repo.update(cx, |repo, cx| {
+ repo.commit(
+ "WIP unstaged".into(),
+ None,
+ CommitOptions {
+ allow_empty: true,
+ ..Default::default()
+ },
+ askpass,
+ cx,
+ )
+ });
+ match second_commit_result.await {
+ Ok(Ok(())) => true,
+ Ok(Err(err)) => {
+ log::error!("Failed to create second WIP commit (unstaged): {err}");
+ let undo = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~".to_string(), ResetMode::Mixed, cx)
+ });
+ match undo.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ log::error!("Failed to undo first WIP commit: {err}")
+ }
+ Err(_) => {
+ log::error!("Undo of first WIP commit was canceled")
+ }
+ }
+ false
+ }
+ Err(_) => {
+ log::error!("Second WIP commit was canceled");
+ let undo = worktree_repo.update(cx, |repo, cx| {
+ repo.reset("HEAD~".to_string(), ResetMode::Mixed, cx)
+ });
+ match undo.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ log::error!("Failed to undo first WIP commit: {err}")
+ }
+ Err(_) => {
+ log::error!("Undo of first WIP commit was canceled")
+ }
+ }
+ false
+ }
+ }
+ }
+ } else {
+ false
+ };
+
+ let worktree_path_str = worktree_path.to_string_lossy().to_string();
+ let main_repo_path_str = main_repo_path.to_string_lossy().to_string();
+
+ let mut archived_worktree_id: Option<i64> = None;
+
+ if !commit_ok {
+ // Show a prompt asking the user what to do.
+ let answer = cx.prompt(
+ PromptLevel::Warning,
+ "Failed to save worktree state",
+ Some(
+ "Could not create a WIP commit for this worktree. \
+ If you proceed, the worktree will be deleted and \
+ unarchiving this thread later will not restore the \
+ filesystem to its previous state.\n\n\
+ Cancel to keep the worktree on disk so you can \
+ resolve the issue manually.",
+ ),
+ &["Delete Anyway", "Cancel"],
+ );
+
+ match answer.await {
+ Ok(0) => {
+ // "Delete Anyway" — proceed to worktree deletion
+ // without a WIP commit or DB record.
+ }
+ _ => {
+ // "Cancel" — undo the archive so the thread
+ // reappears in the sidebar.
+ unarchive(cx);
+ return anyhow::Ok(());
+ }
+ }
+ } else {
+ // Commit succeeded — get hash, create archived worktree row, create ref.
+ let head_sha_result = worktree_repo.update(cx, |repo, _cx| repo.head_sha());
+ let commit_hash = match head_sha_result.await {
+ Ok(Ok(Some(sha))) => sha,
+ sha_result => {
+ let reason = match &sha_result {
+ Ok(Ok(None)) => "HEAD SHA is None".into(),
+ Ok(Err(err)) => format!("Failed to get HEAD SHA: {err}"),
+ Err(_) => "HEAD SHA operation was canceled".into(),
+ Ok(Ok(Some(_))) => unreachable!(),
+ };
+ log::error!("{reason} after WIP commits; attempting to undo");
+ let undo_ok = undo_wip_commits(cx).await;
+ unarchive(cx);
+ let detail = if undo_ok {
+ "Could not read the commit hash after creating \
+ the WIP commit. The commit has been undone and \
+ the thread has been restored to the sidebar."
+ } else {
+ "Could not read the commit hash after creating \
+ the WIP commit. The commit could not be automatically \
+ undone \u{2014} you may need to manually run `git reset HEAD~2` \
+ on the worktree. The thread has been restored to the sidebar."
+ };
+ cx.prompt(
+ PromptLevel::Warning,
+ "Failed to archive worktree",
+ Some(detail),
+ &["OK"],
+ )
+ .await
+ .ok();
+ return anyhow::Ok(());
+ }
+ };
+
+ let create_result = store
+ .update(cx, |store, cx| {
+ store.create_archived_worktree(
+ worktree_path_str.clone(),
+ main_repo_path_str,
+ branch_name,
+ commit_hash.clone(),
+ cx,
+ )
+ })
+ .await;
+
+ match create_result {
+ Ok(id) => {
+ // Link the current thread to the archived worktree record.
+ let link_result = store
+ .update(cx, |store, cx| {
+ store.link_thread_to_archived_worktree(session_id.0.to_string(), id, cx)
+ })
+ .await;
+
+ if let Err(err) = link_result {
+ log::error!("Failed to link thread to archived worktree: {err}");
+ store
+ .update(cx, |store, cx| store.delete_archived_worktree(id, cx))
+ .await
+ .log_err();
+ let undo_ok = undo_wip_commits(cx).await;
+ unarchive(cx);
+ let detail = if undo_ok {
+ "Could not link the thread to the archived worktree record. \
+ The WIP commit has been undone and the thread \
+ has been restored to the sidebar."
+ } else {
+ "Could not link the thread to the archived worktree record. \
+ The WIP commit could not be automatically \
+ undone \u{2014} you may need to manually run `git reset HEAD~2` \
+ on the worktree. The thread has been restored to the sidebar."
+ };
+ cx.prompt(
+ PromptLevel::Warning,
+ "Failed to archive worktree",
+ Some(detail),
+ &["OK"],
+ )
+ .await
+ .ok();
+ return anyhow::Ok(());
+ }
+
+ archived_worktree_id = Some(id);
+
+ // Create a git ref on the main repo (non-fatal if
+ // this fails — the commit hash is in the DB).
+ if let Some(main_repo) = &main_repo {
+ let ref_name = archived_worktree_ref_name(id);
+ let ref_result = main_repo
+ .update(cx, |repo, _cx| repo.update_ref(ref_name, commit_hash));
+ match ref_result.await {
+ Ok(Ok(())) => {}
+ Ok(Err(err)) => {
+ log::warn!("Failed to create archive ref: {err}")
+ }
+ Err(_) => log::warn!("Archive ref creation was canceled"),
+ }
+ }
+ }
+ Err(err) => {
+ log::error!("Failed to create archived worktree record: {err}");
+ let undo_ok = undo_wip_commits(cx).await;
+ unarchive(cx);
+ let detail = if undo_ok {
+ "Could not save the archived worktree record. \
+ The WIP commit has been undone and the thread \
+ has been restored to the sidebar."
+ } else {
+ "Could not save the archived worktree record. \
+ The WIP commit could not be automatically \
+ undone \u{2014} you may need to manually run `git reset HEAD~2` \
+ on the worktree. The thread has been restored to the sidebar."
+ };
+ cx.prompt(
+ PromptLevel::Warning,
+ "Failed to archive worktree",
+ Some(detail),
+ &["OK"],
+ )
+ .await
+ .ok();
+ return anyhow::Ok(());
+ }
+ }
+ }
+
+ let timestamp = std::time::SystemTime::now()
+ .duration_since(std::time::UNIX_EPOCH)
+ .unwrap_or_default()
+ .as_nanos();
+ let temp_path = std::env::temp_dir().join(format!("zed-removing-worktree-{timestamp}"));
+
+ let dir_removed = if fs
+ .rename(
+ &worktree_path,
+ &temp_path,
+ fs::RenameOptions {
+ overwrite: false,
+ ..Default::default()
+ },
+ )
+ .await
+ .is_ok()
+ {
+ if let Some(main_repo) = &main_repo {
+ let receiver = main_repo.update(cx, |repo, _cx| {
+ repo.remove_worktree(worktree_path.clone(), true)
+ });
+ if let Ok(result) = receiver.await {
+ result.log_err();
+ }
+ }
+ fs.remove_dir(
+ &temp_path,
+ fs::RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: true,
+ },
+ )
+ .await
+ .log_err();
+ true
+ } else if fs
+ .remove_dir(
+ &worktree_path,
+ fs::RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: true,
+ },
+ )
+ .await
+ .is_ok()
+ {
+ if let Some(main_repo) = &main_repo {
+ let receiver = main_repo.update(cx, |repo, _cx| {
+ repo.remove_worktree(worktree_path.clone(), true)
+ });
+ if let Ok(result) = receiver.await {
+ result.log_err();
+ }
+ }
+ true
+ } else {
+ false
+ };
+
+ if !dir_removed {
+ let undo_ok = if commit_ok {
+ undo_wip_commits(cx).await
+ } else {
+ true
+ };
+ if let Some(id) = archived_worktree_id {
+ if let Some(main_repo) = &main_repo {
+ let ref_name = archived_worktree_ref_name(id);
+ let receiver = main_repo.update(cx, |repo, _cx| repo.delete_ref(ref_name));
+ if let Ok(result) = receiver.await {
+ result.log_err();
+ }
+ }
+ store
+ .update(cx, |store, cx| store.delete_archived_worktree(id, cx))
+ .await
+ .log_err();
+ }
+ unarchive(cx);
+ let detail = if undo_ok {
+ "Could not remove the worktree directory from disk. \
+ Any WIP commits and archive records have been rolled \
+ back, and the thread has been restored to the sidebar."
+ } else {
+ "Could not remove the worktree directory from disk. \
+ The archive records have been rolled back, but the WIP \
+ commits could not be automatically undone \u{2014} you may need \
+ to manually run `git reset HEAD~2` on the worktree. \
+ The thread has been restored to the sidebar."
+ };
+ cx.prompt(
+ PromptLevel::Warning,
+ "Failed to delete worktree",
+ Some(detail),
+ &["OK"],
+ )
+ .await
+ .ok();
+ }
+
+ anyhow::Ok(())
}
fn remove_selected_thread(
@@ -12,10 +12,7 @@ use gpui::TestAppContext;
use pretty_assertions::assert_eq;
use project::AgentId;
use settings::SettingsStore;
-use std::{
- path::{Path, PathBuf},
- sync::Arc,
-};
+use std::{path::PathBuf, sync::Arc};
use util::path_list::PathList;
fn init_test(cx: &mut TestAppContext) {
@@ -149,7 +146,6 @@ fn save_thread_metadata(
updated_at,
created_at,
folder_paths: path_list,
- main_worktree_paths: PathList::default(),
archived: false,
};
cx.update(|cx| {
@@ -698,7 +694,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-1")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
title: "Completed thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -721,7 +716,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-2")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
title: "Running thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -744,7 +738,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-3")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
title: "Error thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -767,7 +760,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-4")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
title: "Waiting thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -790,7 +782,6 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-5")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
title: "Notified thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -2058,7 +2049,6 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
archived: false,
},
&workspace_a,
@@ -2114,7 +2104,6 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
archived: false,
},
&workspace_b,
@@ -2446,24 +2435,38 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
"src": {},
}),
)
.await;
// Worktree checkout pointing back to the main repo.
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -2570,17 +2573,15 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) {
let (project, fs) = init_test_project_with_git("/project", cx).await;
fs.as_fake()
- .add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ .with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt/rosewood"),
ref_name: Some("refs/heads/rosewood".into()),
sha: "abc".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
project
.update(cx, |project, cx| project.git_scans_complete(cx))
@@ -2634,17 +2635,15 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) {
// Now add the worktree to the git state and trigger a rescan.
fs.as_fake()
- .add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- true,
- git::repository::Worktree {
+ .with_git_state(std::path::Path::new("/project/.git"), true, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt/rosewood"),
ref_name: Some("refs/heads/rosewood".into()),
sha: "abc".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.run_until_parked();
@@ -2668,6 +2667,16 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC
"/project",
serde_json::json!({
".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ "feature-b": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-b",
+ },
+ },
},
"src": {},
}),
@@ -2675,26 +2684,20 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC
.await;
// Two worktree checkouts whose .git files point back to the main repo.
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
- path: std::path::PathBuf::from("/wt-feature-a"),
- ref_name: Some("refs/heads/feature-a".into()),
- sha: "aaa".into(),
- is_main: false,
- },
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
- path: std::path::PathBuf::from("/wt-feature-b"),
- ref_name: Some("refs/heads/feature-b".into()),
- sha: "bbb".into(),
- is_main: false,
- },
+ fs.insert_tree(
+ "/wt-feature-b",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-b",
+ "src": {},
+ }),
)
.await;
@@ -2732,6 +2735,24 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC
]
);
+ // Configure the main repo to list both worktrees before opening
+ // it so the initial git scan picks them up.
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
+ path: std::path::PathBuf::from("/wt-feature-a"),
+ ref_name: Some("refs/heads/feature-a".into()),
+ sha: "aaa".into(),
+ is_main: false,
+ });
+ state.worktrees.push(git::repository::Worktree {
+ path: std::path::PathBuf::from("/wt-feature-b"),
+ ref_name: Some("refs/heads/feature-b".into()),
+ sha: "bbb".into(),
+ is_main: false,
+ });
+ })
+ .unwrap();
+
let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
main_project
.update(cx, |p, cx| p.git_scans_complete(cx))
@@ -2767,33 +2788,54 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ "feature-b": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-b",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/wt-feature-b",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-b",
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ });
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-b"),
ref_name: Some("refs/heads/feature-b".into()),
sha: "bbb".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -2842,7 +2884,18 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext
fs.insert_tree(
"/project_a",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "olivetti": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/olivetti",
+ },
+ "selectric": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/selectric",
+ },
+ },
+ },
"src": {},
}),
)
@@ -2850,28 +2903,56 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext
fs.insert_tree(
"/project_b",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "olivetti": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/olivetti",
+ },
+ "selectric": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/selectric",
+ },
+ },
+ },
"src": {},
}),
)
.await;
// Worktree checkouts.
+ for (repo, branch) in &[
+ ("project_a", "olivetti"),
+ ("project_a", "selectric"),
+ ("project_b", "olivetti"),
+ ("project_b", "selectric"),
+ ] {
+ let worktree_path = format!("/worktrees/{repo}/{branch}/{repo}");
+ let gitdir = format!("gitdir: /{repo}/.git/worktrees/{branch}");
+ fs.insert_tree(
+ &worktree_path,
+ serde_json::json!({
+ ".git": gitdir,
+ "src": {},
+ }),
+ )
+ .await;
+ }
+
+ // Register linked worktrees.
for repo in &["project_a", "project_b"] {
let git_path = format!("/{repo}/.git");
- for branch in &["olivetti", "selectric"] {
- fs.add_linked_worktree_for_repo(
- Path::new(&git_path),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new(&git_path), false, |state| {
+ for branch in &["olivetti", "selectric"] {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from(format!("/worktrees/{repo}/{branch}/{repo}")),
ref_name: Some(format!("refs/heads/{branch}").into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
- }
+ });
+ }
+ })
+ .unwrap();
}
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -2924,7 +3005,14 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext
fs.insert_tree(
"/project_a",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "olivetti": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/olivetti",
+ },
+ },
+ },
"src": {},
}),
)
@@ -2932,25 +3020,41 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext
fs.insert_tree(
"/project_b",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "olivetti": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/olivetti",
+ },
+ },
+ },
"src": {},
}),
)
.await;
for repo in &["project_a", "project_b"] {
+ let worktree_path = format!("/worktrees/{repo}/olivetti/{repo}");
+ let gitdir = format!("gitdir: /{repo}/.git/worktrees/olivetti");
+ fs.insert_tree(
+ &worktree_path,
+ serde_json::json!({
+ ".git": gitdir,
+ "src": {},
+ }),
+ )
+ .await;
+
let git_path = format!("/{repo}/.git");
- fs.add_linked_worktree_for_repo(
- Path::new(&git_path),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new(&git_path), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from(format!("/worktrees/{repo}/olivetti/{repo}")),
ref_name: Some("refs/heads/olivetti".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
}
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3010,24 +3114,38 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
"src": {},
}),
)
.await;
// Worktree checkout pointing back to the main repo.
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3113,23 +3231,37 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3206,23 +3338,37 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3298,23 +3444,37 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3435,23 +3595,37 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace(
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -3579,7 +3753,6 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3642,7 +3815,6 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace(
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::new(&[std::path::PathBuf::from("/project-b")]),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3705,7 +3877,6 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace(
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::default(),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3760,7 +3931,6 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut
updated_at: Utc::now(),
created_at: None,
folder_paths: path_list_b,
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3810,7 +3980,6 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3887,7 +4056,6 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -3963,7 +4131,6 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::new(&[PathBuf::from("/project-a")]),
- main_worktree_paths: PathList::default(),
archived: false,
},
window,
@@ -4023,23 +4190,37 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
"src": {},
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -4173,7 +4354,22 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {},
+ ".git": {
+ "worktrees": {
+ "feature-a": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature-a",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+ fs.insert_tree(
+ "/wt-feature-a",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature-a",
"src": {},
}),
)
@@ -4188,17 +4384,15 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test
.await;
// Register the linked worktree in the main repo.
- fs.add_linked_worktree_for_repo(
- Path::new("/project/.git"),
- false,
- git::repository::Worktree {
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.worktrees.push(git::repository::Worktree {
path: std::path::PathBuf::from("/wt-feature-a"),
ref_name: Some("refs/heads/feature-a".into()),
sha: "aaa".into(),
is_main: false,
- },
- )
- .await;
+ });
+ })
+ .unwrap();
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
@@ -4704,86 +4898,422 @@ async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppCon
}
#[gpui::test]
-async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut TestAppContext) {
- // When only a linked worktree workspace is open (not the main repo),
- // threads saved against the main repo should still appear in the sidebar.
- init_test(cx);
+async fn test_archive_and_restore_single_worktree(cx: &mut TestAppContext) {
+ // Test the restore/unarchive flow for a git worktree. We set up a main
+ // repo with an archived worktree record (simulating a prior archive) and
+ // then trigger `activate_archived_thread` to verify:
+ // 1. The worktree directory is recreated.
+ // 2. The archived worktree DB record is cleaned up.
+ // 3. The thread is unarchived in the metadata store.
+ agent_ui::test_support::init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(false, vec!["agent-v2".into()]);
+ ThreadStore::init_global(cx);
+ ThreadMetadataStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ prompt_store::init(cx);
+ });
+
let fs = FakeFs::new(cx.executor());
- // Create the main repo with a linked worktree.
+ // Set up a main repo at /project. The linked worktree at /wt-feature does
+ // NOT exist on disk — it was deleted during the archive step.
fs.insert_tree(
"/project",
serde_json::json!({
- ".git": {
- "worktrees": {
- "feature-a": {
- "commondir": "../../",
- "HEAD": "ref: refs/heads/feature-a",
- },
- },
- },
- "src": {},
+ ".git": {},
+ "src": { "main.rs": "fn main() {}" },
}),
)
.await;
+ let wip_commit_hash = "fake-wip-sha-123";
+
+ cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+ let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+ main_project
+ .update(cx, |p, cx| p.git_scans_complete(cx))
+ .await;
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspaces()[0].clone());
+ let _main_panel = add_agent_panel(&main_workspace, cx);
+
+ // Create a thread and immediately archive it.
+ let session_id = acp::SessionId::new(Arc::from("wt-thread"));
+ save_thread_metadata(
+ session_id.clone(),
+ "Worktree Thread".into(),
+ chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
+ None,
+ PathList::new(&[std::path::PathBuf::from("/wt-feature")]),
+ cx,
+ );
+
+ let store = cx.update(|_, cx| ThreadMetadataStore::global(cx));
+ cx.update(|_, cx| {
+ store.update(cx, |store, cx| store.archive(&session_id, cx));
+ });
+ cx.run_until_parked();
+
+ // Create the archived worktree DB record (simulates what the archive flow
+ // would have written after making the WIP commits) and link it to the thread.
+ let archived_id = store
+ .update_in(cx, |store, _window, cx| {
+ store.create_archived_worktree(
+ "/wt-feature".to_string(),
+ "/project".to_string(),
+ Some("feature".to_string()),
+ wip_commit_hash.to_string(),
+ cx,
+ )
+ })
+ .await
+ .expect("creating archived worktree record should succeed");
+
+ store
+ .update_in(cx, |store, _window, cx| {
+ store.link_thread_to_archived_worktree(session_id.0.to_string(), archived_id, cx)
+ })
+ .await
+ .expect("linking thread to archived worktree should succeed");
+
+ // Verify pre-conditions: the worktree directory does not exist and the
+ // DB record is present.
+ assert!(
+ !fs.directories(false)
+ .iter()
+ .any(|p| p == std::path::Path::new("/wt-feature")),
+ "worktree directory should not exist before restore"
+ );
+
+ let archived_rows = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_id.0.to_string(), cx)
+ })
+ .await
+ .expect("DB query should succeed");
+ assert_eq!(
+ archived_rows.len(),
+ 1,
+ "expected exactly one archived worktree record before restore"
+ );
+ let archived_row = &archived_rows[0];
+ assert_eq!(archived_row.id, archived_id);
+ assert_eq!(archived_row.commit_hash, wip_commit_hash);
+ assert_eq!(archived_row.branch_name.as_deref(), Some("feature"));
+
+ // Now seed the git ref using the actual archived worktree ID.
+ let expected_ref_name = archived_worktree_ref_name(archived_id);
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state
+ .refs
+ .insert(expected_ref_name.clone(), wip_commit_hash.into());
+ })
+ .unwrap();
+
+ // Thread should be archived.
+ cx.update(|_, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ let archived: Vec<_> = store.read(cx).archived_entries().collect();
+ assert_eq!(archived.len(), 1);
+ assert_eq!(archived[0].session_id.0.as_ref(), "wt-thread");
+ });
+
+ // Restore / Unarchive
+ let metadata = cx.update(|_, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ store
+ .read(cx)
+ .archived_entries()
+ .find(|e| e.session_id.0.as_ref() == "wt-thread")
+ .cloned()
+ .expect("expected to find archived thread metadata")
+ });
+
+ sidebar.update_in(cx, |sidebar, window, cx| {
+ sidebar.activate_archived_thread(metadata, window, cx);
+ });
+ // The restore flow involves multiple async steps: worktree creation,
+ // project scan, the two resets, branch switch, DB cleanup.
+ cx.run_until_parked();
+
+ // NOTE: The FakeGitRepository::create_worktree implementation does not
+ // create a `.git` gitfile inside the worktree directory, so the project
+ // scanner does not discover a Repository entity for the restored worktree.
+ // This means the two-reset staging-restoration logic (mixed reset HEAD~,
+ // then soft reset HEAD~) is not exercised by this test. An integration
+ // test with a real git repo would be needed to cover that path.
+
+ // 1. The thread should no longer be archived.
+ cx.update(|_, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ let archived: Vec<_> = store.read(cx).archived_entries().collect();
+ assert!(
+ archived.is_empty(),
+ "expected no archived threads after restore, got: {archived:?}"
+ );
+ });
+
+ // 2. The worktree directory should exist again on disk (recreated via
+ // create_worktree_detached).
+ assert!(
+ fs.directories(false)
+ .iter()
+ .any(|p| p == std::path::Path::new("/wt-feature")),
+ "expected worktree directory to be recreated after restore, dirs: {:?}",
+ fs.directories(false)
+ );
+
+ // 3. The archived worktree DB record should be cleaned up.
+ let archived_rows_after = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_id.0.to_string(), cx)
+ })
+ .await
+ .expect("DB query should succeed");
+ assert!(
+ archived_rows_after.is_empty(),
+ "expected archived worktree records to be empty after restore"
+ );
+
+ // 4. The git ref should have been cleaned up from the main repo.
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ assert!(
+ !state.refs.contains_key(&expected_ref_name),
+ "expected git ref to be deleted after restore, refs: {:?}",
+ state.refs
+ );
+ })
+ .unwrap();
+}
+
+#[gpui::test]
+async fn test_archive_two_threads_same_path_then_restore_first(cx: &mut TestAppContext) {
+ // Regression test: archiving two different threads that use the same
+ // worktree path should create independent archived worktree records.
+ // Unarchiving the first thread should restore its own record without
+ // losing the second thread's record.
+ agent_ui::test_support::init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(false, vec!["agent-v2".into()]);
+ ThreadStore::init_global(cx);
+ ThreadMetadataStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ prompt_store::init(cx);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+
fs.insert_tree(
- "/wt-feature-a",
+ "/project",
serde_json::json!({
- ".git": "gitdir: /project/.git/worktrees/feature-a",
- "src": {},
+ ".git": {},
+ "src": { "main.rs": "fn main() {}" },
}),
)
.await;
- fs.add_linked_worktree_for_repo(
- std::path::Path::new("/project/.git"),
- false,
- git::repository::Worktree {
- path: std::path::PathBuf::from("/wt-feature-a"),
- ref_name: Some("refs/heads/feature-a".into()),
- sha: "abc".into(),
- is_main: false,
- },
- )
- .await;
-
cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
- // Only open the linked worktree as a workspace — NOT the main repo.
- let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
- worktree_project
+ let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+ main_project
.update(cx, |p, cx| p.git_scans_complete(cx))
.await;
- let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
- MultiWorkspace::test_new(worktree_project.clone(), window, cx)
- });
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+
let sidebar = setup_sidebar(&multi_workspace, cx);
- // Save a thread against the MAIN repo path.
- let main_paths = PathList::new(&[std::path::PathBuf::from("/project")]);
- save_named_thread_metadata("main-thread", "Main Repo Thread", &main_paths, cx).await;
+ let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspaces()[0].clone());
+ let _main_panel = add_agent_panel(&main_workspace, cx);
- // Save a thread against the linked worktree path.
- let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]);
- save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await;
+ let store = cx.update(|_, cx| ThreadMetadataStore::global(cx));
- multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+ // Thread A: archive with worktree at /wt-feature
+ let session_a = acp::SessionId::new(Arc::from("thread-a"));
+ save_thread_metadata(
+ session_a.clone(),
+ "Thread A".into(),
+ chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
+ None,
+ PathList::new(&[std::path::PathBuf::from("/wt-feature")]),
+ cx,
+ );
+ cx.update(|_, cx| {
+ store.update(cx, |store, cx| store.archive(&session_a, cx));
+ });
cx.run_until_parked();
- // Both threads should be visible: the worktree thread by direct lookup,
- // and the main repo thread because the workspace is a linked worktree
- // and we also query the main repo path.
- let entries = visible_entries_as_strings(&sidebar, cx);
- assert!(
- entries.iter().any(|e| e.contains("Main Repo Thread")),
- "expected main repo thread to be visible in linked worktree workspace, got: {entries:?}"
+ let id_a = store
+ .update_in(cx, |store, _window, cx| {
+ store.create_archived_worktree(
+ "/wt-feature".to_string(),
+ "/project".to_string(),
+ Some("feature-a".to_string()),
+ "sha-aaa".to_string(),
+ cx,
+ )
+ })
+ .await
+ .expect("create archived worktree A");
+ store
+ .update_in(cx, |store, _window, cx| {
+ store.link_thread_to_archived_worktree(session_a.0.to_string(), id_a, cx)
+ })
+ .await
+ .expect("link thread A");
+
+ // Seed a git ref for thread A's archive.
+ let ref_a = archived_worktree_ref_name(id_a);
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.refs.insert(ref_a.clone(), "sha-aaa".into());
+ })
+ .unwrap();
+
+ // Thread B: archive with the SAME worktree path
+ let session_b = acp::SessionId::new(Arc::from("thread-b"));
+ save_thread_metadata(
+ session_b.clone(),
+ "Thread B".into(),
+ chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 2, 1, 0, 0, 0).unwrap(),
+ None,
+ PathList::new(&[std::path::PathBuf::from("/wt-feature")]),
+ cx,
);
+ cx.update(|_, cx| {
+ store.update(cx, |store, cx| store.archive(&session_b, cx));
+ });
+ cx.run_until_parked();
+
+ let id_b = store
+ .update_in(cx, |store, _window, cx| {
+ store.create_archived_worktree(
+ "/wt-feature".to_string(),
+ "/project".to_string(),
+ Some("feature-b".to_string()),
+ "sha-bbb".to_string(),
+ cx,
+ )
+ })
+ .await
+ .expect("create archived worktree B");
+ store
+ .update_in(cx, |store, _window, cx| {
+ store.link_thread_to_archived_worktree(session_b.0.to_string(), id_b, cx)
+ })
+ .await
+ .expect("link thread B");
+
+ let ref_b = archived_worktree_ref_name(id_b);
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ state.refs.insert(ref_b.clone(), "sha-bbb".into());
+ })
+ .unwrap();
+
+ // Both threads should be archived, with independent IDs.
+ assert_ne!(id_a, id_b, "each archive should get its own ID");
+
+ // Verify both records exist independently.
+ let rows_a = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_a.0.to_string(), cx)
+ })
+ .await
+ .unwrap();
+ assert_eq!(rows_a.len(), 1);
+ assert_eq!(rows_a[0].commit_hash, "sha-aaa");
+
+ let rows_b = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_b.0.to_string(), cx)
+ })
+ .await
+ .unwrap();
+ assert_eq!(rows_b.len(), 1);
+ assert_eq!(rows_b[0].commit_hash, "sha-bbb");
+
+ // Unarchive Thread A
+ let metadata_a = cx.update(|_, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ store
+ .read(cx)
+ .archived_entries()
+ .find(|e| e.session_id.0.as_ref() == "thread-a")
+ .cloned()
+ .expect("expected to find archived thread A")
+ });
+
+ sidebar.update_in(cx, |sidebar, window, cx| {
+ sidebar.activate_archived_thread(metadata_a, window, cx);
+ });
+ cx.run_until_parked();
+
+ // Thread A should no longer be archived.
+ cx.update(|_, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ let archived_ids: Vec<_> = store
+ .read(cx)
+ .archived_entries()
+ .map(|e| e.session_id.0.to_string())
+ .collect();
+ assert!(
+ !archived_ids.contains(&"thread-a".to_string()),
+ "thread A should be unarchived, but archived list is: {archived_ids:?}"
+ );
+ });
+
+ // Thread A's archived worktree record should be cleaned up.
+ let rows_a_after = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_a.0.to_string(), cx)
+ })
+ .await
+ .unwrap();
assert!(
- entries.iter().any(|e| e.contains("Worktree Thread")),
- "expected worktree thread to be visible, got: {entries:?}"
+ rows_a_after.is_empty(),
+ "thread A's archived worktree should be cleaned up after restore"
);
+
+ // Thread A's git ref should be cleaned up.
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ assert!(
+ !state.refs.contains_key(&ref_a),
+ "thread A's ref should be deleted, refs: {:?}",
+ state.refs
+ );
+ })
+ .unwrap();
+
+ // Thread B's record should still be intact.
+ let rows_b_after = store
+ .update_in(cx, |store, _window, cx| {
+ store.get_archived_worktrees_for_thread(session_b.0.to_string(), cx)
+ })
+ .await
+ .unwrap();
+ assert_eq!(
+ rows_b_after.len(),
+ 1,
+ "thread B's archived worktree should still exist"
+ );
+ assert_eq!(rows_b_after[0].commit_hash, "sha-bbb");
+
+ // Thread B's git ref should still be intact.
+ fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
+ assert!(
+ state.refs.contains_key(&ref_b),
+ "thread B's ref should still exist, refs: {:?}",
+ state.refs
+ );
+ })
+ .unwrap();
}
mod property_test {