Implement worktree archive persistence and restore

Created by Richard Feldman

Add the ability to archive and restore git worktrees when threads are
archived, preserving both staged and unstaged state across the
archive/restore cycle.

Key changes:

- Store both staged and unstaged commit SHAs in archived worktree DB
  records instead of a single hash, eliminating fragile HEAD~N arithmetic
  for rollback and restore operations
- Await rollback DB operations and surface errors instead of
  fire-and-forget with .detach()
- Handle multiple worktrees in restore with targeted path replacement
  in complete_worktree_restore, preserving unrelated paths in multi-root
  threads
- Guard cleanup_empty_workspaces against dropped workspace entities
- Add cancel button for the restore spinner (visible on hover)
- Show toast to user on worktree restore failure
- Deserialize persisted project_group_keys when restoring windows
- Add deterministic tests for two-SHA round-trip, single/multiple path
  replacement, and multi-worktree archive scenarios

Change summary

Cargo.lock                                     |    1 
crates/agent_ui/src/agent_ui.rs                |    1 
crates/agent_ui/src/thread_import.rs           |    1 
crates/agent_ui/src/thread_metadata_store.rs   |  772 +++++++++++++
crates/agent_ui/src/thread_worktree_archive.rs | 1167 ++++++++++++++++++++
crates/fs/src/fake_git_repo.rs                 |  161 ++
crates/fs/tests/integration/fake_git_repo.rs   |   12 
crates/git/src/repository.rs                   |   84 +
crates/git_ui/src/commit_modal.rs              |    1 
crates/git_ui/src/git_panel.rs                 |    8 
crates/project/src/git_store.rs                |   86 +
crates/project/src/project.rs                  |   28 
crates/proto/proto/git.proto                   |    1 
crates/sidebar/Cargo.toml                      |    1 
crates/sidebar/src/sidebar.rs                  |  168 ++
crates/sidebar/src/sidebar_tests.rs            |   16 
crates/ui/src/components/ai/thread_item.rs     |   59 
crates/workspace/src/multi_workspace.rs        |    4 
crates/workspace/src/workspace.rs              |   24 
19 files changed, 2,532 insertions(+), 63 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -15877,6 +15877,7 @@ dependencies = [
  "git",
  "gpui",
  "language_model",
+ "log",
  "menu",
  "platform_title_bar",
  "pretty_assertions",

crates/agent_ui/src/agent_ui.rs 🔗

@@ -32,6 +32,7 @@ mod thread_history;
 mod thread_history_view;
 mod thread_import;
 pub mod thread_metadata_store;
+pub mod thread_worktree_archive;
 pub mod threads_archive_view;
 mod ui;
 

crates/agent_ui/src/thread_import.rs 🔗

@@ -502,6 +502,7 @@ fn collect_importable_threads(
                 folder_paths,
                 main_worktree_paths: PathList::default(),
                 archived: true,
+                pending_worktree_restore: None,
             });
         }
     }

crates/agent_ui/src/thread_metadata_store.rs 🔗

@@ -1,4 +1,7 @@
-use std::{path::Path, sync::Arc};
+use std::{
+    path::{Path, PathBuf},
+    sync::Arc,
+};
 
 use acp_thread::AcpThreadEvent;
 use agent::{ThreadStore, ZED_AGENT_ID};
@@ -68,6 +71,7 @@ fn migrate_thread_metadata(cx: &mut App) {
                         folder_paths: entry.folder_paths,
                         main_worktree_paths: PathList::default(),
                         archived: true,
+                        pending_worktree_restore: None,
                     })
                 })
                 .collect::<Vec<_>>()
@@ -129,6 +133,10 @@ pub struct ThreadMetadata {
     pub folder_paths: PathList,
     pub main_worktree_paths: PathList,
     pub archived: bool,
+    /// When set, the thread's original worktree is being restored in the background.
+    /// The PathBuf is the main repo path shown temporarily while restoration is pending.
+    /// This is runtime-only state — not persisted to the database.
+    pub pending_worktree_restore: Option<PathBuf>,
 }
 
 impl From<&ThreadMetadata> for acp_thread::AgentSessionInfo {
@@ -144,6 +152,25 @@ impl From<&ThreadMetadata> for acp_thread::AgentSessionInfo {
     }
 }
 
+/// Record of a git worktree that was archived (deleted from disk) when its
+/// last thread was archived.
+pub struct ArchivedGitWorktree {
+    /// Auto-incrementing primary key.
+    pub id: i64,
+    /// Absolute path to the worktree directory before deletion.
+    pub worktree_path: PathBuf,
+    /// Absolute path of the main repository that owned this worktree.
+    pub main_repo_path: PathBuf,
+    /// Branch checked out at archive time. None if detached HEAD.
+    pub branch_name: Option<String>,
+    /// SHA of the commit capturing the staged state at archive time.
+    pub staged_commit_hash: String,
+    /// SHA of the commit capturing the unstaged state at archive time.
+    pub unstaged_commit_hash: String,
+    /// Whether this worktree has been restored.
+    pub restored: bool,
+}
+
 /// The store holds all metadata needed to show threads in the sidebar/the archive.
 ///
 /// Automatically listens to AcpThread events and updates metadata if it has changed.
@@ -388,6 +415,123 @@ impl ThreadMetadataStore {
         self.update_archived(session_id, false, cx);
     }
 
+    pub fn set_pending_worktree_restore(
+        &mut self,
+        session_id: &acp::SessionId,
+        main_repo_path: Option<PathBuf>,
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(thread) = self.threads.get_mut(session_id) {
+            thread.pending_worktree_restore = main_repo_path;
+            cx.notify();
+        }
+    }
+
+    pub fn complete_worktree_restore(
+        &mut self,
+        session_id: &acp::SessionId,
+        path_replacements: &[(PathBuf, PathBuf)],
+        cx: &mut Context<Self>,
+    ) {
+        if let Some(thread) = self.threads.get(session_id).cloned() {
+            let mut paths: Vec<PathBuf> = thread.folder_paths.paths().to_vec();
+            for (old_path, new_path) in path_replacements {
+                if let Some(pos) = paths.iter().position(|p| p == old_path) {
+                    paths[pos] = new_path.clone();
+                }
+            }
+            let new_folder_paths = PathList::new(&paths);
+            self.save_internal(ThreadMetadata {
+                pending_worktree_restore: None,
+                folder_paths: new_folder_paths,
+                ..thread
+            });
+            cx.notify();
+        }
+    }
+
+    pub fn create_archived_worktree(
+        &self,
+        worktree_path: &str,
+        main_repo_path: &str,
+        branch_name: Option<&str>,
+        staged_commit_hash: &str,
+        unstaged_commit_hash: &str,
+        cx: &App,
+    ) -> Task<anyhow::Result<i64>> {
+        let db = self.db.clone();
+        let worktree_path = worktree_path.to_string();
+        let main_repo_path = main_repo_path.to_string();
+        let branch_name = branch_name.map(|s| s.to_string());
+        let staged_commit_hash = staged_commit_hash.to_string();
+        let unstaged_commit_hash = unstaged_commit_hash.to_string();
+        cx.background_spawn(async move {
+            db.create_archived_worktree(
+                &worktree_path,
+                &main_repo_path,
+                branch_name.as_deref(),
+                &staged_commit_hash,
+                &unstaged_commit_hash,
+            )
+            .await
+        })
+    }
+
+    pub fn link_thread_to_archived_worktree(
+        &self,
+        session_id: &str,
+        archived_worktree_id: i64,
+        cx: &App,
+    ) -> Task<anyhow::Result<()>> {
+        let db = self.db.clone();
+        let session_id = session_id.to_string();
+        cx.background_spawn(async move {
+            db.link_thread_to_archived_worktree(&session_id, archived_worktree_id)
+                .await
+        })
+    }
+
+    pub fn get_archived_worktrees_for_thread(
+        &self,
+        session_id: &str,
+        cx: &App,
+    ) -> Task<anyhow::Result<Vec<ArchivedGitWorktree>>> {
+        let db = self.db.clone();
+        let session_id = session_id.to_string();
+        cx.background_spawn(async move { db.get_archived_worktrees_for_thread(&session_id).await })
+    }
+
+    pub fn delete_archived_worktree(&self, id: i64, cx: &App) -> Task<anyhow::Result<()>> {
+        let db = self.db.clone();
+        cx.background_spawn(async move { db.delete_archived_worktree(id).await })
+    }
+
+    pub fn set_archived_worktree_restored(
+        &self,
+        id: i64,
+        worktree_path: &str,
+        branch_name: Option<&str>,
+        cx: &App,
+    ) -> Task<anyhow::Result<()>> {
+        let db = self.db.clone();
+        let worktree_path = worktree_path.to_string();
+        let branch_name = branch_name.map(|s| s.to_string());
+        cx.background_spawn(async move {
+            db.set_archived_worktree_restored(id, &worktree_path, branch_name.as_deref())
+                .await
+        })
+    }
+
+    pub fn all_session_ids_for_path<'a>(
+        &'a self,
+        path_list: &PathList,
+    ) -> impl Iterator<Item = &'a acp::SessionId> {
+        self.threads_by_paths
+            .get(path_list)
+            .into_iter()
+            .flat_map(|session_ids| session_ids.iter())
+    }
+
     fn update_archived(
         &mut self,
         session_id: &acp::SessionId,
@@ -598,6 +742,7 @@ impl ThreadMetadataStore {
                     folder_paths,
                     main_worktree_paths,
                     archived,
+                    pending_worktree_restore: None,
                 };
 
                 self.save(metadata, cx);
@@ -634,6 +779,27 @@ impl Domain for ThreadMetadataDb {
         sql!(ALTER TABLE sidebar_threads ADD COLUMN archived INTEGER DEFAULT 0),
         sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths TEXT),
         sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths_order TEXT),
+        sql!(
+            CREATE TABLE IF NOT EXISTS archived_git_worktrees(
+                id INTEGER PRIMARY KEY,
+                worktree_path TEXT NOT NULL,
+                main_repo_path TEXT NOT NULL,
+                branch_name TEXT,
+                commit_hash TEXT NOT NULL,
+                restored INTEGER NOT NULL DEFAULT 0
+            ) STRICT;
+
+            CREATE TABLE IF NOT EXISTS thread_archived_worktrees(
+                session_id TEXT NOT NULL,
+                archived_worktree_id INTEGER NOT NULL REFERENCES archived_git_worktrees(id),
+                PRIMARY KEY (session_id, archived_worktree_id)
+            ) STRICT;
+        ),
+        sql!(
+            ALTER TABLE archived_git_worktrees ADD COLUMN staged_commit_hash TEXT;
+            ALTER TABLE archived_git_worktrees ADD COLUMN unstaged_commit_hash TEXT;
+            UPDATE archived_git_worktrees SET staged_commit_hash = commit_hash, unstaged_commit_hash = commit_hash WHERE staged_commit_hash IS NULL;
+        ),
     ];
 }
 
@@ -722,6 +888,111 @@ impl ThreadMetadataDb {
         })
         .await
     }
+
+    pub async fn create_archived_worktree(
+        &self,
+        worktree_path: &str,
+        main_repo_path: &str,
+        branch_name: Option<&str>,
+        staged_commit_hash: &str,
+        unstaged_commit_hash: &str,
+    ) -> anyhow::Result<i64> {
+        let worktree_path = worktree_path.to_string();
+        let main_repo_path = main_repo_path.to_string();
+        let branch_name = branch_name.map(|s| s.to_string());
+        let staged_commit_hash = staged_commit_hash.to_string();
+        let unstaged_commit_hash = unstaged_commit_hash.to_string();
+
+        self.write(move |conn| {
+            let mut stmt = Statement::prepare(
+                conn,
+                "INSERT INTO archived_git_worktrees(worktree_path, main_repo_path, branch_name, commit_hash, staged_commit_hash, unstaged_commit_hash) \
+                 VALUES (?1, ?2, ?3, ?4, ?5, ?6) \
+                 RETURNING id",
+            )?;
+            let mut i = stmt.bind(&worktree_path, 1)?;
+            i = stmt.bind(&main_repo_path, i)?;
+            i = stmt.bind(&branch_name, i)?;
+            i = stmt.bind(&unstaged_commit_hash, i)?;
+            i = stmt.bind(&staged_commit_hash, i)?;
+            stmt.bind(&unstaged_commit_hash, i)?;
+            stmt.maybe_row::<i64>()?.context("expected RETURNING id")
+        })
+        .await
+    }
+
+    pub async fn link_thread_to_archived_worktree(
+        &self,
+        session_id: &str,
+        archived_worktree_id: i64,
+    ) -> anyhow::Result<()> {
+        let session_id = session_id.to_string();
+
+        self.write(move |conn| {
+            let mut stmt = Statement::prepare(
+                conn,
+                "INSERT INTO thread_archived_worktrees(session_id, archived_worktree_id) \
+                 VALUES (?1, ?2)",
+            )?;
+            let i = stmt.bind(&session_id, 1)?;
+            stmt.bind(&archived_worktree_id, i)?;
+            stmt.exec()
+        })
+        .await
+    }
+
+    pub async fn get_archived_worktrees_for_thread(
+        &self,
+        session_id: &str,
+    ) -> anyhow::Result<Vec<ArchivedGitWorktree>> {
+        let session_id = session_id.to_string();
+
+        self.select_bound::<String, ArchivedGitWorktree>(
+            "SELECT a.id, a.worktree_path, a.main_repo_path, a.branch_name, a.staged_commit_hash, a.unstaged_commit_hash, a.restored \
+             FROM archived_git_worktrees a \
+             JOIN thread_archived_worktrees t ON a.id = t.archived_worktree_id \
+             WHERE t.session_id = ?1",
+        )?(session_id)
+    }
+
+    pub async fn delete_archived_worktree(&self, id: i64) -> anyhow::Result<()> {
+        self.write(move |conn| {
+            let mut stmt = Statement::prepare(
+                conn,
+                "DELETE FROM thread_archived_worktrees WHERE archived_worktree_id = ?",
+            )?;
+            stmt.bind(&id, 1)?;
+            stmt.exec()?;
+
+            let mut stmt =
+                Statement::prepare(conn, "DELETE FROM archived_git_worktrees WHERE id = ?")?;
+            stmt.bind(&id, 1)?;
+            stmt.exec()
+        })
+        .await
+    }
+
+    pub async fn set_archived_worktree_restored(
+        &self,
+        id: i64,
+        worktree_path: &str,
+        branch_name: Option<&str>,
+    ) -> anyhow::Result<()> {
+        let worktree_path = worktree_path.to_string();
+        let branch_name = branch_name.map(|s| s.to_string());
+
+        self.write(move |conn| {
+            let mut stmt = Statement::prepare(
+                conn,
+                "UPDATE archived_git_worktrees SET restored = 1, worktree_path = ?1, branch_name = ?2 WHERE id = ?3",
+            )?;
+            let mut i = stmt.bind(&worktree_path, 1)?;
+            i = stmt.bind(&branch_name, i)?;
+            stmt.bind(&id, i)?;
+            stmt.exec()
+        })
+        .await
+    }
 }
 
 impl Column for ThreadMetadata {
@@ -779,6 +1050,32 @@ impl Column for ThreadMetadata {
                 folder_paths,
                 main_worktree_paths,
                 archived,
+                pending_worktree_restore: None,
+            },
+            next,
+        ))
+    }
+}
+
+impl Column for ArchivedGitWorktree {
+    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
+        let (id, next): (i64, i32) = Column::column(statement, start_index)?;
+        let (worktree_path_str, next): (String, i32) = Column::column(statement, next)?;
+        let (main_repo_path_str, next): (String, i32) = Column::column(statement, next)?;
+        let (branch_name, next): (Option<String>, i32) = Column::column(statement, next)?;
+        let (staged_commit_hash, next): (String, i32) = Column::column(statement, next)?;
+        let (unstaged_commit_hash, next): (String, i32) = Column::column(statement, next)?;
+        let (restored_int, next): (i64, i32) = Column::column(statement, next)?;
+
+        Ok((
+            ArchivedGitWorktree {
+                id,
+                worktree_path: PathBuf::from(worktree_path_str),
+                main_repo_path: PathBuf::from(main_repo_path_str),
+                branch_name,
+                staged_commit_hash,
+                unstaged_commit_hash,
+                restored: restored_int != 0,
             },
             next,
         ))
@@ -835,6 +1132,7 @@ mod tests {
             created_at: Some(updated_at),
             folder_paths,
             main_worktree_paths: PathList::default(),
+            pending_worktree_restore: None,
         }
     }
 
@@ -1052,6 +1350,7 @@ mod tests {
             folder_paths: project_a_paths.clone(),
             main_worktree_paths: PathList::default(),
             archived: false,
+            pending_worktree_restore: None,
         };
 
         cx.update(|cx| {
@@ -1162,6 +1461,7 @@ mod tests {
             folder_paths: project_paths.clone(),
             main_worktree_paths: PathList::default(),
             archived: false,
+            pending_worktree_restore: None,
         };
 
         cx.update(|cx| {
@@ -1913,4 +2213,474 @@ mod tests {
             );
         });
     }
+
+    #[gpui::test]
+    async fn test_create_and_retrieve_archived_worktree(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/tmp/worktree",
+                    "/home/user/repo",
+                    Some("feature-branch"),
+                    "abc123def456",
+                    "abc123def456",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id, cx)
+            })
+            .await
+            .unwrap();
+
+        let worktrees = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(worktrees.len(), 1);
+        let wt = &worktrees[0];
+        assert_eq!(wt.id, id);
+        assert_eq!(wt.worktree_path, PathBuf::from("/tmp/worktree"));
+        assert_eq!(wt.main_repo_path, PathBuf::from("/home/user/repo"));
+        assert_eq!(wt.branch_name.as_deref(), Some("feature-branch"));
+        assert_eq!(wt.staged_commit_hash, "abc123def456");
+        assert_eq!(wt.unstaged_commit_hash, "abc123def456");
+        assert!(!wt.restored);
+    }
+
+    #[gpui::test]
+    async fn test_delete_archived_worktree(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/tmp/worktree",
+                    "/home/user/repo",
+                    Some("main"),
+                    "deadbeef",
+                    "deadbeef",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id, cx)
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| store.delete_archived_worktree(id, cx))
+            .await
+            .unwrap();
+
+        let worktrees = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+        assert!(worktrees.is_empty());
+    }
+
+    #[gpui::test]
+    async fn test_set_archived_worktree_restored(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/tmp/old-worktree",
+                    "/home/user/repo",
+                    Some("old-branch"),
+                    "abc123",
+                    "abc123",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.set_archived_worktree_restored(
+                    id,
+                    "/tmp/new-worktree",
+                    Some("new-branch"),
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id, cx)
+            })
+            .await
+            .unwrap();
+
+        let worktrees = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(worktrees.len(), 1);
+        let wt = &worktrees[0];
+        assert!(wt.restored);
+        assert_eq!(wt.worktree_path, PathBuf::from("/tmp/new-worktree"));
+        assert_eq!(wt.branch_name.as_deref(), Some("new-branch"));
+    }
+
+    #[gpui::test]
+    async fn test_link_multiple_threads_to_archived_worktree(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/tmp/worktree",
+                    "/home/user/repo",
+                    None,
+                    "abc123",
+                    "abc123",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id, cx)
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-2", id, cx)
+            })
+            .await
+            .unwrap();
+
+        let wt1 = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+
+        let wt2 = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-2", cx)
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(wt1.len(), 1);
+        assert_eq!(wt2.len(), 1);
+        assert_eq!(wt1[0].id, wt2[0].id);
+    }
+
+    #[gpui::test]
+    async fn test_all_session_ids_for_path(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+        let paths = PathList::new(&[Path::new("/project-x")]);
+
+        let meta1 = ThreadMetadata {
+            session_id: acp::SessionId::new("session-1"),
+            agent_id: agent::ZED_AGENT_ID.clone(),
+            title: "Thread 1".into(),
+            updated_at: Utc::now(),
+            created_at: Some(Utc::now()),
+            folder_paths: paths.clone(),
+            main_worktree_paths: PathList::default(),
+            archived: false,
+            pending_worktree_restore: None,
+        };
+        let meta2 = ThreadMetadata {
+            session_id: acp::SessionId::new("session-2"),
+            agent_id: agent::ZED_AGENT_ID.clone(),
+            title: "Thread 2".into(),
+            updated_at: Utc::now(),
+            created_at: Some(Utc::now()),
+            folder_paths: paths.clone(),
+            main_worktree_paths: PathList::default(),
+            archived: true,
+            pending_worktree_restore: None,
+        };
+
+        store.update(cx, |store, _cx| {
+            store.save_internal(meta1);
+            store.save_internal(meta2);
+        });
+
+        let ids: HashSet<acp::SessionId> = store.read_with(cx, |store, _cx| {
+            store.all_session_ids_for_path(&paths).cloned().collect()
+        });
+
+        assert!(ids.contains(&acp::SessionId::new("session-1")));
+        assert!(ids.contains(&acp::SessionId::new("session-2")));
+        assert_eq!(ids.len(), 2);
+    }
+
+    #[gpui::test]
+    async fn test_two_sha_round_trip(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/tmp/worktree",
+                    "/home/user/repo",
+                    Some("feature"),
+                    "staged_sha_aaa",
+                    "unstaged_sha_bbb",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id, cx)
+            })
+            .await
+            .unwrap();
+
+        let worktrees = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(worktrees.len(), 1);
+        let wt = &worktrees[0];
+        assert_eq!(wt.staged_commit_hash, "staged_sha_aaa");
+        assert_eq!(wt.unstaged_commit_hash, "unstaged_sha_bbb");
+        assert_eq!(wt.branch_name.as_deref(), Some("feature"));
+        assert!(!wt.restored);
+    }
+
+    #[gpui::test]
+    async fn test_complete_worktree_restore_single_path(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let original_paths = PathList::new(&[Path::new("/projects/worktree-a")]);
+        let meta = make_metadata("session-1", "Thread 1", Utc::now(), original_paths);
+
+        store.update(cx, |store, cx| {
+            store.save_manually(meta, cx);
+        });
+
+        let replacements = vec![(
+            PathBuf::from("/projects/worktree-a"),
+            PathBuf::from("/projects/worktree-a-restored"),
+        )];
+
+        store.update(cx, |store, cx| {
+            store.complete_worktree_restore(&acp::SessionId::new("session-1"), &replacements, cx);
+        });
+
+        let entry = store.read_with(cx, |store, _cx| {
+            store.entry(&acp::SessionId::new("session-1")).cloned()
+        });
+        let entry = entry.unwrap();
+        assert!(entry.pending_worktree_restore.is_none());
+        assert_eq!(
+            entry.folder_paths.paths(),
+            &[PathBuf::from("/projects/worktree-a-restored")]
+        );
+    }
+
+    #[gpui::test]
+    async fn test_complete_worktree_restore_multiple_paths(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let original_paths = PathList::new(&[
+            Path::new("/projects/worktree-a"),
+            Path::new("/projects/worktree-b"),
+            Path::new("/other/unrelated"),
+        ]);
+        let meta = make_metadata("session-multi", "Multi Thread", Utc::now(), original_paths);
+
+        store.update(cx, |store, cx| {
+            store.save_manually(meta, cx);
+        });
+
+        let replacements = vec![
+            (
+                PathBuf::from("/projects/worktree-a"),
+                PathBuf::from("/restored/worktree-a"),
+            ),
+            (
+                PathBuf::from("/projects/worktree-b"),
+                PathBuf::from("/restored/worktree-b"),
+            ),
+        ];
+
+        store.update(cx, |store, cx| {
+            store.complete_worktree_restore(
+                &acp::SessionId::new("session-multi"),
+                &replacements,
+                cx,
+            );
+        });
+
+        let entry = store.read_with(cx, |store, _cx| {
+            store.entry(&acp::SessionId::new("session-multi")).cloned()
+        });
+        let entry = entry.unwrap();
+        assert!(entry.pending_worktree_restore.is_none());
+
+        let paths = entry.folder_paths.paths();
+        assert_eq!(paths.len(), 3);
+        assert!(paths.contains(&PathBuf::from("/restored/worktree-a")));
+        assert!(paths.contains(&PathBuf::from("/restored/worktree-b")));
+        assert!(paths.contains(&PathBuf::from("/other/unrelated")));
+    }
+
+    #[gpui::test]
+    async fn test_complete_worktree_restore_preserves_unmatched_paths(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let original_paths =
+            PathList::new(&[Path::new("/projects/worktree-a"), Path::new("/other/path")]);
+        let meta = make_metadata("session-partial", "Partial", Utc::now(), original_paths);
+
+        store.update(cx, |store, cx| {
+            store.save_manually(meta, cx);
+        });
+
+        let replacements = vec![
+            (
+                PathBuf::from("/projects/worktree-a"),
+                PathBuf::from("/new/worktree-a"),
+            ),
+            (
+                PathBuf::from("/nonexistent/path"),
+                PathBuf::from("/should/not/appear"),
+            ),
+        ];
+
+        store.update(cx, |store, cx| {
+            store.complete_worktree_restore(
+                &acp::SessionId::new("session-partial"),
+                &replacements,
+                cx,
+            );
+        });
+
+        let entry = store.read_with(cx, |store, _cx| {
+            store
+                .entry(&acp::SessionId::new("session-partial"))
+                .cloned()
+        });
+        let entry = entry.unwrap();
+        let paths = entry.folder_paths.paths();
+        assert_eq!(paths.len(), 2);
+        assert!(paths.contains(&PathBuf::from("/new/worktree-a")));
+        assert!(paths.contains(&PathBuf::from("/other/path")));
+        assert!(!paths.contains(&PathBuf::from("/should/not/appear")));
+    }
+
+    #[gpui::test]
+    async fn test_multiple_archived_worktrees_per_thread(cx: &mut TestAppContext) {
+        init_test(cx);
+        let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+
+        let id1 = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/projects/worktree-a",
+                    "/home/user/repo",
+                    Some("branch-a"),
+                    "staged_a",
+                    "unstaged_a",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        let id2 = store
+            .read_with(cx, |store, cx| {
+                store.create_archived_worktree(
+                    "/projects/worktree-b",
+                    "/home/user/repo",
+                    Some("branch-b"),
+                    "staged_b",
+                    "unstaged_b",
+                    cx,
+                )
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id1, cx)
+            })
+            .await
+            .unwrap();
+
+        store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree("session-1", id2, cx)
+            })
+            .await
+            .unwrap();
+
+        let worktrees = store
+            .read_with(cx, |store, cx| {
+                store.get_archived_worktrees_for_thread("session-1", cx)
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(worktrees.len(), 2);
+
+        let wt_a = worktrees
+            .iter()
+            .find(|w| w.worktree_path.as_path() == Path::new("/projects/worktree-a"))
+            .unwrap();
+        assert_eq!(wt_a.staged_commit_hash, "staged_a");
+        assert_eq!(wt_a.unstaged_commit_hash, "unstaged_a");
+        assert_eq!(wt_a.branch_name.as_deref(), Some("branch-a"));
+
+        let wt_b = worktrees
+            .iter()
+            .find(|w| w.worktree_path.as_path() == Path::new("/projects/worktree-b"))
+            .unwrap();
+        assert_eq!(wt_b.staged_commit_hash, "staged_b");
+        assert_eq!(wt_b.unstaged_commit_hash, "unstaged_b");
+        assert_eq!(wt_b.branch_name.as_deref(), Some("branch-b"));
+    }
 }

crates/agent_ui/src/thread_worktree_archive.rs 🔗

@@ -0,0 +1,1167 @@
+use std::{
+    collections::HashSet,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+
+use agent_client_protocol as acp;
+use anyhow::{Context as _, Result, anyhow};
+use collections::HashMap;
+use git::repository::{AskPassDelegate, CommitOptions, DEFAULT_WORKTREE_DIRECTORY, ResetMode};
+use gpui::{App, AsyncApp, Entity, Global, Task, WindowHandle};
+use parking_lot::Mutex;
+use project::{
+    LocalProjectFlags, Project, WorktreeId, git_store::Repository, worktrees_directory_for_repo,
+};
+use util::ResultExt;
+use workspace::{
+    AppState, MultiWorkspace, OpenMode, OpenOptions, PathList, Toast, Workspace,
+    notifications::NotificationId, open_new, open_paths,
+};
+
+use crate::thread_metadata_store::{ArchivedGitWorktree, ThreadMetadataStore};
+
+/// Global registry of worktree root paths that currently have an archive
+/// cleanup in flight, so the same root is never processed by two
+/// concurrent cleanup tasks (see `run_cleanup`).
+#[derive(Default)]
+pub struct ThreadArchiveCleanupCoordinator {
+    // Blocking mutex; only held for short synchronous sections, never
+    // across an await point.
+    in_flight_roots: Mutex<HashSet<PathBuf>>,
+}
+
+impl Global for ThreadArchiveCleanupCoordinator {}
+
+/// Installs the cleanup coordinator as a GPUI global if it has not been
+/// set yet. Safe to call repeatedly; an existing global is left intact.
+fn ensure_global(cx: &mut App) {
+    if !cx.has_global::<ThreadArchiveCleanupCoordinator>() {
+        cx.set_global(ThreadArchiveCleanupCoordinator::default());
+    }
+}
+
+/// Result handed back to the caller of [`archive_thread`].
+#[derive(Clone)]
+pub struct ArchiveOutcome {
+    /// Whether the thread metadata was archived synchronously. Currently
+    /// always `true` — both return paths in `archive_thread` set it.
+    pub archived_immediately: bool,
+    /// Worktree roots whose removal was scheduled as a background task;
+    /// empty when no roots qualified for cleanup.
+    pub roots_to_delete: Vec<PathBuf>,
+}
+
+/// Per-root plan describing one linked git worktree to be persisted and
+/// removed as part of archiving a thread.
+#[derive(Clone)]
+struct RootPlan {
+    /// Absolute path of the linked worktree's root folder.
+    root_path: PathBuf,
+    /// Absolute path of the main repository the worktree belongs to
+    /// (taken from the repo snapshot's `original_repo_abs_path`).
+    main_repo_path: PathBuf,
+    /// Open projects that currently show this root as a visible worktree.
+    affected_projects: Vec<AffectedProject>,
+    /// Repository entity for the linked worktree, when one was found.
+    /// `None` means there is no git state to persist for this root.
+    worktree_repo: Option<Entity<Repository>>,
+    /// Name of the branch checked out in the worktree, if any.
+    branch_name: Option<String>,
+}
+
+/// A (project, worktree) pair identifying where a doomed root is
+/// currently open, so it can be detached before git-level removal.
+#[derive(Clone)]
+struct AffectedProject {
+    project: Entity<Project>,
+    /// Id of the project worktree whose abs path matched the root.
+    worktree_id: WorktreeId,
+}
+
+/// Where the user should land when the current workspace becomes empty
+/// after cleanup. Chosen ahead of time by `choose_fallback_target` and
+/// activated by `activate_fallback`.
+#[derive(Clone)]
+enum FallbackTarget {
+    /// Activate an already-open workspace (possibly in another window).
+    ExistingWorkspace {
+        window: WindowHandle<MultiWorkspace>,
+        workspace: Entity<Workspace>,
+    },
+    /// Open the given paths (in practice the main repo path of the first
+    /// doomed root) in the requesting window.
+    OpenPaths {
+        requesting_window: WindowHandle<MultiWorkspace>,
+        paths: Vec<PathBuf>,
+    },
+    /// Last resort: open a fresh empty workspace.
+    OpenEmpty {
+        requesting_window: WindowHandle<MultiWorkspace>,
+    },
+}
+
+/// Full plan computed up front (synchronously, while entities are still
+/// readable) and then executed asynchronously by `run_cleanup`.
+#[derive(Clone)]
+struct CleanupPlan {
+    /// The archived thread's folder paths, used to link sibling threads
+    /// to the archived worktree records.
+    folder_paths: PathList,
+    /// Roots that qualified for persistence + removal.
+    roots: Vec<RootPlan>,
+    /// Workspace the user was looking at when archiving, if any.
+    current_workspace: Option<Entity<Workspace>>,
+    /// True when removing the doomed roots would leave the current
+    /// workspace with zero roots, triggering `fallback` activation.
+    current_workspace_will_be_empty: bool,
+    fallback: Option<FallbackTarget>,
+    /// All open workspaces that contain at least one doomed root.
+    affected_workspaces: Vec<Entity<Workspace>>,
+}
+
+/// Name of the git ref written to the main repository to pin an archived
+/// worktree's WIP tip commit, keyed by the archived-worktree DB row id.
+/// Living under `refs/archived-worktrees/` keeps the commit reachable so
+/// git GC cannot collect it while the archive record exists.
+fn archived_worktree_ref_name(id: i64) -> String {
+    format!("refs/archived-worktrees/{}", id)
+}
+
+/// Bookkeeping returned by `persist_worktree_state`, kept around so the
+/// persistence can be rolled back if the subsequent root removal fails.
+struct PersistOutcome {
+    /// Row id of the record created via `create_archived_worktree`.
+    archived_worktree_id: i64,
+    /// SHA of the "WIP staged" commit; rollback resets to its parent
+    /// (`{sha}~1`) to undo both WIP commits.
+    staged_commit_hash: String,
+}
+
+/// Archives the thread's metadata and, when its worktree roots are no
+/// longer referenced by any other unarchived thread, kicks off a detached
+/// background task that persists and removes those roots.
+///
+/// Note the ordering: the cleanup plan is built *before* the store marks
+/// the thread archived, because `build_cleanup_plan` reads this thread's
+/// metadata entry from the store.
+pub fn archive_thread(
+    session_id: &acp::SessionId,
+    current_workspace: Option<Entity<Workspace>>,
+    window: WindowHandle<MultiWorkspace>,
+    cx: &mut App,
+) -> ArchiveOutcome {
+    ensure_global(cx);
+    let plan = build_cleanup_plan(session_id, current_workspace, window, cx);
+
+    ThreadMetadataStore::global(cx).update(cx, |store, cx| store.archive(session_id, cx));
+
+    if let Some(plan) = plan {
+        let roots_to_delete = plan
+            .roots
+            .iter()
+            .map(|root| root.root_path.clone())
+            .collect::<Vec<_>>();
+        if !roots_to_delete.is_empty() {
+            // Fire-and-forget: run_cleanup reports its own errors via
+            // toast and releases its in-flight claims on every path.
+            cx.spawn(async move |cx| {
+                run_cleanup(plan, cx).await;
+            })
+            .detach();
+
+            return ArchiveOutcome {
+                archived_immediately: true,
+                roots_to_delete,
+            };
+        }
+    }
+
+    ArchiveOutcome {
+        archived_immediately: true,
+        roots_to_delete: Vec::new(),
+    }
+}
+
+/// Computes the cleanup plan for a thread being archived: which roots can
+/// be removed, which workspaces that affects, and where the user should
+/// land if their current workspace ends up empty.
+///
+/// Returns `None` only when the thread has no metadata entry. When the
+/// thread exists but no root qualifies, returns a plan with empty `roots`
+/// so the caller treats cleanup as a no-op.
+fn build_cleanup_plan(
+    session_id: &acp::SessionId,
+    current_workspace: Option<Entity<Workspace>>,
+    requesting_window: WindowHandle<MultiWorkspace>,
+    cx: &App,
+) -> Option<CleanupPlan> {
+    let metadata = ThreadMetadataStore::global(cx)
+        .read(cx)
+        .entry(session_id)
+        .cloned()?;
+
+    let workspaces = all_open_workspaces(cx);
+
+    // A root qualifies only if it maps to a linked git worktree and no
+    // other unarchived thread still references the same path.
+    let candidate_roots = metadata
+        .folder_paths
+        .ordered_paths()
+        .filter_map(|path| build_root_plan(path, &workspaces, cx))
+        .filter(|plan| {
+            !path_is_referenced_by_other_unarchived_threads(session_id, &plan.root_path, cx)
+        })
+        .collect::<Vec<_>>();
+
+    if candidate_roots.is_empty() {
+        return Some(CleanupPlan {
+            folder_paths: metadata.folder_paths,
+            roots: Vec::new(),
+            current_workspace,
+            current_workspace_will_be_empty: false,
+            fallback: None,
+            affected_workspaces: Vec::new(),
+        });
+    }
+
+    let mut affected_workspaces = Vec::new();
+    let mut current_workspace_will_be_empty = false;
+
+    for workspace in workspaces.iter() {
+        let doomed_root_count = workspace
+            .read(cx)
+            .root_paths(cx)
+            .into_iter()
+            .filter(|path| {
+                candidate_roots
+                    .iter()
+                    .any(|root| root.root_path.as_path() == path.as_ref())
+            })
+            .count();
+
+        if doomed_root_count == 0 {
+            continue;
+        }
+
+        // Emptiness is only tracked for the workspace the user is in;
+        // other affected workspaces are closed later if left empty
+        // (see cleanup_empty_workspaces).
+        let surviving_root_count = workspace
+            .read(cx)
+            .root_paths(cx)
+            .len()
+            .saturating_sub(doomed_root_count);
+        if current_workspace
+            .as_ref()
+            .is_some_and(|current| current == workspace)
+        {
+            current_workspace_will_be_empty = surviving_root_count == 0;
+        }
+        affected_workspaces.push(workspace.clone());
+    }
+
+    let fallback = if current_workspace_will_be_empty {
+        choose_fallback_target(
+            session_id,
+            current_workspace.as_ref(),
+            &candidate_roots,
+            &requesting_window,
+            &workspaces,
+            cx,
+        )
+    } else {
+        None
+    };
+
+    Some(CleanupPlan {
+        folder_paths: metadata.folder_paths,
+        roots: candidate_roots,
+        current_workspace,
+        current_workspace_will_be_empty,
+        fallback,
+        affected_workspaces,
+    })
+}
+
+/// Builds a `RootPlan` for one folder path, or `None` when the path does
+/// not correspond to a linked git worktree in any open workspace — only
+/// linked worktrees are ever archived and removed.
+fn build_root_plan(path: &Path, workspaces: &[Entity<Workspace>], cx: &App) -> Option<RootPlan> {
+    let path = path.to_path_buf();
+    // Every open project showing this path as a visible worktree must
+    // release it before git removes the worktree directory.
+    let affected_projects = workspaces
+        .iter()
+        .filter_map(|workspace| {
+            let project = workspace.read(cx).project().clone();
+            let worktree = project
+                .read(cx)
+                .visible_worktrees(cx)
+                .find(|worktree| worktree.read(cx).abs_path().as_ref() == path.as_path())?;
+            let worktree_id = worktree.read(cx).id();
+            Some(AffectedProject {
+                project,
+                worktree_id,
+            })
+        })
+        .collect::<Vec<_>>();
+
+    // Find the repository entity whose working directory is exactly this
+    // path and which is a linked (secondary) worktree; bail otherwise.
+    let (linked_snapshot, worktree_repo) = workspaces
+        .iter()
+        .flat_map(|workspace| {
+            workspace
+                .read(cx)
+                .project()
+                .read(cx)
+                .repositories(cx)
+                .values()
+                .cloned()
+                .collect::<Vec<_>>()
+        })
+        .find_map(|repo| {
+            let snapshot = repo.read(cx).snapshot();
+            (snapshot.is_linked_worktree()
+                && snapshot.work_directory_abs_path.as_ref() == path.as_path())
+            .then_some((snapshot, repo))
+        })?;
+
+    let branch_name = linked_snapshot
+        .branch
+        .as_ref()
+        .map(|b| b.name().to_string());
+
+    Some(RootPlan {
+        root_path: path,
+        main_repo_path: linked_snapshot.original_repo_abs_path.to_path_buf(),
+        affected_projects,
+        worktree_repo: Some(worktree_repo),
+        branch_name,
+    })
+}
+
+/// True when any *other* unarchived thread still lists `path` among its
+/// folder paths — in that case the worktree must be kept alive and is
+/// excluded from cleanup.
+fn path_is_referenced_by_other_unarchived_threads(
+    current_session_id: &acp::SessionId,
+    path: &Path,
+    cx: &App,
+) -> bool {
+    ThreadMetadataStore::global(cx)
+        .read(cx)
+        .entries()
+        .filter(|thread| thread.session_id != *current_session_id)
+        .filter(|thread| !thread.archived)
+        .any(|thread| {
+            thread
+                .folder_paths
+                .paths()
+                .iter()
+                .any(|other_path| other_path.as_path() == path)
+        })
+}
+
+/// Picks where to send the user once their current workspace loses all of
+/// its roots. Preference order:
+///
+/// 1. Another surviving workspace in the same window.
+/// 2. A surviving workspace in any other window.
+/// 3. The workspace of another live (unarchived) thread that survives.
+/// 4. Open the first doomed root's main repo path.
+/// 5. Open an empty workspace.
+fn choose_fallback_target(
+    current_session_id: &acp::SessionId,
+    current_workspace: Option<&Entity<Workspace>>,
+    roots: &[RootPlan],
+    requesting_window: &WindowHandle<MultiWorkspace>,
+    workspaces: &[Entity<Workspace>],
+    cx: &App,
+) -> Option<FallbackTarget> {
+    let doomed_roots = roots
+        .iter()
+        .map(|root| root.root_path.clone())
+        .collect::<HashSet<_>>();
+
+    let surviving_same_window = requesting_window.read(cx).ok().and_then(|multi_workspace| {
+        multi_workspace
+            .workspaces()
+            .iter()
+            .filter(|workspace| current_workspace.is_none_or(|current| *workspace != current))
+            .find(|workspace| workspace_survives(workspace, &doomed_roots, cx))
+            .cloned()
+    });
+    if let Some(workspace) = surviving_same_window {
+        return Some(FallbackTarget::ExistingWorkspace {
+            window: *requesting_window,
+            workspace,
+        });
+    }
+
+    for window in cx
+        .windows()
+        .into_iter()
+        .filter_map(|window| window.downcast::<MultiWorkspace>())
+    {
+        if window == *requesting_window {
+            continue;
+        }
+        if let Ok(multi_workspace) = window.read(cx) {
+            if let Some(workspace) = multi_workspace
+                .workspaces()
+                .iter()
+                .find(|workspace| workspace_survives(workspace, &doomed_roots, cx))
+                .cloned()
+            {
+                return Some(FallbackTarget::ExistingWorkspace { window, workspace });
+            }
+        }
+    }
+
+    // Match other unarchived threads' folder paths against open
+    // workspaces to find one that will survive the cleanup.
+    let safe_thread_workspace = ThreadMetadataStore::global(cx)
+        .read(cx)
+        .entries()
+        .filter(|metadata| metadata.session_id != *current_session_id && !metadata.archived)
+        .filter_map(|metadata| {
+            workspaces
+                .iter()
+                .find(|workspace| workspace_path_list(workspace, cx) == metadata.folder_paths)
+                .cloned()
+        })
+        .find(|workspace| workspace_survives(workspace, &doomed_roots, cx));
+
+    if let Some(workspace) = safe_thread_workspace {
+        let window = window_for_workspace(&workspace, cx).unwrap_or(*requesting_window);
+        return Some(FallbackTarget::ExistingWorkspace { window, workspace });
+    }
+
+    if let Some(root) = roots.first() {
+        return Some(FallbackTarget::OpenPaths {
+            requesting_window: *requesting_window,
+            paths: vec![root.main_repo_path.clone()],
+        });
+    }
+
+    Some(FallbackTarget::OpenEmpty {
+        requesting_window: *requesting_window,
+    })
+}
+
+/// Executes a `CleanupPlan`: claims each root in the global coordinator
+/// (skipping ones another task already owns), prompts for saving dirty
+/// workspaces, activates the fallback workspace if needed, persists each
+/// worktree's git state, removes the root, and finally closes workspaces
+/// left empty.
+///
+/// Invariant: every claimed root is released again on every exit path via
+/// `release_in_flight_roots`. If removal fails after persistence
+/// succeeded, the persistence is rolled back so state is not lost.
+async fn run_cleanup(plan: CleanupPlan, cx: &mut AsyncApp) {
+    let roots_to_delete =
+        cx.update_global::<ThreadArchiveCleanupCoordinator, _>(|coordinator, _cx| {
+            let mut in_flight_roots = coordinator.in_flight_roots.lock();
+            plan.roots
+                .iter()
+                .filter_map(|root| {
+                    // insert() returning false means another cleanup task
+                    // already owns this root; leave it to that task.
+                    if in_flight_roots.insert(root.root_path.clone()) {
+                        Some(root.clone())
+                    } else {
+                        None
+                    }
+                })
+                .collect::<Vec<_>>()
+        });
+
+    if roots_to_delete.is_empty() {
+        return;
+    }
+
+    // Save the current workspace first so the user sees their own window
+    // prompt before any background ones.
+    let active_workspace = plan.current_workspace.clone();
+    if let Some(workspace) = active_workspace
+        .as_ref()
+        .filter(|_| plan.current_workspace_will_be_empty)
+    {
+        let Some(window) = window_for_workspace_async(workspace, cx) else {
+            release_in_flight_roots(&roots_to_delete, cx);
+            return;
+        };
+
+        let should_continue = save_workspace_for_root_removal(workspace.clone(), window, cx).await;
+        if !should_continue {
+            // User canceled the save prompt: abort the whole cleanup.
+            release_in_flight_roots(&roots_to_delete, cx);
+            return;
+        }
+    }
+
+    for workspace in plan
+        .affected_workspaces
+        .iter()
+        .filter(|workspace| Some((*workspace).clone()) != active_workspace)
+    {
+        let Some(window) = window_for_workspace_async(workspace, cx) else {
+            continue;
+        };
+
+        if !save_workspace_for_root_removal(workspace.clone(), window, cx).await {
+            release_in_flight_roots(&roots_to_delete, cx);
+            return;
+        }
+    }
+
+    // Move the user somewhere safe before their workspace disappears.
+    if plan.current_workspace_will_be_empty {
+        if let Some(fallback) = plan.fallback.clone() {
+            activate_fallback(fallback, cx).await.log_err();
+        }
+    }
+
+    let mut git_removal_errors: Vec<(PathBuf, anyhow::Error)> = Vec::new();
+    let mut persist_errors: Vec<(PathBuf, anyhow::Error)> = Vec::new();
+    let mut persist_outcomes: HashMap<PathBuf, PersistOutcome> = HashMap::default();
+
+    for root in &roots_to_delete {
+        if root.worktree_repo.is_some() {
+            match persist_worktree_state(root, &plan, cx).await {
+                Ok(outcome) => {
+                    persist_outcomes.insert(root.root_path.clone(), outcome);
+                }
+                Err(error) => {
+                    log::error!(
+                        "Failed to persist worktree state for {}: {error}",
+                        root.root_path.display()
+                    );
+                    // Without persisted state, removing the root would
+                    // lose work — skip removal for this root.
+                    persist_errors.push((root.root_path.clone(), error));
+                    continue;
+                }
+            }
+        }
+
+        if let Err(error) = remove_root(root.clone(), cx).await {
+            // Removal failed: undo the WIP commits / DB record so the
+            // still-present worktree is back to its pre-archive state.
+            if let Some(outcome) = persist_outcomes.remove(&root.root_path) {
+                rollback_persist(&outcome, root, cx).await;
+            }
+            git_removal_errors.push((root.root_path.clone(), error));
+        }
+    }
+
+    cleanup_empty_workspaces(&plan.affected_workspaces, cx).await;
+
+    let all_errors: Vec<(PathBuf, anyhow::Error)> = persist_errors
+        .into_iter()
+        .chain(git_removal_errors)
+        .collect();
+
+    if !all_errors.is_empty() {
+        let detail = all_errors
+            .into_iter()
+            .map(|(path, error)| format!("{}: {error}", path.display()))
+            .collect::<Vec<_>>()
+            .join("\n");
+        show_error_toast(
+            "Thread archived, but linked worktree cleanup failed",
+            &detail,
+            &plan,
+            cx,
+        );
+    }
+
+    release_in_flight_roots(&roots_to_delete, cx);
+}
+
+/// Prompts the workspace to save before its roots are removed. Returns
+/// `true` to continue with removal; `false` means abort (save declined,
+/// or the window/workspace is gone).
+async fn save_workspace_for_root_removal(
+    workspace: Entity<Workspace>,
+    window: WindowHandle<MultiWorkspace>,
+    cx: &mut AsyncApp,
+) -> bool {
+    let has_dirty_items = workspace.read_with(cx, |workspace, cx| {
+        workspace.items(cx).any(|item| item.is_dirty(cx))
+    });
+
+    // Bring the workspace to the foreground only when a save prompt will
+    // actually appear, so the user sees what they're being asked about.
+    if has_dirty_items {
+        let _ = window.update(cx, |multi_workspace, window, cx| {
+            window.activate_window();
+            multi_workspace.activate(workspace.clone(), window, cx);
+        });
+    }
+
+    let save_task = window.update(cx, |_multi_workspace, window, cx| {
+        workspace.update(cx, |workspace, cx| {
+            workspace.save_for_root_removal(window, cx)
+        })
+    });
+
+    let Ok(task) = save_task else {
+        return false;
+    };
+
+    // A failed save task is treated the same as the user declining.
+    task.await.unwrap_or(false)
+}
+
+/// Activates the chosen fallback destination: focuses an existing
+/// workspace, opens the given paths, or opens a fresh empty workspace in
+/// the requesting window.
+async fn activate_fallback(target: FallbackTarget, cx: &mut AsyncApp) -> Result<()> {
+    match target {
+        FallbackTarget::ExistingWorkspace { window, workspace } => {
+            window.update(cx, |multi_workspace, window, cx| {
+                window.activate_window();
+                multi_workspace.activate(workspace, window, cx);
+            })?;
+        }
+        FallbackTarget::OpenPaths {
+            requesting_window,
+            paths,
+        } => {
+            let app_state = current_app_state(cx).context("no workspace app state available")?;
+            cx.update(|cx| {
+                open_paths(
+                    &paths,
+                    app_state,
+                    OpenOptions {
+                        requesting_window: Some(requesting_window),
+                        open_mode: OpenMode::Activate,
+                        ..Default::default()
+                    },
+                    cx,
+                )
+            })
+            .await?;
+        }
+        FallbackTarget::OpenEmpty { requesting_window } => {
+            let app_state = current_app_state(cx).context("no workspace app state available")?;
+            cx.update(|cx| {
+                open_new(
+                    OpenOptions {
+                        requesting_window: Some(requesting_window),
+                        open_mode: OpenMode::Activate,
+                        ..Default::default()
+                    },
+                    app_state,
+                    cx,
+                    |_workspace, _window, _cx| {},
+                )
+            })
+            .await?;
+        }
+    }
+
+    Ok(())
+}
+
+/// Detaches the root from every project that has it open, then removes
+/// the git worktree. On failure the worktrees are re-added to those
+/// projects via `rollback_root`.
+async fn remove_root(root: RootPlan, cx: &mut AsyncApp) -> Result<()> {
+    // Kick off removal in each project and collect the release waiters;
+    // git-level removal must not start while a project still holds the
+    // worktree.
+    let release_tasks: Vec<_> = root
+        .affected_projects
+        .iter()
+        .map(|affected| {
+            let project = affected.project.clone();
+            let worktree_id = affected.worktree_id;
+            project.update(cx, |project, cx| {
+                let wait = project.wait_for_worktree_release(worktree_id, cx);
+                project.remove_worktree(worktree_id, cx);
+                wait
+            })
+        })
+        .collect();
+
+    if let Err(error) = remove_root_after_worktree_removal(&root, release_tasks, cx).await {
+        rollback_root(&root, cx).await;
+        return Err(error);
+    }
+
+    Ok(())
+}
+
+/// Waits for all projects to release the worktree, then asks the main
+/// repository to run the git worktree removal (non-force — see the
+/// `false` argument; NOTE(review): presumably fails if the worktree is
+/// dirty, which persistence should have prevented — confirm).
+async fn remove_root_after_worktree_removal(
+    root: &RootPlan,
+    release_tasks: Vec<Task<Result<()>>>,
+    cx: &mut AsyncApp,
+) -> Result<()> {
+    for task in release_tasks {
+        task.await?;
+    }
+
+    let (repo, _temp_project) = find_or_create_repository(&root.main_repo_path, cx).await?;
+    let receiver = repo.update(cx, |repo: &mut Repository, _cx| {
+        repo.remove_worktree(root.root_path.clone(), false)
+    });
+    let result = receiver
+        .await
+        .map_err(|_| anyhow!("git worktree removal was canceled"))?;
+    result
+}
+
+/// Finds a live `Repository` entity for the given path, or creates a temporary
+/// `Project::local` to obtain one.
+///
+/// `Repository` entities can only be obtained through a `Project` because
+/// `GitStore` (which creates and manages `Repository` entities) is owned by
+/// `Project`. When no open workspace contains the repo we need, we spin up a
+/// headless `Project::local` just to get a `Repository` handle. The caller
+/// keeps the returned `Option<Entity<Project>>` alive for the duration of the
+/// git operations, then drops it.
+///
+/// Future improvement: decoupling `GitStore` from `Project` so that
+/// `Repository` entities can be created standalone would eliminate this
+/// temporary-project workaround.
+async fn find_or_create_repository(
+    repo_path: &Path,
+    cx: &mut AsyncApp,
+) -> Result<(Entity<Repository>, Option<Entity<Project>>)> {
+    // Fast path: scan every open workspace for a repository whose working
+    // directory is exactly the requested path.
+    let repo_path_owned = repo_path.to_path_buf();
+    let live_repo = cx.update(|cx| {
+        all_open_workspaces(cx)
+            .into_iter()
+            .flat_map(|workspace| {
+                workspace
+                    .read(cx)
+                    .project()
+                    .read(cx)
+                    .repositories(cx)
+                    .values()
+                    .cloned()
+                    .collect::<Vec<_>>()
+            })
+            .find(|repo| {
+                repo.read(cx).snapshot().work_directory_abs_path.as_ref()
+                    == repo_path_owned.as_path()
+            })
+    });
+
+    if let Some(repo) = live_repo {
+        return Ok((repo, None));
+    }
+
+    // Slow path: build a headless local project over the repo path.
+    let app_state =
+        current_app_state(cx).context("no app state available for temporary project")?;
+    let temp_project = cx.update(|cx| {
+        Project::local(
+            app_state.client.clone(),
+            app_state.node_runtime.clone(),
+            app_state.user_store.clone(),
+            app_state.languages.clone(),
+            app_state.fs.clone(),
+            None,
+            LocalProjectFlags::default(),
+            cx,
+        )
+    });
+
+    let repo_path_for_worktree = repo_path.to_path_buf();
+    let create_worktree = temp_project.update(cx, |project, cx| {
+        project.create_worktree(repo_path_for_worktree, true, cx)
+    });
+    let _worktree = create_worktree.await?;
+    // The repository only appears after the initial scan discovers .git.
+    let initial_scan = temp_project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
+    initial_scan.await;
+
+    let repo_path_for_find = repo_path.to_path_buf();
+    let repo = temp_project
+        .update(cx, |project, cx| {
+            project
+                .repositories(cx)
+                .values()
+                .find(|repo| {
+                    repo.read(cx).snapshot().work_directory_abs_path.as_ref()
+                        == repo_path_for_find.as_path()
+                })
+                .cloned()
+        })
+        .context("failed to resolve temporary repository handle")?;
+
+    // Barrier ensures the repo's job queue has drained any setup work
+    // before callers start issuing git operations against it.
+    let barrier = repo.update(cx, |repo: &mut Repository, _cx| repo.barrier());
+    barrier
+        .await
+        .map_err(|_| anyhow!("temporary repository barrier canceled"))?;
+    Ok((repo, Some(temp_project)))
+}
+
+/// Best-effort undo of project-level worktree removal: re-adds the root
+/// to every project that previously contained it. Errors are ignored —
+/// this runs on an already-failing path.
+async fn rollback_root(root: &RootPlan, cx: &mut AsyncApp) {
+    for affected in &root.affected_projects {
+        let task = affected.project.update(cx, |project, cx| {
+            project.create_worktree(root.root_path.clone(), true, cx)
+        });
+        let _ = task.await;
+    }
+}
+
+/// Persists the worktree's working state so it can be restored after the
+/// worktree directory is deleted:
+///
+/// 1. Commit staged changes ("WIP staged") and record that SHA.
+/// 2. Stage everything (including untracked files) and commit again
+///    ("WIP unstaged"), recording that SHA as well.
+/// 3. Store both SHAs in an archived-worktree DB record, link every
+///    thread that references this path to it, and pin the tip commit
+///    with a ref on the main repo so git GC cannot collect it.
+///
+/// On any fatal failure the worktree is reset back toward its
+/// pre-archive state (`HEAD~1` early on, `{staged}~1` once the staged
+/// SHA is known — the parent of the first WIP commit) before returning.
+/// Step 7 (ref creation) is deliberately non-fatal: the DB record alone
+/// is enough to restore, as long as GC hasn't run.
+async fn persist_worktree_state(
+    root: &RootPlan,
+    plan: &CleanupPlan,
+    cx: &mut AsyncApp,
+) -> Result<PersistOutcome> {
+    let worktree_repo = root
+        .worktree_repo
+        .clone()
+        .context("no worktree repo entity for persistence")?;
+
+    // Step 1: Create WIP commit #1 (staged state)
+    // allow_empty: a clean index must still produce a commit so the
+    // two-SHA scheme holds even with nothing staged.
+    let askpass = AskPassDelegate::new(cx, |_, _, _| {});
+    let commit_rx = worktree_repo.update(cx, |repo, cx| {
+        repo.commit(
+            "WIP staged".into(),
+            None,
+            CommitOptions {
+                allow_empty: true,
+                ..Default::default()
+            },
+            askpass,
+            cx,
+        )
+    });
+    commit_rx
+        .await
+        .map_err(|_| anyhow!("WIP staged commit canceled"))??;
+
+    // Read SHA after staged commit
+    let staged_sha_result = worktree_repo
+        .update(cx, |repo, _cx| repo.head_sha())
+        .await
+        .map_err(|_| anyhow!("head_sha canceled"))
+        .and_then(|r| r.context("failed to read HEAD SHA after staged commit"))
+        .and_then(|opt| opt.context("HEAD SHA is None after staged commit"));
+    let staged_commit_hash = match staged_sha_result {
+        Ok(sha) => sha,
+        Err(error) => {
+            // Mixed reset undoes the commit but keeps the changes in the
+            // working tree (they return as unstaged).
+            let rx = worktree_repo.update(cx, |repo, cx| {
+                repo.reset("HEAD~1".to_string(), ResetMode::Mixed, cx)
+            });
+            let _ = rx.await;
+            return Err(error);
+        }
+    };
+
+    // Step 2: Stage all files including untracked
+    let stage_rx = worktree_repo.update(cx, |repo, _cx| repo.stage_all_including_untracked());
+    if let Err(error) = stage_rx
+        .await
+        .map_err(|_| anyhow!("stage all canceled"))
+        .and_then(|inner| inner)
+    {
+        let rx = worktree_repo.update(cx, |repo, cx| {
+            repo.reset("HEAD~1".to_string(), ResetMode::Mixed, cx)
+        });
+        let _ = rx.await;
+        return Err(error.context("failed to stage all files including untracked"));
+    }
+
+    // Step 3: Create WIP commit #2 (unstaged/untracked state)
+    let askpass = AskPassDelegate::new(cx, |_, _, _| {});
+    let commit_rx = worktree_repo.update(cx, |repo, cx| {
+        repo.commit(
+            "WIP unstaged".into(),
+            None,
+            CommitOptions {
+                allow_empty: true,
+                ..Default::default()
+            },
+            askpass,
+            cx,
+        )
+    });
+    if let Err(error) = commit_rx
+        .await
+        .map_err(|_| anyhow!("WIP unstaged commit canceled"))
+        .and_then(|inner| inner)
+    {
+        let rx = worktree_repo.update(cx, |repo, cx| {
+            repo.reset("HEAD~1".to_string(), ResetMode::Mixed, cx)
+        });
+        let _ = rx.await;
+        return Err(error);
+    }
+
+    // Step 4: Read HEAD SHA after WIP commits
+    let head_sha_result = worktree_repo
+        .update(cx, |repo, _cx| repo.head_sha())
+        .await
+        .map_err(|_| anyhow!("head_sha canceled"))
+        .and_then(|r| r.context("failed to read HEAD SHA after WIP commits"))
+        .and_then(|opt| opt.context("HEAD SHA is None after WIP commits"));
+    let unstaged_commit_hash = match head_sha_result {
+        Ok(sha) => sha,
+        Err(error) => {
+            // From here on, rollback targets the staged commit's parent
+            // ({staged}~1) rather than HEAD~N arithmetic, which would be
+            // fragile if HEAD moved.
+            let rx = worktree_repo.update(cx, |repo, cx| {
+                repo.reset(format!("{}~1", staged_commit_hash), ResetMode::Mixed, cx)
+            });
+            let _ = rx.await;
+            return Err(error);
+        }
+    };
+
+    // Step 5: Create DB record
+    let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+    let worktree_path_str = root.root_path.to_string_lossy().to_string();
+    let main_repo_path_str = root.main_repo_path.to_string_lossy().to_string();
+    let branch_name = root.branch_name.clone();
+
+    let db_result = store
+        .read_with(cx, |store, cx| {
+            store.create_archived_worktree(
+                &worktree_path_str,
+                &main_repo_path_str,
+                branch_name.as_deref(),
+                &staged_commit_hash,
+                &unstaged_commit_hash,
+                cx,
+            )
+        })
+        .await
+        .context("failed to create archived worktree DB record");
+    let archived_worktree_id = match db_result {
+        Ok(id) => id,
+        Err(error) => {
+            let rx = worktree_repo.update(cx, |repo, cx| {
+                repo.reset(format!("{}~1", staged_commit_hash), ResetMode::Mixed, cx)
+            });
+            let _ = rx.await;
+            return Err(error);
+        }
+    };
+
+    // Step 6: Link all threads on this worktree to the archived record
+    let session_ids: Vec<acp::SessionId> = store.read_with(cx, |store, _cx| {
+        store
+            .all_session_ids_for_path(&plan.folder_paths)
+            .cloned()
+            .collect()
+    });
+
+    for session_id in &session_ids {
+        let link_result = store
+            .read_with(cx, |store, cx| {
+                store.link_thread_to_archived_worktree(&session_id.0, archived_worktree_id, cx)
+            })
+            .await;
+        if let Err(error) = link_result {
+            // Linking failed: delete the orphan DB record and undo the
+            // WIP commits so nothing half-archived is left behind.
+            if let Err(delete_error) = store
+                .read_with(cx, |store, cx| {
+                    store.delete_archived_worktree(archived_worktree_id, cx)
+                })
+                .await
+            {
+                log::error!(
+                    "Failed to delete archived worktree DB record during link rollback: {delete_error:#}"
+                );
+            }
+            let rx = worktree_repo.update(cx, |repo, cx| {
+                repo.reset(format!("{}~1", staged_commit_hash), ResetMode::Mixed, cx)
+            });
+            let _ = rx.await;
+            return Err(error.context("failed to link thread to archived worktree"));
+        }
+    }
+
+    // Step 7: Create git ref on main repo (non-fatal)
+    let ref_name = archived_worktree_ref_name(archived_worktree_id);
+    let main_repo_result = find_or_create_repository(&root.main_repo_path, cx).await;
+    match main_repo_result {
+        Ok((main_repo, _temp_project)) => {
+            let rx = main_repo.update(cx, |repo, _cx| {
+                repo.update_ref(ref_name.clone(), unstaged_commit_hash.clone())
+            });
+            if let Err(error) = rx
+                .await
+                .map_err(|_| anyhow!("update_ref canceled"))
+                .and_then(|r| r)
+            {
+                log::warn!(
+                    "Failed to create ref {} on main repo (non-fatal): {error}",
+                    ref_name
+                );
+            }
+        }
+        Err(error) => {
+            log::warn!(
+                "Could not find main repo to create ref {} (non-fatal): {error}",
+                ref_name
+            );
+        }
+    }
+
+    Ok(PersistOutcome {
+        archived_worktree_id,
+        staged_commit_hash,
+    })
+}
+
+/// Best-effort undo of a successful `persist_worktree_state` when the
+/// subsequent root removal failed: resets the worktree back to the
+/// pre-archive HEAD, deletes the pinning ref on the main repo, and
+/// removes the DB record. Individual failures are logged, not returned —
+/// this runs on an already-failing path.
+async fn rollback_persist(outcome: &PersistOutcome, root: &RootPlan, cx: &mut AsyncApp) {
+    // Undo WIP commits on the worktree repo
+    // ({staged}~1 is the parent of the first WIP commit — the original
+    // HEAD before archiving).
+    if let Some(worktree_repo) = &root.worktree_repo {
+        let rx = worktree_repo.update(cx, |repo, cx| {
+            repo.reset(
+                format!("{}~1", outcome.staged_commit_hash),
+                ResetMode::Mixed,
+                cx,
+            )
+        });
+        let _ = rx.await;
+    }
+
+    // Delete the git ref on main repo
+    if let Ok((main_repo, _temp_project)) =
+        find_or_create_repository(&root.main_repo_path, cx).await
+    {
+        let ref_name = archived_worktree_ref_name(outcome.archived_worktree_id);
+        let rx = main_repo.update(cx, |repo, _cx| repo.delete_ref(ref_name));
+        let _ = rx.await;
+    }
+
+    // Delete the DB record
+    let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+    if let Err(error) = store
+        .read_with(cx, |store, cx| {
+            store.delete_archived_worktree(outcome.archived_worktree_id, cx)
+        })
+        .await
+    {
+        log::error!("Failed to delete archived worktree DB record during rollback: {error:#}");
+    }
+}
+
+/// Closes any affected workspace that was left with zero roots after
+/// cleanup; when it's the last workspace in its window the whole window
+/// is closed. Workspace entities dropped in the meantime are skipped.
+async fn cleanup_empty_workspaces(workspaces: &[Entity<Workspace>], cx: &mut AsyncApp) {
+    for workspace in workspaces {
+        // Downgrade to a weak handle so a workspace that was already
+        // dropped yields Err instead of panicking on access.
+        let is_empty = match workspace
+            .downgrade()
+            .read_with(cx, |workspace, cx| workspace.root_paths(cx).is_empty())
+        {
+            Ok(is_empty) => is_empty,
+            Err(_) => {
+                log::debug!("Workspace entity already dropped during cleanup; skipping");
+                continue;
+            }
+        };
+        if !is_empty {
+            continue;
+        }
+
+        let Some(window) = window_for_workspace_async(workspace, cx) else {
+            continue;
+        };
+
+        // remove() returning false means this was the window's last
+        // workspace, so close the window itself.
+        let _ = window.update(cx, |multi_workspace, window, cx| {
+            if !multi_workspace.remove(workspace, window, cx) {
+                window.remove_window();
+            }
+        });
+    }
+}
+
+/// Recreates an archived git worktree from its persisted DB record `row`.
+///
+/// The archive protocol stores two SHAs: `staged_commit_hash` (a "WIP staged"
+/// commit) with `unstaged_commit_hash` (a "WIP unstaged" commit) on top of it.
+/// Restoration creates a detached worktree at the unstaged commit, then peels
+/// both WIP commits back off so the staged/unstaged split is reconstructed.
+///
+/// Returns the path of the restored worktree (which may differ from the
+/// original `row.worktree_path` when that path is already occupied).
+/// On reset failure, a best-effort rollback resets back to the unstaged
+/// commit before returning the error.
+pub async fn restore_worktree_via_git(
+    row: &ArchivedGitWorktree,
+    cx: &mut AsyncApp,
+) -> Result<PathBuf> {
+    // Step 1: Find the main repo entity
+    let (main_repo, _temp_project) = find_or_create_repository(&row.main_repo_path, cx).await?;
+
+    // Step 2: Handle path conflicts
+    let worktree_path = &row.worktree_path;
+    let app_state = current_app_state(cx).context("no app state available")?;
+    let already_exists = app_state.fs.metadata(worktree_path).await?.is_some();
+
+    // If the original path is taken, derive a fresh, unique location of the
+    // form <worktrees-dir>/<branch>-restored-<id>/<repo-name>.
+    let final_path = if already_exists {
+        let worktree_directory =
+            worktrees_directory_for_repo(&row.main_repo_path, DEFAULT_WORKTREE_DIRECTORY)?;
+        let new_name = format!(
+            "{}-restored-{}",
+            row.branch_name.as_deref().unwrap_or("worktree"),
+            row.id
+        );
+        let project_name = row
+            .main_repo_path
+            .file_name()
+            .context("git repo must have a directory name")?;
+        worktree_directory.join(&new_name).join(project_name)
+    } else {
+        worktree_path.clone()
+    };
+
+    // Step 3: Create detached worktree
+    let rx = main_repo.update(cx, |repo, _cx| {
+        repo.create_worktree_detached(final_path.clone(), row.unstaged_commit_hash.clone())
+    });
+    // The receiver erroring means the job channel was dropped, i.e. canceled.
+    rx.await
+        .map_err(|_| anyhow!("worktree creation was canceled"))?
+        .context("failed to create worktree")?;
+
+    // Step 4: Get the worktree's repo entity
+    let (wt_repo, _temp_wt_project) = find_or_create_repository(&final_path, cx).await?;
+
+    // Step 5: Mixed reset to staged commit (undo the "WIP unstaged" commit)
+    let rx = wt_repo.update(cx, |repo, cx| {
+        repo.reset(row.staged_commit_hash.clone(), ResetMode::Mixed, cx)
+    });
+    match rx.await {
+        Ok(Ok(())) => {}
+        Ok(Err(error)) => {
+            // Best-effort rollback to the commit the worktree was created at;
+            // the rollback's own result is deliberately ignored.
+            let _ = wt_repo
+                .update(cx, |repo, cx| {
+                    repo.reset(row.unstaged_commit_hash.clone(), ResetMode::Mixed, cx)
+                })
+                .await;
+            return Err(error.context("mixed reset failed while restoring worktree"));
+        }
+        Err(_) => {
+            return Err(anyhow!("mixed reset was canceled"));
+        }
+    }
+
+    // Step 6: Soft reset to parent of staged commit (undo the "WIP staged" commit)
+    let rx = wt_repo.update(cx, |repo, cx| {
+        repo.reset(format!("{}~1", row.staged_commit_hash), ResetMode::Soft, cx)
+    });
+    match rx.await {
+        Ok(Ok(())) => {}
+        Ok(Err(error)) => {
+            // NOTE(review): this rollback uses a Mixed reset even though the
+            // index was already rewound in step 5 — presumably intentional as a
+            // best-effort "return to archived tip" path; confirm.
+            let _ = wt_repo
+                .update(cx, |repo, cx| {
+                    repo.reset(row.unstaged_commit_hash.clone(), ResetMode::Mixed, cx)
+                })
+                .await;
+            return Err(error.context("soft reset failed while restoring worktree"));
+        }
+        Err(_) => {
+            return Err(anyhow!("soft reset was canceled"));
+        }
+    }
+
+    // Step 7: Restore the branch
+    if let Some(branch_name) = &row.branch_name {
+        let rx = wt_repo.update(cx, |repo, _cx| repo.change_branch(branch_name.clone()));
+        match rx.await {
+            Ok(Ok(())) => {}
+            _ => {
+                // The branch may have been deleted since archiving; try to
+                // recreate it at the current HEAD.
+                let rx = wt_repo.update(cx, |repo, _cx| {
+                    repo.create_branch(branch_name.clone(), None)
+                });
+                if let Ok(Err(_)) | Err(_) = rx.await {
+                    // Branch restoration is non-fatal: the tree contents are
+                    // intact, the worktree just stays on a detached HEAD.
+                    log::warn!(
+                        "Could not switch to branch '{}' — \
+                         restored worktree is in detached HEAD state.",
+                        branch_name
+                    );
+                }
+            }
+        }
+    }
+
+    Ok(final_path)
+}
+
+/// Permanently discards an archived-worktree record: removes the git ref that
+/// pinned the archive commits in the main repo, then deletes the DB row.
+///
+/// All failures are logged and swallowed — this is a cleanup path with nothing
+/// useful for the caller to do on error.
+pub async fn cleanup_archived_worktree_record(row: &ArchivedGitWorktree, cx: &mut AsyncApp) {
+    // Delete the git ref from the main repo
+    if let Ok((main_repo, _temp_project)) = find_or_create_repository(&row.main_repo_path, cx).await
+    {
+        let ref_name = archived_worktree_ref_name(row.id);
+        let rx = main_repo.update(cx, |repo, _cx| repo.delete_ref(ref_name));
+        match rx.await {
+            Ok(Ok(())) => {}
+            Ok(Err(error)) => log::warn!("Failed to delete archive ref: {error}"),
+            Err(_) => log::warn!("Archive ref deletion was canceled"),
+        }
+    }
+
+    // Delete the DB records
+    let store = cx.update(|cx| ThreadMetadataStore::global(cx));
+    store
+        .read_with(cx, |store, cx| store.delete_archived_worktree(row.id, cx))
+        .await
+        .log_err();
+}
+
+/// Surfaces a cleanup error to the user as an auto-hiding workspace toast.
+///
+/// Prefers the plan's current workspace, falling back to the first affected
+/// one; silently does nothing when neither exists. `detail`, when non-empty,
+/// is appended to `summary` as "summary: detail".
+fn show_error_toast(summary: &str, detail: &str, plan: &CleanupPlan, cx: &mut AsyncApp) {
+    let target_workspace = plan
+        .current_workspace
+        .clone()
+        .or_else(|| plan.affected_workspaces.first().cloned());
+    let Some(workspace) = target_workspace else {
+        return;
+    };
+
+    let _ = workspace.update(cx, |workspace, cx| {
+        // Local marker type gives this toast its own unique NotificationId
+        // namespace.
+        struct ArchiveCleanupErrorToast;
+        let message = if detail.is_empty() {
+            summary.to_string()
+        } else {
+            format!("{summary}: {detail}")
+        };
+        workspace.show_toast(
+            Toast::new(
+                NotificationId::unique::<ArchiveCleanupErrorToast>(),
+                message,
+            )
+            .autohide(),
+            cx,
+        );
+    });
+}
+
+/// Collects every `Workspace` hosted by any open `MultiWorkspace` window.
+/// Windows that fail to read (e.g. already closed) contribute nothing.
+fn all_open_workspaces(cx: &App) -> Vec<Entity<Workspace>> {
+    cx.windows()
+        .into_iter()
+        .filter_map(|window| window.downcast::<MultiWorkspace>())
+        .flat_map(|multi_workspace| {
+            multi_workspace
+                .read(cx)
+                .map(|multi_workspace| multi_workspace.workspaces().to_vec())
+                .unwrap_or_default()
+        })
+        .collect()
+}
+
+/// True when the workspace keeps at least one root path that is NOT slated
+/// for removal (`doomed_roots`) — i.e. it should stay open after cleanup.
+fn workspace_survives(
+    workspace: &Entity<Workspace>,
+    doomed_roots: &HashSet<PathBuf>,
+    cx: &App,
+) -> bool {
+    workspace
+        .read(cx)
+        .root_paths(cx)
+        .into_iter()
+        .any(|root| !doomed_roots.contains(root.as_ref()))
+}
+
+/// Snapshots a workspace's current root paths as a `PathList`.
+fn workspace_path_list(workspace: &Entity<Workspace>, cx: &App) -> PathList {
+    PathList::new(&workspace.read(cx).root_paths(cx))
+}
+
+/// Finds the `MultiWorkspace` window that currently contains `workspace`,
+/// or `None` when no open window hosts it.
+fn window_for_workspace(
+    workspace: &Entity<Workspace>,
+    cx: &App,
+) -> Option<WindowHandle<MultiWorkspace>> {
+    cx.windows()
+        .into_iter()
+        .filter_map(|window| window.downcast::<MultiWorkspace>())
+        .find(|window| {
+            window
+                .read(cx)
+                // Unreadable (e.g. closing) windows are treated as non-matches.
+                .map(|multi_workspace| multi_workspace.workspaces().contains(workspace))
+                .unwrap_or(false)
+        })
+}
+
+/// Async-context wrapper around [`window_for_workspace`]: hops onto the app
+/// context to perform the same lookup.
+fn window_for_workspace_async(
+    workspace: &Entity<Workspace>,
+    cx: &mut AsyncApp,
+) -> Option<WindowHandle<MultiWorkspace>> {
+    let workspace = workspace.clone();
+    cx.update(|cx| window_for_workspace(&workspace, cx))
+}
+
+/// Fetches the shared `AppState` by borrowing it from any open workspace.
+/// Returns `None` when no workspaces are open (nothing to borrow from).
+fn current_app_state(cx: &mut AsyncApp) -> Option<Arc<AppState>> {
+    cx.update(|cx| {
+        all_open_workspaces(cx)
+            .into_iter()
+            .next()
+            .map(|workspace| workspace.read(cx).app_state().clone())
+    })
+}
+
+/// Releases the coordinator's in-flight reservation for each root in `roots`,
+/// allowing a subsequent archive/cleanup pass to claim those paths again.
+fn release_in_flight_roots(roots: &[RootPlan], cx: &mut AsyncApp) {
+    cx.update_global::<ThreadArchiveCleanupCoordinator, _>(|coordinator, _cx| {
+        let mut in_flight_roots = coordinator.in_flight_roots.lock();
+        for root in roots {
+            in_flight_roots.remove(&root.root_path);
+        }
+    });
+}

crates/fs/src/fake_git_repo.rs 🔗

@@ -35,8 +35,16 @@ pub struct FakeGitRepository {
     pub(crate) is_trusted: Arc<AtomicBool>,
 }
 
+#[derive(Debug, Clone)]
+pub struct FakeCommitSnapshot {
+    pub head_contents: HashMap<RepoPath, String>,
+    pub index_contents: HashMap<RepoPath, String>,
+    pub sha: String,
+}
+
 #[derive(Debug, Clone)]
 pub struct FakeGitRepositoryState {
+    pub commit_history: Vec<FakeCommitSnapshot>,
     pub event_emitter: smol::channel::Sender<PathBuf>,
     pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
     pub head_contents: HashMap<RepoPath, String>,
@@ -72,6 +80,7 @@ impl FakeGitRepositoryState {
             oids: Default::default(),
             remotes: HashMap::default(),
             graph_commits: Vec::new(),
+            commit_history: Vec::new(),
         }
     }
 }
@@ -214,11 +223,52 @@ impl GitRepository for FakeGitRepository {
 
     fn reset(
         &self,
-        _commit: String,
-        _mode: ResetMode,
+        commit: String,
+        mode: ResetMode,
         _env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
+        // Simulate `git reset` by popping snapshots off commit_history.
+        // Each snapshot records the head/index state captured just BEFORE its
+        // commit was made, plus the pre-commit HEAD sha, so popping N entries
+        // rewinds HEAD by N commits.
+        self.with_state_async(true, move |state| {
+            // Resolve `commit` to a pop count: bare "HEAD~", "HEAD~N", or a
+            // known sha recorded in commit_history.
+            let pop_count = if commit == "HEAD~" {
+                1
+            } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
+                suffix
+                    .parse::<usize>()
+                    .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
+            } else {
+                match state
+                    .commit_history
+                    .iter()
+                    .rposition(|entry| entry.sha == commit)
+                {
+                    Some(index) => state.commit_history.len() - index,
+                    None => anyhow::bail!("Unknown commit ref: {commit}"),
+                }
+            };
+
+            // Reject no-op resets and resets past the recorded history.
+            if pop_count == 0 || pop_count > state.commit_history.len() {
+                anyhow::bail!(
+                    "Cannot reset {pop_count} commit(s): only {} in history",
+                    state.commit_history.len()
+                );
+            }
+
+            let target_index = state.commit_history.len() - pop_count;
+            let snapshot = state.commit_history[target_index].clone();
+            state.commit_history.truncate(target_index);
+
+            match mode {
+                // Soft: move HEAD only; index is left untouched.
+                ResetMode::Soft => {
+                    state.head_contents = snapshot.head_contents;
+                }
+                // Mixed: move HEAD and make the index match it.
+                ResetMode::Mixed => {
+                    state.head_contents = snapshot.head_contents;
+                    state.index_contents = state.head_contents.clone();
+                }
+            }
+
+            state.refs.insert("HEAD".into(), snapshot.sha);
+            Ok(())
+        })
     }
 
     fn checkout_files(
@@ -483,7 +533,7 @@ impl GitRepository for FakeGitRepository {
 
     fn create_worktree(
         &self,
-        branch_name: String,
+        branch_name: Option<String>,
         path: PathBuf,
         from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
@@ -498,8 +548,10 @@ impl GitRepository for FakeGitRepository {
                 if let Some(message) = &state.simulated_create_worktree_error {
                     anyhow::bail!("{message}");
                 }
-                if state.branches.contains(&branch_name) {
-                    bail!("a branch named '{}' already exists", branch_name);
+                if let Some(ref name) = branch_name {
+                    if state.branches.contains(name) {
+                        bail!("a branch named '{}' already exists", name);
+                    }
                 }
                 Ok(())
             })??;
@@ -508,13 +560,22 @@ impl GitRepository for FakeGitRepository {
             fs.create_dir(&path).await?;
 
             // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
-            let ref_name = format!("refs/heads/{branch_name}");
-            let worktrees_entry_dir = common_dir_path.join("worktrees").join(&branch_name);
+            let worktree_entry_name = branch_name
+                .as_deref()
+                .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap());
+            let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
             fs.create_dir(&worktrees_entry_dir).await?;
 
+            let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
+            let head_content = if let Some(ref branch_name) = branch_name {
+                let ref_name = format!("refs/heads/{branch_name}");
+                format!("ref: {ref_name}")
+            } else {
+                sha.clone()
+            };
             fs.write_file_internal(
                 worktrees_entry_dir.join("HEAD"),
-                format!("ref: {ref_name}").into_bytes(),
+                head_content.into_bytes(),
                 false,
             )?;
             fs.write_file_internal(
@@ -537,10 +598,14 @@ impl GitRepository for FakeGitRepository {
             )?;
 
             // Update git state: add ref and branch.
-            let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
             fs.with_git_state(&dot_git_path, true, move |state| {
-                state.refs.insert(ref_name, sha);
-                state.branches.insert(branch_name);
+                if let Some(branch_name) = branch_name {
+                    let ref_name = format!("refs/heads/{branch_name}");
+                    state.refs.insert(ref_name, sha);
+                    state.branches.insert(branch_name);
+                } else {
+                    state.refs.insert("HEAD".into(), sha);
+                }
                 Ok::<(), anyhow::Error>(())
             })??;
             Ok(())
@@ -815,11 +880,30 @@ impl GitRepository for FakeGitRepository {
         &self,
         _message: gpui::SharedString,
         _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
-        _options: CommitOptions,
+        options: CommitOptions,
         _askpass: AskPassDelegate,
         _env: Arc<HashMap<String, String>>,
     ) -> BoxFuture<'_, Result<()>> {
-        async { Ok(()) }.boxed()
+        self.with_state_async(true, move |state| {
+            if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
+            {
+                anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
+            }
+
+            let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
+            state.commit_history.push(FakeCommitSnapshot {
+                head_contents: state.head_contents.clone(),
+                index_contents: state.index_contents.clone(),
+                sha: old_sha,
+            });
+
+            state.head_contents = state.index_contents.clone();
+
+            let new_sha = format!("fake-commit-{}", state.commit_history.len());
+            state.refs.insert("HEAD".into(), new_sha);
+
+            Ok(())
+        })
     }
 
     fn run_hook(
@@ -1203,6 +1287,55 @@ impl GitRepository for FakeGitRepository {
         anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
     }
 
+    /// Fake `git update-ref`: points `ref_name` at `commit` in the in-memory
+    /// refs table, creating or overwriting it unconditionally.
+    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
+        self.with_state_async(true, move |state| {
+            state.refs.insert(ref_name, commit);
+            Ok(())
+        })
+    }
+
+    /// Fake `git update-ref -d`: removes `ref_name` from the in-memory refs
+    /// table. Deleting a nonexistent ref is a silent no-op.
+    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
+        self.with_state_async(true, move |state| {
+            state.refs.remove(&ref_name);
+            Ok(())
+        })
+    }
+
+    /// Fake `git add -A`: makes the in-memory index mirror the working tree.
+    ///
+    /// Snapshots every file under the repo's working directory (excluding
+    /// anything inside `.git`), stages their current contents, and drops index
+    /// entries for files no longer on disk. Non-UTF-8 files are skipped.
+    fn stage_all_including_untracked(&self) -> BoxFuture<'_, Result<()>> {
+        let workdir_path = self.dot_git_path.parent().unwrap();
+        let git_files: Vec<(RepoPath, String)> = self
+            .fs
+            .files()
+            .iter()
+            .filter_map(|path| {
+                // Only files inside this repo's working directory count.
+                let repo_path = path.strip_prefix(workdir_path).ok()?;
+                if repo_path.starts_with(".git") {
+                    return None;
+                }
+                let content = self
+                    .fs
+                    .read_file_sync(path)
+                    .ok()
+                    .and_then(|bytes| String::from_utf8(bytes).ok())?;
+                let rel_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
+                Some((RepoPath::from_rel_path(&rel_path), content))
+            })
+            .collect();
+
+        self.with_state_async(true, move |state| {
+            // Stage all filesystem contents, mirroring `git add -A`.
+            let fs_paths: HashSet<RepoPath> = git_files.iter().map(|(p, _)| p.clone()).collect();
+            for (path, content) in git_files {
+                state.index_contents.insert(path, content);
+            }
+            // Remove index entries for files that no longer exist on disk.
+            state
+                .index_contents
+                .retain(|path, _| fs_paths.contains(path));
+            Ok(())
+        })
+    }
+
     fn set_trusted(&self, trusted: bool) {
         self.is_trusted
             .store(trusted, std::sync::atomic::Ordering::Release);

crates/fs/tests/integration/fake_git_repo.rs 🔗

@@ -24,7 +24,7 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
     // Create a worktree
     let worktree_1_dir = worktrees_dir.join("feature-branch");
     repo.create_worktree(
-        "feature-branch".to_string(),
+        Some("feature-branch".to_string()),
         worktree_1_dir.clone(),
         Some("abc123".to_string()),
     )
@@ -47,9 +47,13 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
 
     // Create a second worktree (without explicit commit)
     let worktree_2_dir = worktrees_dir.join("bugfix-branch");
-    repo.create_worktree("bugfix-branch".to_string(), worktree_2_dir.clone(), None)
-        .await
-        .unwrap();
+    repo.create_worktree(
+        Some("bugfix-branch".to_string()),
+        worktree_2_dir.clone(),
+        None,
+    )
+    .await
+    .unwrap();
 
     let worktrees = repo.worktrees().await.unwrap();
     assert_eq!(worktrees.len(), 3);

crates/git/src/repository.rs 🔗

@@ -329,6 +329,7 @@ impl Upstream {
 pub struct CommitOptions {
     pub amend: bool,
     pub signoff: bool,
+    pub allow_empty: bool,
 }
 
 #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -715,7 +716,7 @@ pub trait GitRepository: Send + Sync {
 
     fn create_worktree(
         &self,
-        branch_name: String,
+        branch_name: Option<String>,
         path: PathBuf,
         from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>>;
@@ -916,6 +917,12 @@ pub trait GitRepository: Send + Sync {
 
     fn commit_data_reader(&self) -> Result<CommitDataReader>;
 
+    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>>;
+
+    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>>;
+
+    fn stage_all_including_untracked(&self) -> BoxFuture<'_, Result<()>>;
+
     fn set_trusted(&self, trusted: bool);
     fn is_trusted(&self) -> bool;
 }
@@ -1660,19 +1667,20 @@ impl GitRepository for RealGitRepository {
 
     fn create_worktree(
         &self,
-        branch_name: String,
+        branch_name: Option<String>,
         path: PathBuf,
         from_commit: Option<String>,
     ) -> BoxFuture<'_, Result<()>> {
         let git_binary = self.git_binary();
-        let mut args = vec![
-            OsString::from("worktree"),
-            OsString::from("add"),
-            OsString::from("-b"),
-            OsString::from(branch_name.as_str()),
-            OsString::from("--"),
-            OsString::from(path.as_os_str()),
-        ];
+        let mut args = vec![OsString::from("worktree"), OsString::from("add")];
+        if let Some(branch_name) = &branch_name {
+            args.push(OsString::from("-b"));
+            args.push(OsString::from(branch_name.as_str()));
+        } else {
+            args.push(OsString::from("--detach"));
+        }
+        args.push(OsString::from("--"));
+        args.push(OsString::from(path.as_os_str()));
         if let Some(from_commit) = from_commit {
             args.push(OsString::from(from_commit));
         } else {
@@ -2165,6 +2173,10 @@ impl GitRepository for RealGitRepository {
                 cmd.arg("--signoff");
             }
 
+            if options.allow_empty {
+                cmd.arg("--allow-empty");
+            }
+
             if let Some((name, email)) = name_and_email {
                 cmd.arg("--author").arg(&format!("{name} <{email}>"));
             }
@@ -2176,6 +2188,50 @@ impl GitRepository for RealGitRepository {
         .boxed()
     }
 
+    /// Runs `git update-ref <ref_name> <commit>` to create or move a ref
+    /// without touching HEAD or the working tree.
+    ///
+    /// `--no-optional-locks` avoids taking locks that could contend with a
+    /// concurrent editor-driven git process.
+    fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
+        let git_binary = self.git_binary();
+        self.executor
+            .spawn(async move {
+                let args: Vec<OsString> = vec![
+                    "--no-optional-locks".into(),
+                    "update-ref".into(),
+                    ref_name.into(),
+                    commit.into(),
+                ];
+                git_binary?.run(&args).await?;
+                Ok(())
+            })
+            .boxed()
+    }
+
+    /// Runs `git update-ref -d <ref_name>` to delete a ref. Fails (via git's
+    /// own exit status) if the ref does not exist.
+    fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
+        let git_binary = self.git_binary();
+        self.executor
+            .spawn(async move {
+                let args: Vec<OsString> = vec![
+                    "--no-optional-locks".into(),
+                    "update-ref".into(),
+                    "-d".into(),
+                    ref_name.into(),
+                ];
+                git_binary?.run(&args).await?;
+                Ok(())
+            })
+            .boxed()
+    }
+
+    /// Runs `git add -A`, staging all modifications, deletions, AND untracked
+    /// files — unlike plain path-based staging, this captures the complete
+    /// working-tree state (needed before creating a "WIP" archive commit).
+    fn stage_all_including_untracked(&self) -> BoxFuture<'_, Result<()>> {
+        let git_binary = self.git_binary();
+        self.executor
+            .spawn(async move {
+                let args: Vec<OsString> =
+                    vec!["--no-optional-locks".into(), "add".into(), "-A".into()];
+                git_binary?.run(&args).await?;
+                Ok(())
+            })
+            .boxed()
+    }
+
     fn push(
         &self,
         branch_name: String,
@@ -4009,7 +4065,7 @@ mod tests {
 
         // Create a new worktree
         repo.create_worktree(
-            "test-branch".to_string(),
+            Some("test-branch".to_string()),
             worktree_path.clone(),
             Some("HEAD".to_string()),
         )
@@ -4068,7 +4124,7 @@ mod tests {
         // Create a worktree
         let worktree_path = worktrees_dir.join("worktree-to-remove");
         repo.create_worktree(
-            "to-remove".to_string(),
+            Some("to-remove".to_string()),
             worktree_path.clone(),
             Some("HEAD".to_string()),
         )
@@ -4092,7 +4148,7 @@ mod tests {
         // Create a worktree
         let worktree_path = worktrees_dir.join("dirty-wt");
         repo.create_worktree(
-            "dirty-wt".to_string(),
+            Some("dirty-wt".to_string()),
             worktree_path.clone(),
             Some("HEAD".to_string()),
         )
@@ -4162,7 +4218,7 @@ mod tests {
         // Create a worktree
         let old_path = worktrees_dir.join("old-worktree-name");
         repo.create_worktree(
-            "old-name".to_string(),
+            Some("old-name".to_string()),
             old_path.clone(),
             Some("HEAD".to_string()),
         )

crates/git_ui/src/commit_modal.rs 🔗

@@ -453,6 +453,7 @@ impl CommitModal {
                                     CommitOptions {
                                         amend: is_amend_pending,
                                         signoff: is_signoff_enabled,
+                                        allow_empty: false,
                                     },
                                     window,
                                     cx,

crates/git_ui/src/git_panel.rs 🔗

@@ -2155,6 +2155,7 @@ impl GitPanel {
                 CommitOptions {
                     amend: false,
                     signoff: self.signoff_enabled,
+                    allow_empty: false,
                 },
                 window,
                 cx,
@@ -2195,6 +2196,7 @@ impl GitPanel {
                         CommitOptions {
                             amend: true,
                             signoff: self.signoff_enabled,
+                            allow_empty: false,
                         },
                         window,
                         cx,
@@ -4454,7 +4456,11 @@ impl GitPanel {
                         git_panel
                             .update(cx, |git_panel, cx| {
                                 git_panel.commit_changes(
-                                    CommitOptions { amend, signoff },
+                                    CommitOptions {
+                                        amend,
+                                        signoff,
+                                        allow_empty: false,
+                                    },
                                     window,
                                     cx,
                                 );

crates/project/src/git_store.rs 🔗

@@ -2338,6 +2338,7 @@ impl GitStore {
                     CommitOptions {
                         amend: options.amend,
                         signoff: options.signoff,
+                        allow_empty: options.allow_empty,
                     },
                     askpass,
                     cx,
@@ -5484,6 +5485,7 @@ impl Repository {
                             options: Some(proto::commit::CommitOptions {
                                 amend: options.amend,
                                 signoff: options.signoff,
+                                allow_empty: options.allow_empty,
                             }),
                             askpass_id,
                         })
@@ -5977,7 +5979,9 @@ impl Repository {
             move |repo, _cx| async move {
                 match repo {
                     RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
-                        backend.create_worktree(branch_name, path, commit).await
+                        backend
+                            .create_worktree(Some(branch_name), path, commit)
+                            .await
                     }
                     RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                         client
@@ -5997,6 +6001,86 @@ impl Repository {
         )
     }
 
+    /// Queues a job that creates a new worktree at `path`, checked out at
+    /// `commit` with a detached HEAD (no branch is created).
+    ///
+    /// Local repositories only; remote repositories yield an error through the
+    /// returned receiver. Dropping the receiver's sender side indicates the
+    /// job was canceled.
+    pub fn create_worktree_detached(
+        &mut self,
+        path: PathBuf,
+        commit: String,
+    ) -> oneshot::Receiver<Result<()>> {
+        self.send_job(
+            Some("git worktree add (detached)".into()),
+            move |repo, _cx| async move {
+                match repo {
+                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                        // `None` branch name selects `--detach` in the backend.
+                        backend.create_worktree(None, path, Some(commit)).await
+                    }
+                    RepositoryState::Remote(_) => {
+                        anyhow::bail!(
+                            "create_worktree_detached is not supported for remote repositories"
+                        )
+                    }
+                }
+            },
+        )
+    }
+
+    /// Queues a job that resolves the current HEAD sha of a local repository.
+    /// Yields `Ok(None)` when the backend has no HEAD (e.g. an unborn branch);
+    /// remote repositories yield an error.
+    pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
+        self.send_job(None, move |repo, _cx| async move {
+            match repo {
+                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                    Ok(backend.head_sha().await)
+                }
+                RepositoryState::Remote(_) => {
+                    anyhow::bail!("head_sha is not supported for remote repositories")
+                }
+            }
+        })
+    }
+
+    /// Queues a job that creates or moves `ref_name` to point at `commit`
+    /// (local repositories only; remote repositories yield an error).
+    pub fn update_ref(
+        &mut self,
+        ref_name: String,
+        commit: String,
+    ) -> oneshot::Receiver<Result<()>> {
+        self.send_job(None, move |repo, _cx| async move {
+            match repo {
+                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                    backend.update_ref(ref_name, commit).await
+                }
+                RepositoryState::Remote(_) => {
+                    anyhow::bail!("update_ref is not supported for remote repositories")
+                }
+            }
+        })
+    }
+
+    /// Queues a job that deletes `ref_name` from a local repository; remote
+    /// repositories yield an error.
+    pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
+        self.send_job(None, move |repo, _cx| async move {
+            match repo {
+                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                    backend.delete_ref(ref_name).await
+                }
+                RepositoryState::Remote(_) => {
+                    anyhow::bail!("delete_ref is not supported for remote repositories")
+                }
+            }
+        })
+    }
+
+    /// Queues a job that stages every change including untracked files
+    /// (backend `git add -A`) on a local repository; remote repositories
+    /// yield an error.
+    pub fn stage_all_including_untracked(&mut self) -> oneshot::Receiver<Result<()>> {
+        self.send_job(None, move |repo, _cx| async move {
+            match repo {
+                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+                    backend.stage_all_including_untracked().await
+                }
+                RepositoryState::Remote(_) => {
+                    anyhow::bail!(
+                        "stage_all_including_untracked is not supported for remote repositories"
+                    )
+                }
+            }
+        })
+    }
+
     pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
         let id = self.id;
         self.send_job(

crates/project/src/project.rs 🔗

@@ -4757,6 +4757,33 @@ impl Project {
         })
     }
 
+    /// Returns a task that resolves once the worktree entity for `worktree_id`
+    /// has been released (fully dropped), or immediately when no such worktree
+    /// exists.
+    ///
+    /// The task errs only if the release observer is dropped before firing —
+    /// which would mean the app is tearing down, not that the worktree leaked.
+    pub fn wait_for_worktree_release(
+        &mut self,
+        worktree_id: WorktreeId,
+        cx: &mut Context<Self>,
+    ) -> Task<Result<()>> {
+        let Some(worktree) = self.worktree_for_id(worktree_id, cx) else {
+            return Task::ready(Ok(()));
+        };
+
+        // The sender lives behind a Mutex<Option<..>> so the release callback
+        // can fire at most once even if invoked defensively more than once.
+        let (released_tx, released_rx) = futures::channel::oneshot::channel();
+        let released_tx = std::sync::Arc::new(Mutex::new(Some(released_tx)));
+        let release_subscription =
+            cx.observe_release(&worktree, move |_project, _released_worktree, _cx| {
+                if let Some(released_tx) = released_tx.lock().take() {
+                    let _ = released_tx.send(());
+                }
+            });
+
+        cx.spawn(async move |_project, _cx| {
+            // Keep the subscription alive until the release signal arrives.
+            let _release_subscription = release_subscription;
+            released_rx
+                .await
+                .map_err(|_| anyhow!("worktree release observer dropped before release"))?;
+            Ok(())
+        })
+    }
+
     pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut Context<Self>) {
         self.worktree_store.update(cx, |worktree_store, cx| {
             worktree_store.remove_worktree(id_to_remove, cx);
@@ -6089,7 +6116,6 @@ impl ProjectGroupKey {
         self.host.clone()
     }
 }
-
 pub struct PathMatchCandidateSet {
     pub snapshot: Snapshot,
     pub include_ignored: bool,

crates/sidebar/Cargo.toml 🔗

@@ -28,6 +28,7 @@ feature_flags.workspace = true
 fs.workspace = true
 git.workspace = true
 gpui.workspace = true
+log.workspace = true
 menu.workspace = true
 platform_title_bar.workspace = true
 project.workspace = true

crates/sidebar/src/sidebar.rs 🔗

@@ -5,6 +5,7 @@ use action_log::DiffStats;
 use agent_client_protocol::{self as acp};
 use agent_settings::AgentSettings;
 use agent_ui::thread_metadata_store::{ThreadMetadata, ThreadMetadataStore};
+use agent_ui::thread_worktree_archive;
 use agent_ui::threads_archive_view::{
     ThreadsArchiveView, ThreadsArchiveViewEvent, format_history_entry_timestamp,
 };
@@ -16,8 +17,8 @@ use chrono::{DateTime, Utc};
 use editor::Editor;
 use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _};
 use gpui::{
-    Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, KeyContext, ListState,
-    Pixels, Render, SharedString, WeakEntity, Window, WindowHandle, linear_color_stop,
+    Action as _, AnyElement, App, ClickEvent, Context, Entity, FocusHandle, Focusable, KeyContext,
+    ListState, Pixels, Render, SharedString, WeakEntity, Window, WindowHandle, linear_color_stop,
     linear_gradient, list, prelude::*, px,
 };
 use menu::{
@@ -34,6 +35,7 @@ use serde::{Deserialize, Serialize};
 use settings::Settings as _;
 use std::collections::{HashMap, HashSet};
 use std::mem;
+use std::path::PathBuf;
 use std::rc::Rc;
 use theme::ActiveTheme;
 use ui::{
@@ -45,8 +47,8 @@ use util::ResultExt as _;
 use util::path_list::{PathList, SerializedPathList};
 use workspace::{
     AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent,
-    Open, Sidebar as WorkspaceSidebar, SidebarSide, ToggleWorkspaceSidebar, Workspace, WorkspaceId,
-    sidebar_side_context_menu,
+    Open, Sidebar as WorkspaceSidebar, SidebarSide, Toast, ToggleWorkspaceSidebar, Workspace,
+    WorkspaceId, notifications::NotificationId, sidebar_side_context_menu,
 };
 
 use zed_actions::OpenRecent;
@@ -2182,33 +2184,127 @@ impl Sidebar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        ThreadMetadataStore::global(cx)
-            .update(cx, |store, cx| store.unarchive(&metadata.session_id, cx));
+        let session_id = metadata.session_id.clone();
 
-        if !metadata.folder_paths.paths().is_empty() {
-            let path_list = metadata.folder_paths.clone();
-            if let Some(workspace) = self.find_current_workspace_for_path_list(&path_list, cx) {
+        ThreadMetadataStore::global(cx).update(cx, |store, cx| store.unarchive(&session_id, cx));
+
+        if metadata.folder_paths.paths().is_empty() {
+            let active_workspace = self.multi_workspace.upgrade().and_then(|w| {
+                w.read(cx)
+                    .workspaces()
+                    .get(w.read(cx).active_workspace_index())
+                    .cloned()
+            });
+
+            if let Some(workspace) = active_workspace {
                 self.activate_thread_locally(&metadata, &workspace, window, cx);
-            } else if let Some((target_window, workspace)) =
-                self.find_open_workspace_for_path_list(&path_list, cx)
-            {
-                self.activate_thread_in_other_window(metadata, workspace, target_window, cx);
-            } else {
-                self.open_workspace_and_activate_thread(metadata, path_list, window, cx);
             }
             return;
         }
 
-        let active_workspace = self.multi_workspace.upgrade().and_then(|w| {
-            w.read(cx)
-                .workspaces()
-                .get(w.read(cx).active_workspace_index())
-                .cloned()
-        });
+        let store = ThreadMetadataStore::global(cx);
+        let task = store
+            .read(cx)
+            .get_archived_worktrees_for_thread(&session_id.0, cx);
+        let path_list = metadata.folder_paths.clone();
 
-        if let Some(workspace) = active_workspace {
-            self.activate_thread_locally(&metadata, &workspace, window, cx);
-        }
+        cx.spawn_in(window, async move |this, cx| {
+            let archived_worktrees = task.await?;
+
+            if archived_worktrees.is_empty() {
+                this.update_in(cx, |this, window, cx| {
+                    if let Some(workspace) =
+                        this.find_current_workspace_for_path_list(&path_list, cx)
+                    {
+                        this.activate_thread_locally(&metadata, &workspace, window, cx);
+                    } else if let Some((target_window, workspace)) =
+                        this.find_open_workspace_for_path_list(&path_list, cx)
+                    {
+                        this.activate_thread_in_other_window(
+                            metadata,
+                            workspace,
+                            target_window,
+                            cx,
+                        );
+                    } else {
+                        this.open_workspace_and_activate_thread(metadata, path_list, window, cx);
+                    }
+                })?;
+                return anyhow::Ok(());
+            }
+
+            let first = &archived_worktrees[0];
+            let main_repo_path = first.main_repo_path.clone();
+
+            cx.update(|_window, cx| {
+                store.update(cx, |store, cx| {
+                    store.set_pending_worktree_restore(&session_id, Some(main_repo_path), cx);
+                });
+            })?;
+
+            let mut path_replacements: Vec<(PathBuf, PathBuf)> = Vec::new();
+            for row in &archived_worktrees {
+                match thread_worktree_archive::restore_worktree_via_git(row, &mut *cx).await {
+                    Ok(restored_path) => {
+                        thread_worktree_archive::cleanup_archived_worktree_record(row, &mut *cx)
+                            .await;
+                        path_replacements.push((row.worktree_path.clone(), restored_path));
+                    }
+                    Err(error) => {
+                        log::error!("Failed to restore worktree: {error:#}");
+                        cx.update(|_window, cx| {
+                            store.update(cx, |store, cx| {
+                                store.set_pending_worktree_restore(&session_id, None, cx);
+                            });
+                        })?;
+                        this.update_in(cx, |this, _window, cx| {
+                            if let Some(multi_workspace) = this.multi_workspace.upgrade() {
+                                let workspace = multi_workspace.read(cx).workspace().clone();
+                                workspace.update(cx, |workspace, cx| {
+                                    struct RestoreWorktreeErrorToast;
+                                    workspace.show_toast(
+                                        Toast::new(
+                                            NotificationId::unique::<RestoreWorktreeErrorToast>(),
+                                            format!("Failed to restore worktree: {error:#}"),
+                                        )
+                                        .autohide(),
+                                        cx,
+                                    );
+                                });
+                            }
+                        })
+                        .ok();
+                        return anyhow::Ok(());
+                    }
+                }
+            }
+
+            if !path_replacements.is_empty() {
+                cx.update(|_window, cx| {
+                    store.update(cx, |store, cx| {
+                        store.complete_worktree_restore(&session_id, &path_replacements, cx);
+                    });
+                })?;
+
+                let updated_metadata =
+                    cx.update(|_window, cx| store.read(cx).entry(&session_id).cloned())?;
+
+                if let Some(updated_metadata) = updated_metadata {
+                    let new_paths = updated_metadata.folder_paths.clone();
+                    this.update_in(cx, |this, window, cx| {
+                        this.open_workspace_and_activate_thread(
+                            updated_metadata,
+                            new_paths,
+                            window,
+                            cx,
+                        );
+                    })?;
+                }
+            }
+
+            anyhow::Ok(())
+        })
+        .detach_and_log_err(cx);
     }
 
     fn expand_selected_entry(
@@ -2357,7 +2453,17 @@ impl Sidebar {
         window: &mut Window,
         cx: &mut Context<Self>,
     ) {
-        ThreadMetadataStore::global(cx).update(cx, |store, cx| store.archive(session_id, cx));
+        let current_workspace = self.active_entry_workspace().cloned();
+        let Some(multi_workspace_handle) = window.window_handle().downcast::<MultiWorkspace>()
+        else {
+            return;
+        };
+        thread_worktree_archive::archive_thread(
+            session_id,
+            current_workspace,
+            multi_workspace_handle,
+            cx,
+        );
 
         // If we're archiving the currently focused thread, move focus to the
         // nearest thread within the same project group. We never cross group
@@ -2821,6 +2927,18 @@ impl Sidebar {
                     })
                     .collect(),
             )
+            .pending_worktree_restore(thread.metadata.pending_worktree_restore.is_some())
+            .when(thread.metadata.pending_worktree_restore.is_some(), |this| {
+                let session_id = thread.metadata.session_id.clone();
+                this.on_cancel_restore(cx.listener(
+                    move |_this, _event: &ClickEvent, _window, cx| {
+                        let store = ThreadMetadataStore::global(cx);
+                        store.update(cx, |store, cx| {
+                            store.set_pending_worktree_restore(&session_id, None, cx);
+                        });
+                    },
+                ))
+            })
             .timestamp(timestamp)
             .highlight_positions(thread.highlight_positions.to_vec())
             .title_generating(thread.is_title_generating)

crates/sidebar/src/sidebar_tests.rs 🔗

@@ -166,6 +166,7 @@ fn save_thread_metadata(
             folder_paths,
             main_worktree_paths,
             archived: false,
+            pending_worktree_restore: None,
         };
         ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
     });
@@ -736,6 +737,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    pending_worktree_restore: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -759,6 +761,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    pending_worktree_restore: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -782,6 +785,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    pending_worktree_restore: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -805,6 +809,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    pending_worktree_restore: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -828,6 +833,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
                     updated_at: Utc::now(),
                     created_at: Some(Utc::now()),
                     archived: false,
+                    pending_worktree_restore: None,
                 },
                 icon: IconName::ZedAgent,
                 icon_from_external_svg: None,
@@ -2059,6 +2065,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 folder_paths: PathList::default(),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             &workspace_a,
             window,
@@ -2114,6 +2121,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
                 folder_paths: PathList::default(),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             &workspace_b,
             window,
@@ -3554,6 +3562,7 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works
                 folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3617,6 +3626,7 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace(
                 folder_paths: PathList::new(&[std::path::PathBuf::from("/project-b")]),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3680,6 +3690,7 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace(
                 folder_paths: PathList::default(),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3735,6 +3746,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut
                 folder_paths: path_list_b,
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3785,6 +3797,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m
                 folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3862,6 +3875,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t
                 folder_paths: PathList::new(&[PathBuf::from("/project-b")]),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -3938,6 +3952,7 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths
                 folder_paths: PathList::new(&[PathBuf::from("/project-a")]),
                 main_worktree_paths: PathList::default(),
                 archived: false,
+                pending_worktree_restore: None,
             },
             window,
             cx,
@@ -4925,6 +4940,7 @@ mod property_test {
             folder_paths: path_list,
             main_worktree_paths,
             archived: false,
+            pending_worktree_restore: None,
         };
         cx.update(|_, cx| {
             ThreadMetadataStore::global(cx)

crates/ui/src/components/ai/thread_item.rs 🔗

@@ -46,6 +46,8 @@ pub struct ThreadItem {
     project_paths: Option<Arc<[PathBuf]>>,
     project_name: Option<SharedString>,
     worktrees: Vec<ThreadItemWorktreeInfo>,
+    pending_worktree_restore: bool,
+    on_cancel_restore: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
     on_click: Option<Box<dyn Fn(&ClickEvent, &mut Window, &mut App) + 'static>>,
     on_hover: Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>,
     action_slot: Option<AnyElement>,
@@ -78,6 +80,8 @@ impl ThreadItem {
             project_paths: None,
             project_name: None,
             worktrees: Vec::new(),
+            pending_worktree_restore: false,
+            on_cancel_restore: None,
             on_click: None,
             on_hover: Box::new(|_, _, _| {}),
             action_slot: None,
@@ -171,6 +175,23 @@ impl ThreadItem {
         self
     }
 
+    /// Builder setter: marks this item as having an in-flight git worktree
+    /// restore, which renders the "Restoring worktree…" spinner row.
+    pub fn pending_worktree_restore(mut self, pending: bool) -> Self {
+        self.pending_worktree_restore = pending;
+        self
+    }
+
+    /// Builder setter: registers the click handler for the cancel button
+    /// shown beside the restore spinner while a restore is pending.
+    pub fn on_cancel_restore(
+        mut self,
+        handler: impl Fn(&ClickEvent, &mut Window, &mut App) + 'static,
+    ) -> Self {
+        self.on_cancel_restore = Some(Box::new(handler));
+        self
+    }
+
     pub fn hovered(mut self, hovered: bool) -> Self {
         self.hovered = hovered;
         self
@@ -211,7 +228,7 @@ impl ThreadItem {
 }
 
 impl RenderOnce for ThreadItem {
-    fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
+    fn render(mut self, _: &mut Window, cx: &mut App) -> impl IntoElement {
         let color = cx.theme().colors();
         let sidebar_base_bg = color
             .title_bar_background
@@ -359,7 +376,7 @@ impl RenderOnce for ThreadItem {
 
         let has_project_name = self.project_name.is_some();
         let has_project_paths = project_paths.is_some();
-        let has_worktree = !self.worktrees.is_empty();
+        let has_worktree = !self.worktrees.is_empty() || self.pending_worktree_restore;
         let has_timestamp = !self.timestamp.is_empty();
         let timestamp = self.timestamp;
 
@@ -488,6 +505,44 @@ impl RenderOnce for ThreadItem {
                         );
                     }
 
+                    if self.pending_worktree_restore {
+                        let on_cancel = self.on_cancel_restore.take();
+                        let restore_element = h_flex()
+                            .id(format!("{}-worktree-restore", self.id.clone()))
+                            .gap_1()
+                            .child(
+                                Icon::new(IconName::LoadCircle)
+                                    .size(IconSize::XSmall)
+                                    .color(Color::Muted)
+                                    .with_rotate_animation(2),
+                            )
+                            .child(
+                                Label::new("Restoring worktree\u{2026}")
+                                    .size(LabelSize::Small)
+                                    .color(Color::Muted),
+                            )
+                            .when_some(on_cancel, |this, on_cancel| {
+                                this.child(
+                                    IconButton::new(
+                                        format!("{}-cancel-restore", self.id.clone()),
+                                        IconName::Close,
+                                    )
+                                    .icon_size(IconSize::XSmall)
+                                    .icon_color(Color::Muted)
+                                    .tooltip(Tooltip::text("Cancel Restore"))
+                                    .on_click(
+                                        move |event, window, cx| {
+                                            cx.stop_propagation();
+                                            on_cancel(event, window, cx);
+                                        },
+                                    ),
+                                )
+                            })
+                            .tooltip(Tooltip::text("Restoring the Git worktree for this thread"))
+                            .into_any_element();
+                        worktree_labels.push(restore_element);
+                    }
+
                     this.child(
                         h_flex()
                             .min_w_0()

crates/workspace/src/multi_workspace.rs 🔗

@@ -309,6 +309,11 @@ impl MultiWorkspace {
         self.sidebar.as_deref()
     }
 
+    /// Returns the ID of the OS window hosting this multi-workspace.
+    pub fn window_id(&self) -> WindowId {
+        self.window_id
+    }
+
     pub fn set_sidebar_overlay(&mut self, overlay: Option<AnyView>, cx: &mut Context<Self>) {
         self.sidebar_overlay = overlay;
         cx.notify();

crates/workspace/src/workspace.rs 🔗

@@ -3300,6 +3300,17 @@ impl Workspace {
         state.task.clone().unwrap()
     }
 
+    /// Saves all open items before a project root (folder) is removed.
+    /// Thin wrapper over `save_all_internal` with `SaveIntent::Close`; the
+    /// returned `bool` presumably signals it is safe to proceed — TODO confirm.
+    pub fn save_for_root_removal(
+        &mut self,
+        window: &mut Window,
+        cx: &mut Context<Self>,
+    ) -> Task<Result<bool>> {
+        self.save_all_internal(SaveIntent::Close, window, cx)
+    }
+
     fn save_all_internal(
         &mut self,
         mut save_intent: SaveIntent,
@@ -8715,6 +8723,22 @@ pub async fn restore_multiworkspace(
             .ok();
     }
 
+    if !state.project_group_keys.is_empty() {
+        window_handle
+            .update(cx, |multi_workspace, _window, _cx| {
+                for serialized_key in &state.project_group_keys {
+                    let paths = PathList::deserialize(&serialized_key.path_list);
+                    let host = match &serialized_key.location {
+                        SerializedWorkspaceLocation::Local => None,
+                        SerializedWorkspaceLocation::Remote(opts) => Some(opts.clone()),
+                    };
+                    let key = ProjectGroupKey::new(host, paths);
+                    multi_workspace.add_project_group_key(key);
+                }
+            })
+            .ok();
+    }
+
     if state.sidebar_open {
         window_handle
             .update(cx, |multi_workspace, _, cx| {