From c60d31a726b014e1780f294a93208f4d3e6c60b3 Mon Sep 17 00:00:00 2001
From: Mayank Verma
Date: Mon, 10 Nov 2025 07:54:20 +0530
Subject: [PATCH] git: Track worktree references to resolve stale repository state (#41592)

Closes #35997
Closes #38018
Closes #41516

Release Notes:

- Fixes stale git repositories persisting after removal
---
 crates/project/src/git_store.rs     |  59 ++++++++++++++-
 crates/project/src/project_tests.rs | 113 ++++++++++++++++++++++++++++
 2 files changed, 170 insertions(+), 2 deletions(-)

diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs
index 3ea02d9e49a5edbd951a46a55eed8f48953c12b7..8767e5e2931288519d78739f67292728b6d5b77d 100644
--- a/crates/project/src/git_store.rs
+++ b/crates/project/src/git_store.rs
@@ -55,9 +55,10 @@ use rpc::{
     proto::{self, git_reset, split_repository_update},
 };
 use serde::Deserialize;
+use settings::WorktreeId;
 use std::{
     cmp::Ordering,
-    collections::{BTreeSet, VecDeque},
+    collections::{BTreeSet, HashSet, VecDeque},
     future::Future,
     mem,
     ops::Range,
@@ -89,6 +90,7 @@ pub struct GitStore {
     buffer_store: Entity<BufferStore>,
     worktree_store: Entity<WorktreeStore>,
     repositories: HashMap<RepositoryId, Entity<Repository>>,
+    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
     active_repo_id: Option<RepositoryId>,
     #[allow(clippy::type_complexity)]
     loading_diffs:
@@ -409,6 +411,7 @@ impl GitStore {
             buffer_store,
             worktree_store,
             repositories: HashMap::default(),
+            worktree_ids: HashMap::default(),
             active_repo_id: None,
             _subscriptions,
             loading_diffs: HashMap::default(),
@@ -1167,6 +1170,7 @@ impl GitStore {
                     return;
                 }
                 self.update_repositories_from_worktree(
+                    *worktree_id,
                     project_environment.clone(),
                     next_repository_id.clone(),
                     downstream
@@ -1178,6 +1182,45 @@ impl GitStore {
                 );
                 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
             }
+            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
+                let repos_without_worktree: Vec<RepositoryId> = self
+                    .worktree_ids
+                    .iter_mut()
+                    .filter_map(|(repo_id, worktree_ids)| {
+                        worktree_ids.remove(worktree_id);
+                        if worktree_ids.is_empty() {
+                            Some(*repo_id)
+                        } else {
+                            None
+                        }
+                    })
+                    .collect();
+                let is_active_repo_removed = repos_without_worktree
+                    .iter()
+                    .any(|repo_id| self.active_repo_id == Some(*repo_id));
+
+                for repo_id in repos_without_worktree {
+                    self.repositories.remove(&repo_id);
+                    self.worktree_ids.remove(&repo_id);
+                    if let Some(updates_tx) =
+                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
+                    {
+                        updates_tx
+                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
+                            .ok();
+                    }
+                }
+
+                if is_active_repo_removed {
+                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
+                        self.active_repo_id = Some(repo_id);
+                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
+                    } else {
+                        self.active_repo_id = None;
+                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
+                    }
+                }
+            }
             _ => {}
         }
     }
@@ -1228,6 +1271,7 @@ impl GitStore {
     /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
     fn update_repositories_from_worktree(
         &mut self,
+        worktree_id: WorktreeId,
         project_environment: Entity<ProjectEnvironment>,
         next_repository_id: Arc<AtomicU64>,
         updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
@@ -1245,15 +1289,25 @@
                 || Some(&existing_work_directory_abs_path)
                     == update.new_work_directory_abs_path.as_ref()
             }) {
+                let repo_id = *id;
                 if let Some(new_work_directory_abs_path) =
                     update.new_work_directory_abs_path.clone()
                 {
+                    self.worktree_ids
+                        .entry(repo_id)
+                        .or_insert_with(HashSet::new)
+                        .insert(worktree_id);
                     existing.update(cx, |existing, cx| {
                         existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                         existing.schedule_scan(updates_tx.clone(), cx);
                     });
                 } else {
-                    removed_ids.push(*id);
+                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
+                        worktree_ids.remove(&worktree_id);
+                        if worktree_ids.is_empty() {
+                            removed_ids.push(repo_id);
+                        }
+                    }
                 }
             } else if let UpdatedGitRepository {
                 new_work_directory_abs_path: Some(work_directory_abs_path),
@@ -1291,6 +1345,7 @@ impl GitStore {
                 self._subscriptions
                     .push(cx.subscribe(&repo, Self::on_jobs_updated));
                 self.repositories.insert(id, repo);
+                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                 cx.emit(GitStoreEvent::RepositoryAdded);
                 self.active_repo_id.get_or_insert_with(|| {
                     cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs
index 65a7803d64a122c4f9a3774a09a47ec91455da5f..1f76b905be2843605b32918e6d3bf1a037ced636 100644
--- a/crates/project/src/project_tests.rs
+++ b/crates/project/src/project_tests.rs
@@ -10477,3 +10477,116 @@ async fn test_find_project_path_abs(
         );
     });
 }
+
+#[gpui::test]
+async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "a": {
+                ".git": {},
+                "src": {
+                    "main.rs": "fn main() {}",
+                }
+            },
+            "b": {
+                ".git": {},
+                "src": {
+                    "main.rs": "fn main() {}",
+                },
+                "script": {
+                    "run.sh": "#!/bin/bash"
+                }
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs.clone(),
+        [
+            path!("/root/a").as_ref(),
+            path!("/root/b/script").as_ref(),
+            path!("/root/b").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
+    scan_complete.await;
+
+    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
+    assert_eq!(worktrees.len(), 3);
+
+    let worktree_id_by_abs_path = worktrees
+        .into_iter()
+        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
+        .collect::<HashMap<_, _>>();
+    let worktree_id = worktree_id_by_abs_path
+        .get(Path::new(path!("/root/b/script")))
+        .unwrap();
+
+    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
+    assert_eq!(repos.len(), 2);
+
+    project.update(cx, |project, cx| {
+        project.remove_worktree(*worktree_id, cx);
+    });
+    cx.run_until_parked();
+
+    let mut repo_paths = project
+        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
+        .values()
+        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
+        .collect::<Vec<_>>();
+    repo_paths.sort();
+
+    pretty_assertions::assert_eq!(
+        repo_paths,
+        [
+            Path::new(path!("/root/a")).into(),
+            Path::new(path!("/root/b")).into(),
+        ]
+    );
+
+    let active_repo_path = project
+        .read_with(cx, |p, cx| {
+            p.active_repository(cx)
+                .map(|r| r.read(cx).work_directory_abs_path.clone())
+        })
+        .unwrap();
+    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));
+
+    let worktree_id = worktree_id_by_abs_path
+        .get(Path::new(path!("/root/a")))
+        .unwrap();
+    project.update(cx, |project, cx| {
+        project.remove_worktree(*worktree_id, cx);
+    });
+    cx.run_until_parked();
+
+    let active_repo_path = project
+        .read_with(cx, |p, cx| {
+            p.active_repository(cx)
+                .map(|r| r.read(cx).work_directory_abs_path.clone())
+        })
+        .unwrap();
+    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));
+
+    let worktree_id = worktree_id_by_abs_path
+        .get(Path::new(path!("/root/b")))
+        .unwrap();
+    project.update(cx, |project, cx| {
+        project.remove_worktree(*worktree_id, cx);
+    });
+    cx.run_until_parked();
+
+    let active_repo_path = project.read_with(cx, |p, cx| {
+        p.active_repository(cx)
+            .map(|r| r.read(cx).work_directory_abs_path.clone())
+    });
+    assert!(active_repo_path.is_none());
+}