diff --git a/Cargo.lock b/Cargo.lock index 28d4d4c0b43f9efe83e6c48cf523d9bd41f4f4d7..a3ece33b1495aef7a38f96619945284ab663b8cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -396,6 +396,7 @@ dependencies = [ "streaming_diff", "task", "telemetry", + "tempfile", "terminal", "terminal_view", "text", diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 78f035106d37faa16a1494a138dfa38ed304dd8d..add2415b4cde0d8cf623e91bff8f2dd5463527ea 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -137,6 +137,6 @@ remote_server = { workspace = true, features = ["test-support"] } semver.workspace = true reqwest_client.workspace = true - +tempfile.workspace = true tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/agent_ui/src/thread_worktree_archive.rs b/crates/agent_ui/src/thread_worktree_archive.rs index 53c0aadb26485b35b8c64d4714b70992ea6c7269..758171f1a88d173c07646283da1102cd1798b2d6 100644 --- a/crates/agent_ui/src/thread_worktree_archive.rs +++ b/crates/agent_ui/src/thread_worktree_archive.rs @@ -7,8 +7,10 @@ use anyhow::{Context as _, Result, anyhow}; use gpui::{App, AsyncApp, Entity, Task}; use project::{ LocalProjectFlags, Project, WorktreeId, - git_store::{Repository, resolve_git_worktree_to_main_repo}, + git_store::{Repository, resolve_git_worktree_to_main_repo, worktrees_directory_for_repo}, + project_settings::ProjectSettings, }; +use settings::Settings; use util::ResultExt; use workspace::{AppState, MultiWorkspace, Workspace}; @@ -222,7 +224,77 @@ async fn remove_root_after_worktree_removal( .map_err(|_| anyhow!("git worktree removal was canceled"))?; // Keep _temp_project alive until after the await so the headless project isn't dropped mid-operation drop(_temp_project); - result + result.context("git worktree removal failed")?; + + remove_empty_parent_dirs_up_to_worktrees_base( + root.root_path.clone(), + root.main_repo_path.clone(), + cx, + ) + .await; + + Ok(()) +} + +/// After `git worktree 
remove` deletes the worktree directory, clean up any
+/// empty parent directories between it and the Zed-managed worktrees base
+/// directory (configured via `git.worktree_directory`). The base directory
+/// itself is never removed.
+///
+/// If the base directory is not an ancestor of `root_path`, no parent
+/// directories are removed.
+async fn remove_empty_parent_dirs_up_to_worktrees_base(
+    root_path: PathBuf,
+    main_repo_path: PathBuf,
+    cx: &mut AsyncApp,
+) {
+    let worktrees_base = cx.update(|cx| {
+        let setting = &ProjectSettings::get_global(cx).git.worktree_directory;
+        worktrees_directory_for_repo(&main_repo_path, setting).log_err()
+    });
+
+    if let Ok(Some(worktrees_base)) = worktrees_base {
+        cx.background_executor()
+            .spawn(async move {
+                remove_empty_ancestors(&root_path, &worktrees_base);
+            })
+            .await;
+    }
+}
+
+/// Removes empty directories between `child_path` and `base_path`.
+///
+/// Walks upward from `child_path`, removing each empty parent directory,
+/// stopping before `base_path` itself is removed. If `base_path` is not
+/// an ancestor of `child_path`, nothing is removed. If any directory is
+/// non-empty (i.e. `std::fs::remove_dir` fails), the walk stops.
+fn remove_empty_ancestors(child_path: &Path, base_path: &Path) {
+    let mut current = child_path;
+    while let Some(parent) = current.parent() {
+        if parent == base_path {
+            break;
+        }
+        if !parent.starts_with(base_path) {
+            break;
+        }
+        match std::fs::remove_dir(parent) {
+            Ok(()) => {
+                log::info!("Removed empty parent directory: {}", parent.display());
+            }
+            Err(err) if err.kind() == std::io::ErrorKind::DirectoryNotEmpty => break,
+            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
+                // Already removed by a concurrent process; keep walking upward.
+            }
+            Err(err) => {
+                log::error!(
+                    "Failed to remove parent directory {}: {err}",
+                    parent.display()
+                );
+                break;
+            }
+        }
+        current = parent;
+    }
 }
 
 /// Finds a live `Repository` entity for the given path, or creates a temporary
@@ -681,7 +753,6 @@ fn current_app_state(cx: &mut AsyncApp) -> Option<Arc<AppState>> {
         .map(|workspace| workspace.read(cx).app_state().clone())
     })
 }
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -691,6 +762,7 @@ mod tests {
     use project::Project;
     use serde_json::json;
     use settings::SettingsStore;
+    use tempfile::TempDir;
     use workspace::MultiWorkspace;
 
     fn init_test(cx: &mut TestAppContext) {
@@ -703,6 +775,117 @@ mod tests {
         });
     }
 
+    #[test]
+    fn test_remove_empty_ancestors_single_empty_parent() {
+        let tmp = TempDir::new().unwrap();
+        let base = tmp.path().join("worktrees");
+        let branch_dir = base.join("my-branch");
+        let child = branch_dir.join("zed");
+
+        std::fs::create_dir_all(&child).unwrap();
+        // Simulate git worktree remove having deleted the child.
+        std::fs::remove_dir(&child).unwrap();
+
+        assert!(branch_dir.exists());
+        remove_empty_ancestors(&child, &base);
+        assert!(!branch_dir.exists(), "empty parent should be removed");
+        assert!(base.exists(), "base directory should be preserved");
+    }
+
+    #[test]
+    fn test_remove_empty_ancestors_nested_empty_parents() {
+        let tmp = TempDir::new().unwrap();
+        let base = tmp.path().join("worktrees");
+        // Branch name with slash creates nested dirs: fix/thing/zed
+        let child = base.join("fix").join("thing").join("zed");
+
+        std::fs::create_dir_all(&child).unwrap();
+        std::fs::remove_dir(&child).unwrap();
+
+        assert!(base.join("fix").join("thing").exists());
+        remove_empty_ancestors(&child, &base);
+        assert!(!base.join("fix").join("thing").exists());
+        assert!(
+            !base.join("fix").exists(),
+            "all empty ancestors should be removed"
+        );
+        assert!(base.exists(), "base directory should be preserved");
+    }
+
+    #[test]
+    fn test_remove_empty_ancestors_stops_at_non_empty_parent() {
+        let tmp =
TempDir::new().unwrap(); + let base = tmp.path().join("worktrees"); + let branch_dir = base.join("my-branch"); + let child = branch_dir.join("zed"); + let sibling = branch_dir.join("other-file.txt"); + + std::fs::create_dir_all(&child).unwrap(); + std::fs::write(&sibling, "content").unwrap(); + std::fs::remove_dir(&child).unwrap(); + + remove_empty_ancestors(&child, &base); + assert!(branch_dir.exists(), "non-empty parent should be preserved"); + assert!(sibling.exists()); + } + + #[test] + fn test_remove_empty_ancestors_not_an_ancestor() { + let tmp = TempDir::new().unwrap(); + let base = tmp.path().join("worktrees"); + let unrelated = tmp.path().join("other-place").join("branch").join("zed"); + + std::fs::create_dir_all(&base).unwrap(); + std::fs::create_dir_all(&unrelated).unwrap(); + std::fs::remove_dir(&unrelated).unwrap(); + + let parent = unrelated.parent().unwrap(); + assert!(parent.exists()); + remove_empty_ancestors(&unrelated, &base); + assert!(parent.exists(), "should not remove dirs outside base"); + } + + #[test] + fn test_remove_empty_ancestors_child_is_direct_child_of_base() { + let tmp = TempDir::new().unwrap(); + let base = tmp.path().join("worktrees"); + let child = base.join("zed"); + + std::fs::create_dir_all(&child).unwrap(); + std::fs::remove_dir(&child).unwrap(); + + remove_empty_ancestors(&child, &base); + assert!(base.exists(), "base directory should be preserved"); + } + + #[test] + fn test_remove_empty_ancestors_partially_non_empty_chain() { + let tmp = TempDir::new().unwrap(); + let base = tmp.path().join("worktrees"); + // Structure: base/a/b/c/zed where a/ has another child besides b/ + let child = base.join("a").join("b").join("c").join("zed"); + let other_in_a = base.join("a").join("other-branch"); + + std::fs::create_dir_all(&child).unwrap(); + std::fs::create_dir_all(&other_in_a).unwrap(); + std::fs::remove_dir(&child).unwrap(); + + remove_empty_ancestors(&child, &base); + assert!( + !base.join("a").join("b").join("c").exists(), + 
"c/ should be removed (empty)" + ); + assert!( + !base.join("a").join("b").exists(), + "b/ should be removed (empty)" + ); + assert!( + base.join("a").exists(), + "a/ should be preserved (has other-branch sibling)" + ); + assert!(other_in_a.exists()); + } + #[gpui::test] async fn test_build_root_plan_returns_none_for_main_worktree(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 48bda326a975a9a3047a4ed63e9e6d013c439d88..bdb637c37832995fc6e3dd3bb8ff567ff995b868 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -16,14 +16,16 @@ use agent_ui::{ use chrono::{DateTime, Utc}; use editor::Editor; use gpui::{ - Action as _, AnyElement, App, Context, DismissEvent, Entity, FocusHandle, Focusable, + Action as _, AnyElement, App, Context, DismissEvent, Entity, EntityId, FocusHandle, Focusable, KeyContext, ListState, Pixels, Render, SharedString, Task, WeakEntity, Window, WindowHandle, linear_color_stop, linear_gradient, list, prelude::*, px, }; use menu::{ Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious, }; -use project::{AgentId, AgentRegistryStore, Event as ProjectEvent, linked_worktree_short_name}; +use project::{ + AgentId, AgentRegistryStore, Event as ProjectEvent, WorktreeId, linked_worktree_short_name, +}; use recent_projects::sidebar_recent_projects::SidebarRecentProjects; use remote::RemoteConnectionOptions; use ui::utils::platform_title_bar_height; @@ -32,7 +34,7 @@ use serde::{Deserialize, Serialize}; use settings::Settings as _; use std::collections::{HashMap, HashSet}; use std::mem; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::rc::Rc; use theme::ActiveTheme; use ui::{ @@ -44,7 +46,7 @@ use util::ResultExt as _; use util::path_list::PathList; use workspace::{ AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent, - NextProject, NextThread, Open, 
PreviousProject, PreviousThread, ProjectGroupKey,
+    NextProject, NextThread, Open, PreviousProject, PreviousThread, ProjectGroupKey, SaveIntent,
     ShowFewerThreads, ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide, Toast,
     ToggleWorkspaceSidebar, Workspace, notifications::NotificationId, sidebar_side_context_menu,
 };
@@ -3176,27 +3178,102 @@ impl Sidebar {
             is_linked_worktree.then_some(workspace)
         });
 
-        if let Some(workspace_to_remove) = workspace_to_remove {
+        // Also find workspaces for root plans that aren't covered by
+        // workspace_to_remove. For workspaces that exclusively contain
+        // worktrees being archived, remove the whole workspace. For
+        // "mixed" workspaces (containing both archived and non-archived
+        // worktrees), close only the editor items referencing the
+        // archived worktrees so their Entity<Worktree> handles are
+        // dropped without destroying the user's workspace layout.
+        let mut workspaces_to_remove: Vec<Entity<Workspace>> =
+            workspace_to_remove.into_iter().collect();
+        let mut close_item_tasks: Vec<Task<anyhow::Result<()>>> = Vec::new();
+
+        let archive_paths: HashSet<&Path> = roots_to_archive
+            .iter()
+            .map(|root| root.root_path.as_path())
+            .collect();
+
+        // Classify workspaces into "exclusive" (all worktrees archived)
+        // and "mixed" (some worktrees archived, some not).
+        let mut mixed_workspaces: Vec<(Entity<Workspace>, Vec<WorktreeId>)> = Vec::new();
+
+        if let Some(multi_workspace) = self.multi_workspace.upgrade() {
+            let all_workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect();
+
+            for workspace in all_workspaces {
+                if workspaces_to_remove.contains(&workspace) {
+                    continue;
+                }
+
+                let project = workspace.read(cx).project().read(cx);
+                let visible_worktrees: Vec<_> = project
+                    .visible_worktrees(cx)
+                    .map(|wt| (wt.read(cx).id(), wt.read(cx).abs_path()))
+                    .collect();
+
+                let archived_worktree_ids: Vec<WorktreeId> = visible_worktrees
+                    .iter()
+                    .filter(|(_, path)| archive_paths.contains(path.as_ref()))
+                    .map(|(id, _)| *id)
+                    .collect();
+
+                if archived_worktree_ids.is_empty() {
+                    continue;
+                }
+
+                if visible_worktrees.len() == archived_worktree_ids.len() {
+                    workspaces_to_remove.push(workspace);
+                } else {
+                    mixed_workspaces.push((workspace, archived_worktree_ids));
+                }
+            }
+        }
+
+        // For mixed workspaces, close only items belonging to the
+        // worktrees being archived.
+        for (workspace, archived_worktree_ids) in &mixed_workspaces {
+            let panes: Vec<_> = workspace.read(cx).panes().to_vec();
+            for pane in panes {
+                let items_to_close: Vec<EntityId> = pane
+                    .read(cx)
+                    .items()
+                    .filter(|item| {
+                        item.project_path(cx)
+                            .is_some_and(|pp| archived_worktree_ids.contains(&pp.worktree_id))
+                    })
+                    .map(|item| item.item_id())
+                    .collect();
+
+                if !items_to_close.is_empty() {
+                    let task = pane.update(cx, |pane, cx| {
+                        pane.close_items(window, cx, SaveIntent::Close, &|item_id| {
+                            items_to_close.contains(&item_id)
+                        })
+                    });
+                    close_item_tasks.push(task);
+                }
+            }
+        }
+
+        if !workspaces_to_remove.is_empty() {
             let multi_workspace = self.multi_workspace.upgrade().unwrap();
             let session_id = session_id.clone();
-            // For the workspace-removal fallback, use the neighbor's workspace
-            // paths if available, otherwise fall back to the project group key.
let fallback_paths = neighbor .as_ref() .map(|(_, paths)| paths.clone()) .unwrap_or_else(|| { - workspace_to_remove - .read(cx) - .project_group_key(cx) - .path_list() - .clone() + workspaces_to_remove + .first() + .map(|ws| ws.read(cx).project_group_key(cx).path_list().clone()) + .unwrap_or_default() }); - let excluded = [workspace_to_remove.clone()]; + let excluded = workspaces_to_remove.clone(); let remove_task = multi_workspace.update(cx, |mw, cx| { mw.remove( - [workspace_to_remove], + workspaces_to_remove, move |this, window, cx| { this.find_or_create_local_workspace(fallback_paths, &excluded, window, cx) }, @@ -3208,23 +3285,56 @@ impl Sidebar { let neighbor_metadata = neighbor.map(|(metadata, _)| metadata); let thread_folder_paths = thread_folder_paths.clone(); cx.spawn_in(window, async move |this, cx| { - let removed = remove_task.await?; - if removed { - this.update_in(cx, |this, window, cx| { - let in_flight = thread_id.and_then(|tid| { - this.start_archive_worktree_task(tid, roots_to_archive, cx) - }); - this.archive_and_activate( - &session_id, - thread_id, - neighbor_metadata.as_ref(), - thread_folder_paths.as_ref(), - in_flight, - window, - cx, - ); - })?; + if !remove_task.await? 
{ + return anyhow::Ok(()); + } + + for task in close_item_tasks { + let result: anyhow::Result<()> = task.await; + result.log_err(); } + + this.update_in(cx, |this, window, cx| { + let in_flight = thread_id.and_then(|tid| { + this.start_archive_worktree_task(tid, roots_to_archive, cx) + }); + this.archive_and_activate( + &session_id, + thread_id, + neighbor_metadata.as_ref(), + thread_folder_paths.as_ref(), + in_flight, + window, + cx, + ); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } else if !close_item_tasks.is_empty() { + let session_id = session_id.clone(); + let neighbor_metadata = neighbor.map(|(metadata, _)| metadata); + let thread_folder_paths = thread_folder_paths.clone(); + cx.spawn_in(window, async move |this, cx| { + for task in close_item_tasks { + let result: anyhow::Result<()> = task.await; + result.log_err(); + } + + this.update_in(cx, |this, window, cx| { + let in_flight = thread_id.and_then(|tid| { + this.start_archive_worktree_task(tid, roots_to_archive, cx) + }); + this.archive_and_activate( + &session_id, + thread_id, + neighbor_metadata.as_ref(), + thread_folder_paths.as_ref(), + in_flight, + window, + cx, + ); + })?; anyhow::Ok(()) }) .detach_and_log_err(cx); diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index 6bbe9f3a2f64460b804300c38f338be29f49a55e..104f835cc1393077557d8072f1c283ddbdbbc801 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -16,7 +16,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::path_list::PathList; +use util::{path_list::PathList, rel_path::rel_path}; fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { @@ -10168,3 +10168,369 @@ async fn test_remote_project_integration_does_not_briefly_render_as_separate_pro entries_after_update, ); } + +#[gpui::test] +async fn test_archive_removes_worktree_even_when_workspace_paths_diverge(cx: &mut TestAppContext) { + // When the thread's folder_paths don't exactly match 
any workspace's
+    // root paths (e.g. because a folder was added to the workspace after
+    // the thread was created), workspace_to_remove is None. But the linked
+    // worktree workspace still needs to be removed so that its worktree
+    // entities are released, allowing git worktree removal to proceed.
+    //
+    // With the fix, archive_thread scans roots_to_archive for any linked
+    // worktree workspaces and includes them in the removal set, even when
+    // the thread's folder_paths don't match the workspace's root paths.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/project",
+        serde_json::json!({
+            ".git": {
+                "worktrees": {
+                    "feature-a": {
+                        "commondir": "../../",
+                        "HEAD": "ref: refs/heads/feature-a",
+                    },
+                },
+            },
+            "src": {},
+        }),
+    )
+    .await;
+
+    fs.insert_tree(
+        "/wt-feature-a",
+        serde_json::json!({
+            ".git": "gitdir: /project/.git/worktrees/feature-a",
+            "src": {
+                "main.rs": "fn main() {}",
+            },
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/project/.git"),
+        false,
+        git::repository::Worktree {
+            path: PathBuf::from("/wt-feature-a"),
+            ref_name: Some("refs/heads/feature-a".into()),
+            sha: "abc".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+    let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+    let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await;
+
+    main_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+    worktree_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) =
+        cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    multi_workspace.update_in(cx, |mw, window, cx| {
+        mw.test_add_workspace(worktree_project.clone(), window, cx)
+    });
+
+    // Save thread metadata using folder_paths that DON'T match the
+    //
workspace's root paths. This simulates the case where the workspace's + // paths diverged (e.g. a folder was added after thread creation). + // This causes workspace_to_remove to be None because + // workspace_for_paths can't find a workspace with these exact paths. + let wt_thread_id = acp::SessionId::new(Arc::from("worktree-thread")); + save_thread_metadata_with_main_paths( + "worktree-thread", + "Worktree Thread", + PathList::new(&[ + PathBuf::from("/wt-feature-a"), + PathBuf::from("/nonexistent"), + ]), + PathList::new(&[PathBuf::from("/project"), PathBuf::from("/nonexistent")]), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + cx, + ); + + // Also save a main thread so the sidebar has something to show. + save_thread_metadata( + acp::SessionId::new(Arc::from("main-thread")), + Some("Main Thread".into()), + chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), + None, + &main_project, + cx, + ); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2, + "should start with 2 workspaces (main + linked worktree)" + ); + + // Archive the worktree thread. + sidebar.update_in(cx, |sidebar, window, cx| { + sidebar.archive_thread(&wt_thread_id, window, cx); + }); + + cx.run_until_parked(); + + // The linked worktree workspace should have been removed, even though + // workspace_to_remove was None (paths didn't match). + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1, + "linked worktree workspace should be removed after archiving, \ + even when folder_paths don't match workspace root paths" + ); + + // The thread should still be archived (not unarchived due to an error). 
+    let still_archived = cx.update(|_, cx| {
+        ThreadMetadataStore::global(cx)
+            .read(cx)
+            .entry_by_session(&wt_thread_id)
+            .map(|t| t.archived)
+    });
+    assert_eq!(
+        still_archived,
+        Some(true),
+        "thread should still be archived (not rolled back due to error)"
+    );
+
+    // The linked worktree directory should be removed from disk.
+    assert!(
+        !fs.is_dir(Path::new("/wt-feature-a")).await,
+        "linked worktree directory should be removed from disk"
+    );
+}
+
+#[gpui::test]
+async fn test_archive_mixed_workspace_closes_only_archived_worktree_items(cx: &mut TestAppContext) {
+    // When a workspace contains both a worktree being archived and other
+    // worktrees that should remain, only the editor items referencing the
+    // archived worktree should be closed — the workspace itself must be
+    // preserved.
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree(
+        "/main-repo",
+        serde_json::json!({
+            ".git": {
+                "worktrees": {
+                    "feature-b": {
+                        "commondir": "../../",
+                        "HEAD": "ref: refs/heads/feature-b",
+                    },
+                },
+            },
+            "src": {
+                "lib.rs": "pub fn hello() {}",
+            },
+        }),
+    )
+    .await;
+
+    fs.insert_tree(
+        "/wt-feature-b",
+        serde_json::json!({
+            ".git": "gitdir: /main-repo/.git/worktrees/feature-b",
+            "src": {
+                "main.rs": "fn main() { hello(); }",
+            },
+        }),
+    )
+    .await;
+
+    fs.add_linked_worktree_for_repo(
+        Path::new("/main-repo/.git"),
+        false,
+        git::repository::Worktree {
+            path: PathBuf::from("/wt-feature-b"),
+            ref_name: Some("refs/heads/feature-b".into()),
+            sha: "def".into(),
+            is_main: false,
+        },
+    )
+    .await;
+
+    cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+    // Create a single project that contains BOTH the main repo and the
+    // linked worktree — this makes it a "mixed" workspace.
+    let mixed_project = project::Project::test(
+        fs.clone(),
+        ["/main-repo".as_ref(), "/wt-feature-b".as_ref()],
+        cx,
+    )
+    .await;
+
+    mixed_project
+        .update(cx, |p, cx| p.git_scans_complete(cx))
+        .await;
+
+    let (multi_workspace, cx) = cx
+        .add_window_view(|window, cx| MultiWorkspace::test_new(mixed_project.clone(), window, cx));
+    let sidebar = setup_sidebar(&multi_workspace, cx);
+
+    // Open editor items in both worktrees so we can verify which ones
+    // get closed.
+    let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+    let worktree_ids: Vec<(WorktreeId, Arc<Path>)> = workspace.read_with(cx, |ws, cx| {
+        ws.project()
+            .read(cx)
+            .visible_worktrees(cx)
+            .map(|wt| (wt.read(cx).id(), wt.read(cx).abs_path()))
+            .collect()
+    });
+
+    let main_repo_wt_id = worktree_ids
+        .iter()
+        .find(|(_, path)| path.ends_with("main-repo"))
+        .map(|(id, _)| *id)
+        .expect("should find main-repo worktree");
+
+    let feature_b_wt_id = worktree_ids
+        .iter()
+        .find(|(_, path)| path.ends_with("wt-feature-b"))
+        .map(|(id, _)| *id)
+        .expect("should find wt-feature-b worktree");
+
+    // Open files from both worktrees.
+    let main_repo_path = project::ProjectPath {
+        worktree_id: main_repo_wt_id,
+        path: Arc::from(rel_path("src/lib.rs")),
+    };
+    let feature_b_path = project::ProjectPath {
+        worktree_id: feature_b_wt_id,
+        path: Arc::from(rel_path("src/main.rs")),
+    };
+
+    workspace
+        .update_in(cx, |ws, window, cx| {
+            ws.open_path(main_repo_path.clone(), None, true, window, cx)
+        })
+        .await
+        .expect("should open main-repo file");
+    workspace
+        .update_in(cx, |ws, window, cx| {
+            ws.open_path(feature_b_path.clone(), None, true, window, cx)
+        })
+        .await
+        .expect("should open feature-b file");
+
+    cx.run_until_parked();
+
+    // Verify both items are open.
+    let open_paths_before: Vec<project::ProjectPath> = workspace.read_with(cx, |ws, cx| {
+        ws.panes()
+            .iter()
+            .flat_map(|pane| {
+                pane.read(cx)
+                    .items()
+                    .filter_map(|item| item.project_path(cx))
+            })
+            .collect()
+    });
+    assert!(
+        open_paths_before
+            .iter()
+            .any(|pp| pp.worktree_id == main_repo_wt_id),
+        "main-repo file should be open"
+    );
+    assert!(
+        open_paths_before
+            .iter()
+            .any(|pp| pp.worktree_id == feature_b_wt_id),
+        "feature-b file should be open"
+    );
+
+    // Save thread metadata for the linked worktree with deliberately
+    // mismatched folder_paths to trigger the scan-based detection.
+    save_thread_metadata_with_main_paths(
+        "feature-b-thread",
+        "Feature B Thread",
+        PathList::new(&[
+            PathBuf::from("/wt-feature-b"),
+            PathBuf::from("/nonexistent"),
+        ]),
+        PathList::new(&[PathBuf::from("/main-repo"), PathBuf::from("/nonexistent")]),
+        chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
+        cx,
+    );
+
+    // Save another thread that references only the main repo (not the
+    // linked worktree) so archiving the feature-b thread's worktree isn't
+    // blocked by another unarchived thread referencing the same path.
+    save_thread_metadata_with_main_paths(
+        "other-thread",
+        "Other Thread",
+        PathList::new(&[PathBuf::from("/main-repo")]),
+        PathList::new(&[PathBuf::from("/main-repo")]),
+        chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(),
+        cx,
+    );
+    cx.run_until_parked();
+
+    multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
+    cx.run_until_parked();
+
+    // There should still be exactly 1 workspace.
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()),
+        1,
+        "should have 1 workspace (the mixed workspace)"
+    );
+
+    // Archive the feature-b thread.
+    let fb_session_id = acp::SessionId::new(Arc::from("feature-b-thread"));
+    sidebar.update_in(cx, |sidebar, window, cx| {
+        sidebar.archive_thread(&fb_session_id, window, cx);
+    });
+
+    cx.run_until_parked();
+
+    // The workspace should still exist (it's "mixed" — has non-archived worktrees).
+    assert_eq!(
+        multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()),
+        1,
+        "mixed workspace should be preserved"
+    );
+
+    // Only the feature-b editor item should have been closed.
+    let open_paths_after: Vec<project::ProjectPath> = workspace.read_with(cx, |ws, cx| {
+        ws.panes()
+            .iter()
+            .flat_map(|pane| {
+                pane.read(cx)
+                    .items()
+                    .filter_map(|item| item.project_path(cx))
+            })
+            .collect()
+    });
+    assert!(
+        open_paths_after
+            .iter()
+            .any(|pp| pp.worktree_id == main_repo_wt_id),
+        "main-repo file should still be open"
+    );
+    assert!(
+        !open_paths_after
+            .iter()
+            .any(|pp| pp.worktree_id == feature_b_wt_id),
+        "feature-b file should have been closed"
+    );
+}