@@ -322,6 +322,11 @@ fn visible_entries_as_strings(
} else {
""
};
+ let is_active = sidebar
+ .active_entry
+ .as_ref()
+ .is_some_and(|active| active.matches_entry(entry));
+ let active_indicator = if is_active { " (active)" } else { "" };
match entry {
ListEntry::ProjectHeader {
label,
@@ -338,7 +343,7 @@ fn visible_entries_as_strings(
}
ListEntry::Thread(thread) => {
let title = thread.metadata.title.as_ref();
- let active = if thread.is_live { " *" } else { "" };
+ let live = if thread.is_live { " *" } else { "" };
let status_str = match thread.status {
AgentThreadStatus::Running => " (running)",
AgentThreadStatus::Error => " (error)",
@@ -354,7 +359,7 @@ fn visible_entries_as_strings(
""
};
let worktree = format_linked_worktree_chips(&thread.worktrees);
- format!(" {title}{worktree}{active}{status_str}{notified}{selected}")
+ format!(" {title}{worktree}{live}{status_str}{notified}{active_indicator}{selected}")
}
ListEntry::ViewMore {
is_fully_expanded, ..
@@ -374,7 +379,7 @@ fn visible_entries_as_strings(
if workspace.is_some() {
format!(" [+ New Thread{}]{}", worktree, selected)
} else {
- format!(" [~ Draft{}]{}", worktree, selected)
+ format!(" [~ Draft{}]{}{}", worktree, active_indicator, selected)
}
}
}
@@ -543,7 +548,10 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]"]
+ vec![
+ //
+ "v [my-project]",
+ ]
);
}
@@ -579,6 +587,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix crash in project panel",
" Add inline diff view",
@@ -609,7 +618,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Thread A1"]
+ vec![
+ //
+ "v [project-a]",
+ " Thread A1",
+ ]
);
// Add a second workspace
@@ -620,7 +633,11 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Thread A1",]
+ vec![
+ //
+ "v [project-a]",
+ " Thread A1",
+ ]
);
}
@@ -639,6 +656,7 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Thread 12",
" Thread 11",
@@ -749,7 +767,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread 1"]
+ vec![
+ //
+ "v [my-project]",
+ " Thread 1",
+ ]
);
// Collapse
@@ -760,7 +782,10 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project]"]
+ vec![
+ //
+ "> [my-project]",
+ ]
);
// Expand
@@ -771,7 +796,11 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread 1"]
+ vec![
+ //
+ "v [my-project]",
+ " Thread 1",
+ ]
);
}
@@ -941,6 +970,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [expanded-project]",
" Completed thread",
" Running thread * (running) <== selected",
@@ -1104,10 +1134,14 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread 1"]
+ vec![
+ //
+ "v [my-project]",
+ " Thread 1",
+ ]
);
- // Focus the sidebar and select the header (index 0)
+ // Focus the sidebar and select the header
focus_sidebar(&sidebar, cx);
sidebar.update_in(cx, |sidebar, _window, _cx| {
sidebar.selection = Some(0);
@@ -1119,7 +1153,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project] <== selected"]
+ vec![
+ //
+ "> [my-project] <== selected",
+ ]
);
// Confirm again expands the group
@@ -1128,7 +1165,11 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project] <== selected", " Thread 1",]
+ vec![
+ //
+ "v [my-project] <== selected",
+ " Thread 1",
+ ]
);
}
@@ -1179,7 +1220,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread 1"]
+ vec![
+ //
+ "v [my-project]",
+ " Thread 1",
+ ]
);
// Focus sidebar and manually select the header (index 0). Press left to collapse.
@@ -1193,7 +1238,10 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project] <== selected"]
+ vec![
+ //
+ "> [my-project] <== selected",
+ ]
);
// Press right to expand
@@ -1202,7 +1250,11 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project] <== selected", " Thread 1",]
+ vec![
+ //
+ "v [my-project] <== selected",
+ " Thread 1",
+ ]
);
// Press right again on already-expanded header moves selection down
@@ -1229,7 +1281,11 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread 1 <== selected",]
+ vec![
+ //
+ "v [my-project]",
+ " Thread 1 <== selected",
+ ]
);
// Pressing left on a child collapses the parent group and selects it
@@ -1239,7 +1295,10 @@ async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContex
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project] <== selected"]
+ vec![
+ //
+ "> [my-project] <== selected",
+ ]
);
}
@@ -1253,7 +1312,10 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) {
// An empty project has only the header.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [empty-project]"]
+ vec![
+ //
+ "v [empty-project]",
+ ]
);
// Focus sidebar — focus_in does not set a selection
@@ -1385,7 +1447,12 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) {
entries[1..].sort();
assert_eq!(
entries,
- vec!["v [my-project]", " Hello *", " Hello * (running)",]
+ vec![
+ //
+ "v [my-project]",
+ " Hello * (active)",
+ " Hello * (running)",
+ ]
);
}
@@ -1478,7 +1545,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp
// Thread A is still running; no notification yet.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Hello * (running)",]
+ vec![
+ //
+ "v [project-a]",
+ " Hello * (running) (active)",
+ ]
);
// Complete thread A's turn (transition Running → Completed).
@@ -1488,7 +1559,11 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp
// The completed background thread shows a notification indicator.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Hello * (!)",]
+ vec![
+ //
+ "v [project-a]",
+ " Hello * (!) (active)",
+ ]
);
}
@@ -1528,6 +1603,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix crash in project panel",
" Add inline diff view",
@@ -1540,7 +1616,11 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext)
type_in_search(&sidebar, "diff", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Add inline diff view <== selected",]
+ vec![
+ //
+ "v [my-project]",
+ " Add inline diff view <== selected",
+ ]
);
// User changes query to something with no matches — list is empty.
@@ -1575,6 +1655,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix Crash In Project Panel <== selected",
]
@@ -1585,6 +1666,7 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix Crash In Project Panel <== selected",
]
@@ -1615,7 +1697,12 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
// Confirm the full list is showing.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Alpha thread", " Beta thread",]
+ vec![
+ //
+ "v [my-project]",
+ " Alpha thread",
+ " Beta thread",
+ ]
);
// User types a search query to filter down.
@@ -1623,7 +1710,11 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
type_in_search(&sidebar, "alpha", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Alpha thread <== selected",]
+ vec![
+ //
+ "v [my-project]",
+ " Alpha thread <== selected",
+ ]
);
// User presses Escape — filter clears, full list is restored.
@@ -1633,6 +1724,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Alpha thread <== selected",
" Beta thread",
@@ -1689,6 +1781,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [project-a]",
" Fix bug in sidebar",
" Add tests for editor",
@@ -1699,7 +1792,11 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
type_in_search(&sidebar, "sidebar", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Fix bug in sidebar <== selected",]
+ vec![
+ //
+ "v [project-a]",
+ " Fix bug in sidebar <== selected",
+ ]
);
// "typo" only matches in the second workspace — the first header disappears.
@@ -1715,6 +1812,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [project-a]",
" Fix bug in sidebar <== selected",
" Add tests for editor",
@@ -1774,6 +1872,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [alpha-project]",
" Fix bug in sidebar <== selected",
" Add tests for editor",
@@ -1785,7 +1884,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
type_in_search(&sidebar, "sidebar", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [alpha-project]", " Fix bug in sidebar <== selected",]
+ vec![
+ //
+ "v [alpha-project]",
+ " Fix bug in sidebar <== selected",
+ ]
);
// "alpha sidebar" matches the workspace name "alpha-project" (fuzzy: a-l-p-h-a-s-i-d-e-b-a-r
@@ -1795,7 +1898,11 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
type_in_search(&sidebar, "fix", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [alpha-project]", " Fix bug in sidebar <== selected",]
+ vec![
+ //
+ "v [alpha-project]",
+ " Fix bug in sidebar <== selected",
+ ]
);
// A query that matches a workspace name AND a thread in that same workspace.
@@ -1804,6 +1911,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [alpha-project]",
" Fix bug in sidebar <== selected",
" Add tests for editor",
@@ -1817,6 +1925,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [alpha-project]",
" Fix bug in sidebar <== selected",
" Add tests for editor",
@@ -1866,7 +1975,11 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte
let filtered = visible_entries_as_strings(&sidebar, cx);
assert_eq!(
filtered,
- vec!["v [my-project]", " Hidden gem thread <== selected",]
+ vec![
+ //
+ "v [my-project]",
+ " Hidden gem thread <== selected",
+ ]
);
assert!(
!filtered.iter().any(|e| e.contains("View More")),
@@ -1902,14 +2015,21 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project] <== selected"]
+ vec![
+ //
+ "> [my-project] <== selected",
+ ]
);
// User types a search — the thread appears even though its group is collapsed.
type_in_search(&sidebar, "important", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["> [my-project]", " Important thread <== selected",]
+ vec![
+ //
+ "> [my-project]",
+ " Important thread <== selected",
+ ]
);
}
@@ -1943,6 +2063,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix crash in panel <== selected",
" Fix lint warnings",
@@ -1955,6 +2076,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix crash in panel",
" Fix lint warnings <== selected",
@@ -1966,6 +2088,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
+ //
"v [my-project]",
" Fix crash in panel <== selected",
" Fix lint warnings",
@@ -2006,7 +2129,11 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Historical Thread",]
+ vec![
+ //
+ "v [my-project]",
+ " Historical Thread",
+ ]
);
// Switch to workspace 1 so we can verify the confirm switches back.
@@ -2067,7 +2194,12 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Thread A", " Thread B",]
+ vec![
+ //
+ "v [my-project]",
+ " Thread A",
+ " Thread B",
+ ]
);
// Keyboard confirm preserves selection.
@@ -2119,7 +2251,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Hello *"]
+ vec![
+ //
+ "v [my-project]",
+ " Hello * (active)",
+ ]
);
// Simulate the agent generating a title. The notification chain is:
@@ -2141,7 +2277,11 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [my-project]", " Friendly Greeting with AI *"]
+ vec![
+ //
+ "v [my-project]",
+ " Friendly Greeting with AI * (active)",
+ ]
);
}
@@ -2395,7 +2535,11 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex
// Verify the thread appears in the sidebar.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
- vec!["v [project-a]", " Hello *",]
+ vec![
+ //
+ "v [project-a]",
+ " Hello * (active)",
+ ]
);
// The "New Thread" button should NOT be in "active/draft" state
@@ -2422,15 +2566,14 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex
// The workspace path_list is now [project-a, project-b]. The active
// thread's metadata was re-saved with the new paths by the agent panel's
- // project subscription, so it stays visible under the updated group.
- // The old [project-a] group persists in the sidebar (empty) because
- // project_group_keys is append-only.
+ // project subscription. The old [project-a] key is replaced by the new
+ // key since no other workspace claims it.
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
vec![
- "v [project-a, project-b]", //
- " Hello *",
- "v [project-a]",
+ //
+ "v [project-a, project-b]",
+ " Hello * (active)",
]
);
@@ -2465,6 +2608,491 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex
});
}
+#[gpui::test]
+async fn test_worktree_add_and_remove_migrates_threads(cx: &mut TestAppContext) {
+ // When a worktree is added to a project, the project group key changes
+ // and all historical threads should be migrated to the new key. Removing
+ // the worktree should migrate them back.
+ let (_fs, project) = init_multi_project_test(&["/project-a", "/project-b"], cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ // Save two threads against the initial project group [/project-a].
+ save_n_test_threads(2, &project, cx).await;
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ assert_eq!(
+ visible_entries_as_strings(&sidebar, cx),
+ vec![
+ //
+ "v [project-a]",
+ " Thread 2",
+ " Thread 1",
+ ]
+ );
+
+ // Verify the metadata store has threads under the old key.
+ let old_key_paths = PathList::new(&[PathBuf::from("/project-a")]);
+ cx.update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx).read(cx);
+ assert_eq!(
+ store.entries_for_main_worktree_path(&old_key_paths).count(),
+ 2,
+ "should have 2 threads under old key before add"
+ );
+ });
+
+ // Add a second worktree to the same project.
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/project-b", true, cx)
+ })
+ .await
+ .expect("should add worktree");
+ cx.run_until_parked();
+
+ // The project group key should now be [/project-a, /project-b].
+ let new_key_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]);
+
+ // Verify multi-workspace state: exactly one project group key, the new one.
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+ assert_eq!(
+ keys.len(),
+ 1,
+ "should have exactly 1 project group key after add"
+ );
+ assert_eq!(
+ keys[0].path_list(),
+ &new_key_paths,
+ "the key should be the new combined path list"
+ );
+ });
+
+ // Verify threads were migrated to the new key.
+ cx.update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx).read(cx);
+ assert_eq!(
+ store.entries_for_main_worktree_path(&old_key_paths).count(),
+ 0,
+ "should have 0 threads under old key after migration"
+ );
+ assert_eq!(
+ store.entries_for_main_worktree_path(&new_key_paths).count(),
+ 2,
+ "should have 2 threads under new key after migration"
+ );
+ });
+
+ // Sidebar should show threads under the new header.
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ assert_eq!(
+ visible_entries_as_strings(&sidebar, cx),
+ vec![
+ //
+ "v [project-a, project-b]",
+ " Thread 2",
+ " Thread 1",
+ ]
+ );
+
+ // Now remove the second worktree.
+ let worktree_id = project.read_with(cx, |project, cx| {
+ project
+ .visible_worktrees(cx)
+ .find(|wt| wt.read(cx).abs_path().as_ref() == Path::new("/project-b"))
+ .map(|wt| wt.read(cx).id())
+ .expect("should find project-b worktree")
+ });
+ project.update(cx, |project, cx| {
+ project.remove_worktree(worktree_id, cx);
+ });
+ cx.run_until_parked();
+
+ // The key should revert to [/project-a].
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+ assert_eq!(
+ keys.len(),
+ 1,
+ "should have exactly 1 project group key after remove"
+ );
+ assert_eq!(
+ keys[0].path_list(),
+ &old_key_paths,
+ "the key should revert to the original path list"
+ );
+ });
+
+ // Threads should be migrated back to the old key.
+ cx.update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx).read(cx);
+ assert_eq!(
+ store.entries_for_main_worktree_path(&new_key_paths).count(),
+ 0,
+ "should have 0 threads under new key after revert"
+ );
+ assert_eq!(
+ store.entries_for_main_worktree_path(&old_key_paths).count(),
+ 2,
+ "should have 2 threads under old key after revert"
+ );
+ });
+
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ assert_eq!(
+ visible_entries_as_strings(&sidebar, cx),
+ vec![
+ //
+ "v [project-a]",
+ " Thread 2",
+ " Thread 1",
+ ]
+ );
+}
+
+#[gpui::test]
+async fn test_worktree_add_key_collision_removes_duplicate_workspace(cx: &mut TestAppContext) {
+ // When a worktree is added to workspace A and the resulting key matches
+ // an existing workspace B's key (and B has the same root paths), B
+ // should be removed as a true duplicate.
+ let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ // Save a thread against workspace A [/project-a].
+ save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await;
+
+ // Create workspace B with both worktrees [/project-a, /project-b].
+ let project_b = project::Project::test(
+ fs.clone() as Arc<dyn Fs>,
+ ["/project-a".as_ref(), "/project-b".as_ref()],
+ cx,
+ )
+ .await;
+ let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_b.clone(), window, cx)
+ });
+ cx.run_until_parked();
+
+ // Save a thread against workspace B [/project-a, /project-b].
+ save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await;
+
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ // Both project groups should be visible.
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert!(
+ entries.iter().any(|e| e.contains("[project-a]")),
+ "should show project-a group: {entries:?}"
+ );
+ assert!(
+ entries.iter().any(|e| e.contains("[project-a, project-b]")),
+ "should show project-a,b group: {entries:?}"
+ );
+
+ let workspace_b_id = workspace_b.entity_id();
+
+ // Now add /project-b to workspace A's project, causing a key collision.
+ project_a
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/project-b", true, cx)
+ })
+ .await
+ .expect("should add worktree");
+ cx.run_until_parked();
+
+ // Workspace B should have been removed (true duplicate — same root paths).
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect();
+ assert!(
+ !workspace_ids.contains(&workspace_b_id),
+ "workspace B should have been removed after key collision"
+ );
+ });
+
+ // There should be exactly one project group key now.
+ let combined_paths = PathList::new(&[PathBuf::from("/project-a"), PathBuf::from("/project-b")]);
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<_> = mw.project_group_keys().cloned().collect();
+ assert_eq!(
+ keys.len(),
+ 1,
+ "should have exactly 1 project group key after collision"
+ );
+ assert_eq!(
+ keys[0].path_list(),
+ &combined_paths,
+ "the remaining key should be the combined paths"
+ );
+ });
+
+ // Both threads should be visible under the merged group.
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ let entries = visible_entries_as_strings(&sidebar, cx);
+ assert!(
+ entries.iter().any(|e| e.contains("Thread A")),
+ "Thread A should be visible: {entries:?}"
+ );
+ assert!(
+ entries.iter().any(|e| e.contains("Thread B")),
+ "Thread B should be visible: {entries:?}"
+ );
+ assert!(
+ entries.iter().filter(|e| e.contains("[project-a")).count() == 1,
+ "should have exactly 1 project header: {entries:?}"
+ );
+}
+
+#[gpui::test]
+async fn test_worktree_collision_removes_active_workspace(cx: &mut TestAppContext) {
+ // When workspace A adds a folder that makes it collide with workspace B,
+ // and B is the *active* workspace, B should still be removed and the
+ // active workspace should fall back to A.
+ let (fs, project_a) = init_multi_project_test(&["/project-a", "/project-b"], cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ // Create workspace B with both worktrees [/project-a, /project-b].
+ let project_b = project::Project::test(
+ fs.clone() as Arc<dyn Fs>,
+ ["/project-a".as_ref(), "/project-b".as_ref()],
+ cx,
+ )
+ .await;
+ let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_b.clone(), window, cx)
+ });
+ cx.run_until_parked();
+
+ // Workspace B is now active (test_add_workspace calls activate).
+ let workspace_a = multi_workspace.read_with(cx, |mw, _| {
+ mw.workspaces()
+ .find(|ws| ws.entity_id() != workspace_b.entity_id())
+ .unwrap()
+ .clone()
+ });
+ assert_eq!(
+ multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()),
+ workspace_b.entity_id(),
+ "workspace B should be active before the collision"
+ );
+
+ // Add /project-b to workspace A's project, causing a collision with B.
+ project_a
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/project-b", true, cx)
+ })
+ .await
+ .expect("should add worktree");
+ cx.run_until_parked();
+
+ // Workspace B should have been removed.
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let workspace_ids: Vec<_> = mw.workspaces().map(|ws| ws.entity_id()).collect();
+ assert!(
+ !workspace_ids.contains(&workspace_b.entity_id()),
+ "workspace B should have been removed"
+ );
+ });
+
+ // The active workspace should now be A (the one that changed).
+ assert_eq!(
+ multi_workspace.read_with(cx, |mw, _| mw.workspace().entity_id()),
+ workspace_a.entity_id(),
+ "workspace A should be active after collision removed B"
+ );
+}
+
+#[gpui::test]
+async fn test_worktree_add_syncs_linked_worktree_sibling(cx: &mut TestAppContext) {
+ // When a worktree is added to the main workspace, a linked worktree
+ // sibling (different root paths, same project group key) should also
+ // get the new folder added to its project.
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+
+ fs.insert_tree(
+ "/project",
+ serde_json::json!({
+ ".git": {
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature",
+ },
+ },
+ },
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.insert_tree(
+ "/wt-feature",
+ serde_json::json!({
+ ".git": "gitdir: /project/.git/worktrees/feature",
+ "src": {},
+ }),
+ )
+ .await;
+
+ fs.add_linked_worktree_for_repo(
+ Path::new("/project/.git"),
+ false,
+ git::repository::Worktree {
+ path: PathBuf::from("/wt-feature"),
+ ref_name: Some("refs/heads/feature".into()),
+ sha: "aaa".into(),
+ is_main: false,
+ },
+ )
+ .await;
+
+ // Create a second independent project to add as a folder later.
+ fs.insert_tree(
+ "/other-project",
+ serde_json::json!({ ".git": {}, "src": {} }),
+ )
+ .await;
+
+ cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
+
+ let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
+ let worktree_project = project::Project::test(fs.clone(), ["/wt-feature".as_ref()], cx).await;
+
+ main_project
+ .update(cx, |p, cx| p.git_scans_complete(cx))
+ .await;
+ worktree_project
+ .update(cx, |p, cx| p.git_scans_complete(cx))
+ .await;
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx));
+ let sidebar = setup_sidebar(&multi_workspace, cx);
+
+ // Add agent panel to the main workspace.
+ let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+ add_agent_panel(&main_workspace, cx);
+
+ // Open the linked worktree as a separate workspace.
+ let wt_workspace = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(worktree_project.clone(), window, cx)
+ });
+ add_agent_panel(&wt_workspace, cx);
+ cx.run_until_parked();
+
+ // Both workspaces should share the same project group key [/project].
+ multi_workspace.read_with(cx, |mw, _cx| {
+ assert_eq!(
+ mw.project_group_keys().count(),
+ 1,
+ "should have 1 project group key before add"
+ );
+ assert_eq!(mw.workspaces().count(), 2, "should have 2 workspaces");
+ });
+
+ // Save threads against each workspace.
+ save_named_thread_metadata("main-thread", "Main Thread", &main_project, cx).await;
+ save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await;
+
+ // Verify both threads are under the old key [/project].
+ let old_key_paths = PathList::new(&[PathBuf::from("/project")]);
+ cx.update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx).read(cx);
+ assert_eq!(
+ store.entries_for_main_worktree_path(&old_key_paths).count(),
+ 2,
+ "should have 2 threads under old key before add"
+ );
+ });
+
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ assert_eq!(
+ visible_entries_as_strings(&sidebar, cx),
+ vec![
+ //
+ "v [project]",
+ " [~ Draft {wt-feature}] (active)",
+ " Worktree Thread {wt-feature}",
+ " Main Thread",
+ ]
+ );
+
+ // Add /other-project as a folder to the main workspace.
+ main_project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/other-project", true, cx)
+ })
+ .await
+ .expect("should add worktree");
+ cx.run_until_parked();
+
+ // The linked worktree workspace should have gotten the new folder too.
+ let wt_worktree_count =
+ worktree_project.read_with(cx, |project, cx| project.visible_worktrees(cx).count());
+ assert_eq!(
+ wt_worktree_count, 2,
+ "linked worktree project should have gotten the new folder"
+ );
+
+ // Both workspaces should still exist under one key.
+ multi_workspace.read_with(cx, |mw, _cx| {
+ assert_eq!(mw.workspaces().count(), 2, "both workspaces should survive");
+ assert_eq!(
+ mw.project_group_keys().count(),
+ 1,
+ "should still have 1 project group key"
+ );
+ });
+
+ // Threads should have been migrated to the new key.
+ let new_key_paths =
+ PathList::new(&[PathBuf::from("/other-project"), PathBuf::from("/project")]);
+ cx.update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx).read(cx);
+ assert_eq!(
+ store.entries_for_main_worktree_path(&old_key_paths).count(),
+ 0,
+ "should have 0 threads under old key after migration"
+ );
+ assert_eq!(
+ store.entries_for_main_worktree_path(&new_key_paths).count(),
+ 2,
+ "should have 2 threads under new key after migration"
+ );
+ });
+
+ // Both threads should still be visible in the sidebar.
+ sidebar.update_in(cx, |sidebar, _window, cx| sidebar.update_entries(cx));
+ cx.run_until_parked();
+
+ assert_eq!(
+ visible_entries_as_strings(&sidebar, cx),
+ vec![
+ //
+ "v [other-project, project]",
+ " [~ Draft {project:wt-feature}] (active)",
+ " Worktree Thread {project:wt-feature}",
+ " Main Thread",
+ ]
+ );
+}
+
#[gpui::test]
async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) {
// When the user presses Cmd-N (NewThread action) while viewing a
@@ -101,6 +101,10 @@ pub enum MultiWorkspaceEvent {
ActiveWorkspaceChanged,
WorkspaceAdded(Entity<Workspace>),
WorkspaceRemoved(EntityId),
+ ProjectGroupKeyChanged {
+ old_key: ProjectGroupKey,
+ new_key: ProjectGroupKey,
+ },
}
pub enum SidebarEvent {
@@ -302,7 +306,7 @@ pub struct MultiWorkspace {
workspaces: Vec<Entity<Workspace>>,
active_workspace: ActiveWorkspace,
project_group_keys: Vec<ProjectGroupKey>,
- provisional_project_group_keys: HashMap<EntityId, ProjectGroupKey>,
+ workspace_group_keys: HashMap<EntityId, ProjectGroupKey>,
sidebar: Option<Box<dyn SidebarHandle>>,
sidebar_open: bool,
sidebar_overlay: Option<AnyView>,
@@ -355,7 +359,7 @@ impl MultiWorkspace {
Self {
window_id: window.window_handle().window_id(),
project_group_keys: Vec::new(),
- provisional_project_group_keys: HashMap::default(),
+ workspace_group_keys: HashMap::default(),
workspaces: Vec::new(),
active_workspace: ActiveWorkspace::Transient(workspace),
sidebar: None,
@@ -561,17 +565,12 @@ impl MultiWorkspace {
move |this, _project, event, _window, cx| match event {
project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
if let Some(workspace) = workspace.upgrade() {
- this.add_project_group_key(workspace.read(cx).project_group_key(cx));
+ this.handle_workspace_key_change(&workspace, cx);
}
}
project::Event::WorktreeUpdatedRootRepoCommonDir(_) => {
if let Some(workspace) = workspace.upgrade() {
- this.maybe_clear_provisional_project_group_key(&workspace, cx);
- this.add_project_group_key(
- this.project_group_key_for_workspace(&workspace, cx),
- );
- this.remove_stale_project_group_keys(cx);
- cx.notify();
+ this.handle_workspace_key_change(&workspace, cx);
}
}
_ => {}
@@ -587,7 +586,104 @@ impl MultiWorkspace {
.detach();
}
- pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
+ fn handle_workspace_key_change(
+ &mut self,
+ workspace: &Entity<Workspace>,
+ cx: &mut Context<Self>,
+ ) {
+ let workspace_id = workspace.entity_id();
+ let old_key = self.project_group_key_for_workspace(workspace, cx);
+ let new_key = workspace.read(cx).project_group_key(cx);
+
+ if new_key.path_list().paths().is_empty() || old_key == new_key {
+ return;
+ }
+
+ let active_workspace = self.workspace().clone();
+
+ self.set_workspace_group_key(workspace, new_key.clone());
+
+ let changed_root_paths = workspace.read(cx).root_paths(cx);
+ let old_paths = old_key.path_list().paths();
+ let new_paths = new_key.path_list().paths();
+
+ // Remove workspaces that already had the new key and have the same
+ // root paths (true duplicates that this workspace is replacing).
+ //
+ // NOTE: These are dropped without prompting for unsaved changes because
+ // the user explicitly added a folder that makes this workspace
+ // identical to the duplicate — they are intentionally overwriting it.
+ let duplicate_workspaces: Vec<Entity<Workspace>> = self
+ .workspaces
+ .iter()
+ .filter(|ws| {
+ ws.entity_id() != workspace_id
+ && self.project_group_key_for_workspace(ws, cx) == new_key
+ && ws.read(cx).root_paths(cx) == changed_root_paths
+ })
+ .cloned()
+ .collect();
+
+ for ws in &duplicate_workspaces {
+ self.detach_workspace(ws, cx);
+ self.workspaces.retain(|w| w != ws);
+ }
+
+ // Propagate folder adds/removes to linked worktree siblings
+ // (different root paths, same old key) so they stay in the group.
+ let group_workspaces: Vec<Entity<Workspace>> = self
+ .workspaces
+ .iter()
+ .filter(|ws| {
+ ws.entity_id() != workspace_id
+ && self.project_group_key_for_workspace(ws, cx) == old_key
+ })
+ .cloned()
+ .collect();
+
+ for workspace in &group_workspaces {
+ // Pre-set the key so the WorktreeAdded events below don't re-trigger this handler
+ self.set_workspace_group_key(workspace, new_key.clone());
+
+ let project = workspace.read(cx).project().clone();
+
+ for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(added_path, true, cx)
+ })
+ .detach_and_log_err(cx);
+ }
+
+ for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
+ project.update(cx, |project, cx| {
+ project.remove_worktree_for_main_worktree_path(removed_path, cx);
+ });
+ }
+ }
+
+ // Restore the active workspace, since removals may have shifted
+ // its index. If the previously active workspace was removed,
+ // fall back to the workspace whose key just changed.
+ if let ActiveWorkspace::Persistent(_) = &self.active_workspace {
+ let target = if self.workspaces.contains(&active_workspace) {
+ &active_workspace
+ } else {
+ workspace
+ };
+ if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) {
+ self.active_workspace = ActiveWorkspace::Persistent(new_index);
+ }
+ }
+
+ self.remove_stale_project_group_keys(cx);
+
+ cx.emit(MultiWorkspaceEvent::ProjectGroupKeyChanged { old_key, new_key });
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
if project_group_key.path_list().paths().is_empty() {
return;
}
@@ -598,12 +694,12 @@ impl MultiWorkspace {
self.project_group_keys.insert(0, project_group_key);
}
- pub fn set_provisional_project_group_key(
+ pub(crate) fn set_workspace_group_key(
&mut self,
workspace: &Entity<Workspace>,
project_group_key: ProjectGroupKey,
) {
- self.provisional_project_group_keys
+ self.workspace_group_keys
.insert(workspace.entity_id(), project_group_key.clone());
self.add_project_group_key(project_group_key);
}
@@ -613,28 +709,12 @@ impl MultiWorkspace {
workspace: &Entity<Workspace>,
cx: &App,
) -> ProjectGroupKey {
- self.provisional_project_group_keys
+ self.workspace_group_keys
.get(&workspace.entity_id())
.cloned()
.unwrap_or_else(|| workspace.read(cx).project_group_key(cx))
}
- fn maybe_clear_provisional_project_group_key(
- &mut self,
- workspace: &Entity<Workspace>,
- cx: &App,
- ) {
- let live_key = workspace.read(cx).project_group_key(cx);
- if self
- .provisional_project_group_keys
- .get(&workspace.entity_id())
- .is_some_and(|key| *key == live_key)
- {
- self.provisional_project_group_keys
- .remove(&workspace.entity_id());
- }
- }
-
fn remove_stale_project_group_keys(&mut self, cx: &App) {
let workspace_keys: HashSet<ProjectGroupKey> = self
.workspaces
@@ -1045,7 +1125,6 @@ impl MultiWorkspace {
self.promote_transient(old, cx);
} else {
self.detach_workspace(&old, cx);
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
}
}
} else {
@@ -1056,7 +1135,6 @@ impl MultiWorkspace {
});
if let Some(old) = self.active_workspace.set_transient(workspace) {
self.detach_workspace(&old, cx);
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
}
}
@@ -1083,7 +1161,7 @@ impl MultiWorkspace {
/// Returns the index of the newly inserted workspace.
fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
- self.add_project_group_key(project_group_key);
+ self.set_workspace_group_key(&workspace, project_group_key);
self.workspaces.push(workspace.clone());
cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
self.workspaces.len() - 1
@@ -1099,10 +1177,10 @@ impl MultiWorkspace {
for workspace in std::mem::take(&mut self.workspaces) {
if workspace != active {
self.detach_workspace(&workspace, cx);
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
}
}
self.project_group_keys.clear();
+ self.workspace_group_keys.clear();
self.active_workspace = ActiveWorkspace::Transient(active);
cx.notify();
}
@@ -1128,7 +1206,7 @@ impl MultiWorkspace {
workspace.set_multi_workspace(weak_self, cx);
});
- self.add_project_group_key(project_group_key);
+ self.set_workspace_group_key(&workspace, project_group_key);
self.workspaces.push(workspace.clone());
cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
cx.notify();
@@ -1136,10 +1214,12 @@ impl MultiWorkspace {
}
}
- /// Clears session state and DB binding for a workspace that is being
- /// removed or replaced. The DB row is preserved so the workspace still
- /// appears in the recent-projects list.
+ /// Detaches a workspace: clears session state, DB binding, cached
+ /// group key, and emits `WorkspaceRemoved`. The DB row is preserved
+ /// so the workspace still appears in the recent-projects list.
fn detach_workspace(&mut self, workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
+ self.workspace_group_keys.remove(&workspace.entity_id());
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
workspace.update(cx, |workspace, _cx| {
workspace.session_id.take();
workspace._schedule_serialize_workspace.take();
@@ -1313,6 +1393,45 @@ impl MultiWorkspace {
tasks
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> {
+ let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect();
+
+ let workspace_group_keys: HashSet<&ProjectGroupKey> =
+ self.workspace_group_keys.values().collect();
+ let extra_keys = &workspace_group_keys - &stored_keys;
+ anyhow::ensure!(
+ extra_keys.is_empty(),
+ "workspace_group_keys values not in project_group_keys: {:?}",
+ extra_keys,
+ );
+
+ let cached_ids: HashSet<EntityId> = self.workspace_group_keys.keys().copied().collect();
+ let workspace_ids: HashSet<EntityId> = self.workspaces().map(|ws| ws.entity_id()).collect();
+ anyhow::ensure!(
+ cached_ids == workspace_ids,
+ "workspace_group_keys entity IDs don't match workspaces.\n\
+ only in cache: {:?}\n\
+ only in workspaces: {:?}",
+ &cached_ids - &workspace_ids,
+ &workspace_ids - &cached_ids,
+ );
+
+ for workspace in self.workspaces() {
+ let live_key = workspace.read(cx).project_group_key(cx);
+ let cached_key = &self.workspace_group_keys[&workspace.entity_id()];
+ anyhow::ensure!(
+ *cached_key == live_key,
+ "workspace {:?} has live key {:?} but cached key {:?}",
+ workspace.entity_id(),
+ live_key,
+ cached_key,
+ );
+ }
+
+ Ok(())
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn set_random_database_id(&mut self, cx: &mut Context<Self>) {
self.workspace().update(cx, |workspace, _cx| {
@@ -1471,7 +1590,6 @@ impl MultiWorkspace {
for workspace in &removed_workspaces {
this.detach_workspace(workspace, cx);
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
}
let removed_any = !removed_workspaces.is_empty();