Detailed changes
@@ -0,0 +1,114 @@
+# ProjectGroup Refactor — Implementation Handoff
+
+## Goal
+
+Introduce `ProjectGroup` as an explicit entity that **owns** both its workspaces and its threads. Replace the current system where:
+- Project groups are identified by `ProjectGroupKey` (derived from filesystem paths — structural identity)
+- Thread-to-group association is derived at runtime via path matching across two HashMap indices
+- Three parallel collections on `MultiWorkspace` are joined on every read
+
+With a system where:
+- Each `ProjectGroup` has a stable `ProjectGroupId` (UUID)
+- `ProjectGroup` directly contains its `Vec<Entity<Workspace>>`
+- Threads store a `project_group_id: Option<ProjectGroupId>` for direct ownership
+- `MultiWorkspace.active_workspace` is an independent `Entity<Workspace>` field (not an enum with index)
+
+## What already exists
+
+`ProjectGroupId` has already been added to `zed/crates/project/src/project.rs` (around L6120):
+
+```rust
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
+pub struct ProjectGroupId(uuid::Uuid);
+
+impl ProjectGroupId {
+ pub fn new() -> Self {
+ Self(uuid::Uuid::new_v4())
+ }
+}
+```
+
+The `uuid` dependency has been added to `project/Cargo.toml`.
+
+---
+
+## File-by-file changes (in dependency order)
+
+### 1. `crates/project/src/project.rs`
+
+**Already done:** `ProjectGroupId` type exists.
+
+**No further changes needed** to this file. `ProjectGroupKey` stays as-is with path-based `Eq`/`Hash` — it's still useful as a computed descriptor for matching.
+
+### 2. `crates/workspace/src/persistence/model.rs`
+
+**Current state (lines 65–110):**
+```rust
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct SerializedProjectGroupKey {
+ pub path_list: SerializedPathList,
+ pub(crate) location: SerializedWorkspaceLocation,
+}
+// From impls for ProjectGroupKey <-> SerializedProjectGroupKey
+pub struct MultiWorkspaceState {
+ pub active_workspace_id: Option<WorkspaceId>,
+ pub sidebar_open: bool,
+ pub project_group_keys: Vec<SerializedProjectGroupKey>,
+ pub sidebar_state: Option<String>,
+}
+```
+
+**Changes:**
+
+1. Rename `SerializedProjectGroupKey` → `SerializedProjectGroup` and add an `id` field:
+```rust
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct SerializedProjectGroup {
+ #[serde(default)] // absent in old blobs → None
+ pub id: Option<ProjectGroupId>,
+ pub path_list: SerializedPathList,
+ pub(crate) location: SerializedWorkspaceLocation,
+}
+```
+
+2. Update the `From` impls. The `From<ProjectGroupKey>` impl no longer makes sense because serialization now needs an ID as well as the key. Instead, add an associated constructor that takes the ID (by value — `ProjectGroupId` is `Copy`) together with a borrowed key:
+```rust
+impl SerializedProjectGroup {
+ pub fn from_group(id: ProjectGroupId, key: &ProjectGroupKey) -> Self {
+ Self {
+ id: Some(id),
+ path_list: key.path_list().serialize(),
+ location: match key.host() {
+ Some(host) => SerializedWorkspaceLocation::Remote(host),
+ None => SerializedWorkspaceLocation::Local,
+ },
+ }
+ }
+
+    pub fn to_key_and_id(self) -> (ProjectGroupKey, ProjectGroupId) {
+        let id = self.id.unwrap_or_else(ProjectGroupId::new);
+        let path_list = PathList::deserialize(&self.path_list);
+        let host = match self.location {
+            SerializedWorkspaceLocation::Local => None,
+            SerializedWorkspaceLocation::Remote(opts) => Some(opts),
+        };
+        (ProjectGroupKey::new(host, path_list), id)
+    }
+}
+```
+
+3. Update `MultiWorkspaceState`:
+```rust
+pub struct MultiWorkspaceState {
+ pub active_workspace_id: Option<WorkspaceId>,
+ pub sidebar_open: bool,
+    pub project_group_keys: Vec<SerializedProjectGroup>, // element type renamed from `SerializedProjectGroupKey`
+ pub sidebar_state: Option<String>,
+}
+```
+
+4. Add `use project::ProjectGroupId;` to imports.
+
+5. The old `From<SerializedProjectGroupKey> for ProjectGroupKey` impl should be removed since callers now use `to_key_and_id()`.
+
+###
@@ -13306,6 +13306,7 @@ dependencies = [
"unindent",
"url",
"util",
+ "uuid",
"watch",
"wax",
"which 6.0.3",
@@ -79,6 +79,7 @@ schemars.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
+uuid.workspace = true
settings.workspace = true
sha2.workspace = true
shellexpand.workspace = true
@@ -6116,6 +6116,21 @@ impl Project {
///
/// Paths are mapped to their main worktree path first so we can group
/// workspaces by main repos.
+/// Stable identity for a project group. Unlike `ProjectGroupKey` (which is
+/// derived from filesystem paths and changes when folders are added/removed),
+/// a `ProjectGroupId` is assigned once when the group is created and never
+/// changes.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
+pub struct ProjectGroupId(uuid::Uuid);
+
+impl ProjectGroupId {
+ pub fn new() -> Self {
+ Self(uuid::Uuid::new_v4())
+ }
+}
+
+/// A computed descriptor of a project's worktree paths + optional remote host.
+/// Used for matching workspaces to existing groups. Equality is path-based.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct ProjectGroupKey {
/// The paths of the main worktrees for this project group.
@@ -1963,9 +1963,16 @@ impl RecentProjectsDelegate {
cx.defer(move |cx| {
handle
.update(cx, |multi_workspace, window, cx| {
- multi_workspace
- .remove_project_group(&key_for_remove, window, cx)
- .detach_and_log_err(cx);
+ if let Some(group) = multi_workspace
+ .project_groups()
+ .iter()
+ .find(|g| g.key == key_for_remove)
+ {
+ let group_id = group.id;
+ multi_workspace
+ .remove_project_group(group_id, window, cx)
+ .detach_and_log_err(cx);
+ }
})
.log_err();
});
@@ -24,7 +24,8 @@ use menu::{
Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious,
};
use project::{
- AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupKey, linked_worktree_short_name,
+ AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupId, ProjectGroupKey,
+ linked_worktree_short_name,
};
use recent_projects::sidebar_recent_projects::SidebarRecentProjects;
use remote::RemoteConnectionOptions;
@@ -46,9 +47,9 @@ use util::ResultExt as _;
use util::path_list::PathList;
use workspace::{
AddFolderToProject, CloseWindow, FocusWorkspaceSidebar, MultiWorkspace, MultiWorkspaceEvent,
- NextProject, NextThread, Open, PreviousProject, PreviousThread, SerializedProjectGroupKey,
- ShowFewerThreads, ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide, Toast,
- ToggleWorkspaceSidebar, Workspace, notifications::NotificationId, sidebar_side_context_menu,
+ NextProject, NextThread, Open, PreviousProject, PreviousThread, ProjectGroup, ShowFewerThreads,
+ ShowMoreThreads, Sidebar as WorkspaceSidebar, SidebarSide, Toast, ToggleWorkspaceSidebar,
+ Workspace, notifications::NotificationId, sidebar_side_context_menu,
};
use zed_actions::OpenRecent;
@@ -96,9 +97,9 @@ struct SerializedSidebar {
#[serde(default)]
width: Option<f32>,
#[serde(default)]
- collapsed_groups: Vec<SerializedProjectGroupKey>,
+ collapsed_groups: Vec<ProjectGroupId>,
#[serde(default)]
- expanded_groups: Vec<(SerializedProjectGroupKey, usize)>,
+ expanded_groups: Vec<(ProjectGroupId, usize)>,
#[serde(default)]
active_view: SerializedSidebarView,
}
@@ -240,6 +241,7 @@ impl ThreadEntry {
#[derive(Clone)]
enum ListEntry {
ProjectHeader {
+ group_id: ProjectGroupId,
key: ProjectGroupKey,
label: SharedString,
highlight_positions: Vec<usize>,
@@ -250,6 +252,7 @@ enum ListEntry {
},
Thread(ThreadEntry),
ViewMore {
+ group_id: ProjectGroupId,
key: ProjectGroupKey,
is_fully_expanded: bool,
},
@@ -275,7 +278,7 @@ impl ListEntry {
fn reachable_workspaces<'a>(
&'a self,
multi_workspace: &'a workspace::MultiWorkspace,
- cx: &'a App,
+ _cx: &'a App,
) -> Vec<Entity<Workspace>> {
match self {
ListEntry::Thread(thread) => match &thread.workspace {
@@ -283,10 +286,10 @@ impl ListEntry {
ThreadEntryWorkspace::Closed { .. } => Vec::new(),
},
ListEntry::DraftThread { workspace, .. } => workspace.iter().cloned().collect(),
- ListEntry::ProjectHeader { key, .. } => multi_workspace
- .workspaces_for_project_group(key, cx)
- .cloned()
- .collect(),
+ ListEntry::ProjectHeader { group_id, .. } => multi_workspace
+ .workspaces_for_project_group(*group_id)
+ .map(|ws| ws.to_vec())
+ .unwrap_or_default(),
ListEntry::ViewMore { .. } => Vec::new(),
}
}
@@ -454,8 +457,8 @@ pub struct Sidebar {
/// Tracks which sidebar entry is currently active (highlighted).
active_entry: Option<ActiveEntry>,
hovered_thread_index: Option<usize>,
- collapsed_groups: HashSet<ProjectGroupKey>,
- expanded_groups: HashMap<ProjectGroupKey, usize>,
+ collapsed_groups: HashSet<ProjectGroupId>,
+ expanded_groups: HashMap<ProjectGroupId, usize>,
/// Updated only in response to explicit user actions (clicking a
/// thread, confirming in the thread switcher, etc.) — never from
/// background data changes. Used to sort the thread switcher popup.
@@ -507,34 +510,6 @@ impl Sidebar {
MultiWorkspaceEvent::WorkspaceRemoved(_) => {
this.update_entries(cx);
}
- MultiWorkspaceEvent::WorktreePathAdded {
- old_main_paths,
- added_path,
- } => {
- let added_path = added_path.clone();
- ThreadMetadataStore::global(cx).update(cx, |store, cx| {
- store.change_worktree_paths(
- old_main_paths,
- |paths| paths.add_path(&added_path, &added_path),
- cx,
- );
- });
- this.update_entries(cx);
- }
- MultiWorkspaceEvent::WorktreePathRemoved {
- old_main_paths,
- removed_path,
- } => {
- let removed_path = removed_path.clone();
- ThreadMetadataStore::global(cx).update(cx, |store, cx| {
- store.change_worktree_paths(
- old_main_paths,
- |paths| paths.remove_main_path(&removed_path),
- cx,
- );
- });
- this.update_entries(cx);
- }
},
)
.detach();
@@ -927,11 +902,11 @@ impl Sidebar {
(icon, icon_from_external_svg)
};
- let groups: Vec<_> = mw.project_groups(cx).collect();
+ let groups: Vec<ProjectGroup> = mw.project_groups().to_vec();
let mut all_paths: Vec<PathBuf> = groups
.iter()
- .flat_map(|(key, _)| key.path_list().paths().iter().cloned())
+ .flat_map(|g| g.key.path_list().paths().iter().cloned())
.collect();
all_paths.sort();
all_paths.dedup();
@@ -942,14 +917,18 @@ impl Sidebar {
let path_detail_map: HashMap<PathBuf, usize> =
all_paths.into_iter().zip(path_details).collect();
- for (group_key, group_workspaces) in &groups {
+ for group in &groups {
+ let group_key = &group.key;
+ let group_workspaces = &group.workspaces;
+ let group_id = group.id;
+
if group_key.path_list().paths().is_empty() {
continue;
}
let label = group_key.display_name(&path_detail_map);
- let is_collapsed = self.collapsed_groups.contains(&group_key);
+ let is_collapsed = self.collapsed_groups.contains(&group_id);
let should_load_threads = !is_collapsed || !query.is_empty();
let is_active = active_workspace
@@ -1190,6 +1169,7 @@ impl Sidebar {
project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
+ group_id,
key: group_key.clone(),
label,
highlight_positions: workspace_highlight_positions,
@@ -1206,6 +1186,7 @@ impl Sidebar {
} else {
project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
+ group_id,
key: group_key.clone(),
label,
highlight_positions: Vec::new(),
@@ -1261,7 +1242,7 @@ impl Sidebar {
let total = threads.len();
- let extra_batches = self.expanded_groups.get(&group_key).copied().unwrap_or(0);
+ let extra_batches = self.expanded_groups.get(&group_id).copied().unwrap_or(0);
let threads_to_show =
DEFAULT_THREADS_SHOWN + (extra_batches * DEFAULT_THREADS_SHOWN);
let count = threads_to_show.min(total);
@@ -1300,6 +1281,7 @@ impl Sidebar {
if total > DEFAULT_THREADS_SHOWN {
entries.push(ListEntry::ViewMore {
+ group_id,
key: group_key.clone(),
is_fully_expanded,
});
@@ -1388,6 +1370,7 @@ impl Sidebar {
let rendered = match entry {
ListEntry::ProjectHeader {
+ group_id,
key,
label,
highlight_positions,
@@ -1398,6 +1381,7 @@ impl Sidebar {
} => self.render_project_header(
ix,
false,
+ *group_id,
key,
label,
highlight_positions,
@@ -1410,9 +1394,10 @@ impl Sidebar {
),
ListEntry::Thread(thread) => self.render_thread(ix, thread, is_active, is_selected, cx),
ListEntry::ViewMore {
+ group_id,
key,
is_fully_expanded,
- } => self.render_view_more(ix, key, *is_fully_expanded, is_selected, cx),
+ } => self.render_view_more(ix, *group_id, key, *is_fully_expanded, is_selected, cx),
ListEntry::DraftThread {
draft_id,
key,
@@ -1485,6 +1470,7 @@ impl Sidebar {
&self,
ix: usize,
is_sticky: bool,
+ group_id: ProjectGroupId,
key: &ProjectGroupKey,
label: &SharedString,
highlight_positions: &[usize],
@@ -1502,16 +1488,16 @@ impl Sidebar {
let disclosure_id = SharedString::from(format!("disclosure-{ix}"));
let group_name = SharedString::from(format!("{id_prefix}header-group-{ix}"));
- let is_collapsed = self.collapsed_groups.contains(key);
+ let is_collapsed = self.collapsed_groups.contains(&group_id);
let (disclosure_icon, disclosure_tooltip) = if is_collapsed {
(IconName::ChevronRight, "Expand Project")
} else {
(IconName::ChevronDown, "Collapse Project")
};
- let key_for_toggle = key.clone();
- let key_for_collapse = key.clone();
- let view_more_expanded = self.expanded_groups.contains_key(key);
+ let key_for_toggle = group_id;
+ let key_for_collapse = group_id;
+ let view_more_expanded = self.expanded_groups.contains_key(&group_id);
let label = if highlight_positions.is_empty() {
Label::new(label.clone())
@@ -1578,7 +1564,7 @@ impl Sidebar {
.tooltip(Tooltip::text(disclosure_tooltip))
.on_click(cx.listener(move |this, _, window, cx| {
this.selection = None;
- this.toggle_collapse(&key_for_toggle, window, cx);
+ this.toggle_collapse(key_for_toggle, window, cx);
})),
)
.child(label)
@@ -1626,7 +1612,9 @@ impl Sidebar {
.on_mouse_down(gpui::MouseButton::Left, |_, _, cx| {
cx.stop_propagation();
})
- .child(self.render_project_header_ellipsis_menu(ix, id_prefix, key, cx))
+ .child(
+ self.render_project_header_ellipsis_menu(ix, id_prefix, group_id, key, cx),
+ )
.when(view_more_expanded && !is_collapsed, |this| {
this.child(
IconButton::new(
@@ -1637,15 +1625,14 @@ impl Sidebar {
)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Collapse Displayed Threads"))
- .on_click(cx.listener({
- let key_for_collapse = key_for_collapse.clone();
+ .on_click(cx.listener(
move |this, _, _window, cx| {
this.selection = None;
this.expanded_groups.remove(&key_for_collapse);
this.serialize(cx);
this.update_entries(cx);
- }
- })),
+ },
+ )),
)
})
.child({
@@ -1667,9 +1654,10 @@ impl Sidebar {
cx,
)
})
- .on_click(cx.listener(
+ .on_click(cx.listener({
+ let key = key.clone();
move |this, _, window, cx| {
- this.collapsed_groups.remove(&key);
+ this.collapsed_groups.remove(&group_id);
this.selection = None;
// If the active workspace belongs to this
// group, use it (preserves linked worktree
@@ -1693,8 +1681,8 @@ impl Sidebar {
} else {
this.open_workspace_and_create_draft(&key, window, cx);
}
- },
- ))
+ }
+ }))
}),
)
.map(|this| {
@@ -1731,6 +1719,7 @@ impl Sidebar {
&self,
ix: usize,
id_prefix: &str,
+ group_id: ProjectGroupId,
project_group_key: &ProjectGroupKey,
cx: &mut Context<Self>,
) -> impl IntoElement {
@@ -1766,7 +1755,6 @@ impl Sidebar {
};
let name: SharedString = name.to_string_lossy().into_owned().into();
let path = path.clone();
- let project_group_key = project_group_key.clone();
let multi_workspace = multi_workspace.clone();
let weak_menu = weak_menu.clone();
menu = menu.entry_with_end_slot_on_hover(
@@ -1779,9 +1767,7 @@ impl Sidebar {
multi_workspace
.update(cx, |multi_workspace, cx| {
multi_workspace.remove_folder_from_project_group(
- &project_group_key,
- &path,
- cx,
+ group_id, &path, cx,
);
})
.ok();
@@ -1794,16 +1780,13 @@ impl Sidebar {
"Add Folder to Project",
Some(Box::new(AddFolderToProject)),
{
- let project_group_key = project_group_key.clone();
let multi_workspace = multi_workspace.clone();
let weak_menu = weak_menu.clone();
move |window, cx| {
multi_workspace
.update(cx, |multi_workspace, cx| {
multi_workspace.prompt_to_add_folders_to_project_group(
- &project_group_key,
- window,
- cx,
+ group_id, window, cx,
);
})
.ok();
@@ -1812,14 +1795,13 @@ impl Sidebar {
},
);
- let project_group_key = project_group_key.clone();
let multi_workspace = multi_workspace.clone();
menu.separator()
.entry("Remove Project", None, move |window, cx| {
multi_workspace
.update(cx, |multi_workspace, cx| {
multi_workspace
- .remove_project_group(&project_group_key, window, cx)
+ .remove_project_group(group_id, window, cx)
.detach_and_log_err(cx);
})
.ok();
@@ -1877,6 +1859,7 @@ impl Sidebar {
}
let ListEntry::ProjectHeader {
+ group_id,
key,
label,
highlight_positions,
@@ -1895,6 +1878,7 @@ impl Sidebar {
let header_element = self.render_project_header(
header_idx,
true,
+ *group_id,
key,
&label,
&highlight_positions,
@@ -1942,14 +1926,14 @@ impl Sidebar {
fn toggle_collapse(
&mut self,
- project_group_key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
_window: &mut Window,
cx: &mut Context<Self>,
) {
- if self.collapsed_groups.contains(project_group_key) {
- self.collapsed_groups.remove(project_group_key);
+ if self.collapsed_groups.contains(&group_id) {
+ self.collapsed_groups.remove(&group_id);
} else {
- self.collapsed_groups.insert(project_group_key.clone());
+ self.collapsed_groups.insert(group_id);
}
self.serialize(cx);
self.update_entries(cx);
@@ -2122,9 +2106,8 @@ impl Sidebar {
};
match entry {
- ListEntry::ProjectHeader { key, .. } => {
- let key = key.clone();
- self.toggle_collapse(&key, window, cx);
+ ListEntry::ProjectHeader { group_id, .. } => {
+ self.toggle_collapse(*group_id, window, cx);
}
ListEntry::Thread(thread) => {
let metadata = thread.metadata.clone();
@@ -2150,15 +2133,14 @@ impl Sidebar {
}
}
ListEntry::ViewMore {
- key,
+ group_id,
is_fully_expanded,
..
} => {
- let key = key.clone();
if *is_fully_expanded {
- self.reset_thread_group_expansion(&key, cx);
+ self.reset_thread_group_expansion(*group_id, cx);
} else {
- self.expand_thread_group(&key, cx);
+ self.expand_thread_group(*group_id, cx);
}
}
ListEntry::DraftThread {
@@ -2601,9 +2583,9 @@ impl Sidebar {
let Some(ix) = self.selection else { return };
match self.contents.entries.get(ix) {
- Some(ListEntry::ProjectHeader { key, .. }) => {
- if self.collapsed_groups.contains(key) {
- self.collapsed_groups.remove(key);
+ Some(ListEntry::ProjectHeader { group_id, .. }) => {
+ if self.collapsed_groups.contains(group_id) {
+ self.collapsed_groups.remove(group_id);
self.update_entries(cx);
} else if ix + 1 < self.contents.entries.len() {
self.selection = Some(ix + 1);
@@ -2624,9 +2606,9 @@ impl Sidebar {
let Some(ix) = self.selection else { return };
match self.contents.entries.get(ix) {
- Some(ListEntry::ProjectHeader { key, .. }) => {
- if !self.collapsed_groups.contains(key) {
- self.collapsed_groups.insert(key.clone());
+ Some(ListEntry::ProjectHeader { group_id, .. }) => {
+ if !self.collapsed_groups.contains(group_id) {
+ self.collapsed_groups.insert(*group_id);
self.update_entries(cx);
}
}
@@ -2634,10 +2616,11 @@ impl Sidebar {
ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::DraftThread { .. },
) => {
for i in (0..ix).rev() {
- if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(i)
+ if let Some(ListEntry::ProjectHeader { group_id, .. }) =
+ self.contents.entries.get(i)
{
self.selection = Some(i);
- self.collapsed_groups.insert(key.clone());
+ self.collapsed_groups.insert(*group_id);
self.update_entries(cx);
break;
}
@@ -2670,13 +2653,14 @@ impl Sidebar {
};
if let Some(header_ix) = header_ix {
- if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_ix)
+ if let Some(ListEntry::ProjectHeader { group_id, .. }) =
+ self.contents.entries.get(header_ix)
{
- if self.collapsed_groups.contains(key) {
- self.collapsed_groups.remove(key);
+ if self.collapsed_groups.contains(group_id) {
+ self.collapsed_groups.remove(group_id);
} else {
self.selection = Some(header_ix);
- self.collapsed_groups.insert(key.clone());
+ self.collapsed_groups.insert(*group_id);
}
self.update_entries(cx);
}
@@ -2690,8 +2674,8 @@ impl Sidebar {
cx: &mut Context<Self>,
) {
for entry in &self.contents.entries {
- if let ListEntry::ProjectHeader { key, .. } = entry {
- self.collapsed_groups.insert(key.clone());
+ if let ListEntry::ProjectHeader { group_id, .. } = entry {
+ self.collapsed_groups.insert(*group_id);
}
}
self.update_entries(cx);
@@ -3636,12 +3620,12 @@ impl Sidebar {
fn render_view_more(
&self,
ix: usize,
- key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
+ _key: &ProjectGroupKey,
is_fully_expanded: bool,
is_selected: bool,
cx: &mut Context<Self>,
) -> AnyElement {
- let key = key.clone();
let id = SharedString::from(format!("view-more-{}", ix));
let label: SharedString = if is_fully_expanded {
@@ -3657,9 +3641,9 @@ impl Sidebar {
.on_click(cx.listener(move |this, _, _window, cx| {
this.selection = None;
if is_fully_expanded {
- this.reset_thread_group_expansion(&key, cx);
+ this.reset_thread_group_expansion(group_id, cx);
} else {
- this.expand_thread_group(&key, cx);
+ this.expand_thread_group(group_id, cx);
}
}))
.into_any_element()
@@ -3867,6 +3851,18 @@ impl Sidebar {
Some(multi_workspace.project_group_key_for_workspace(multi_workspace.workspace(), cx))
}
+ fn active_project_group_id(&self, cx: &App) -> Option<ProjectGroupId> {
+ let multi_workspace = self.multi_workspace.upgrade()?;
+ let multi_workspace = multi_workspace.read(cx);
+ let active_key =
+ multi_workspace.project_group_key_for_workspace(multi_workspace.workspace(), cx);
+ multi_workspace
+ .project_groups()
+ .iter()
+ .find(|g| g.key == active_key)
+ .map(|g| g.id)
+ }
+
fn active_project_header_position(&self, cx: &App) -> Option<usize> {
let active_key = self.active_project_group_key(cx)?;
self.contents
@@ -3904,14 +3900,16 @@ impl Sidebar {
};
let header_entry_ix = self.contents.project_header_indices[next_pos];
- let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_entry_ix)
+ let Some(ListEntry::ProjectHeader { group_id, key, .. }) =
+ self.contents.entries.get(header_entry_ix)
else {
return;
};
+ let group_id = *group_id;
let key = key.clone();
// Uncollapse the target group so that threads become visible.
- self.collapsed_groups.remove(&key);
+ self.collapsed_groups.remove(&group_id);
if let Some(workspace) = self.multi_workspace.upgrade().and_then(|mw| {
mw.read(cx)
@@ -4014,40 +4012,26 @@ impl Sidebar {
self.cycle_thread_impl(false, window, cx);
}
- fn expand_thread_group(&mut self, project_group_key: &ProjectGroupKey, cx: &mut Context<Self>) {
- let current = self
- .expanded_groups
- .get(project_group_key)
- .copied()
- .unwrap_or(0);
- self.expanded_groups
- .insert(project_group_key.clone(), current + 1);
+ fn expand_thread_group(&mut self, group_id: ProjectGroupId, cx: &mut Context<Self>) {
+ let current = self.expanded_groups.get(&group_id).copied().unwrap_or(0);
+ self.expanded_groups.insert(group_id, current + 1);
self.serialize(cx);
self.update_entries(cx);
}
- fn reset_thread_group_expansion(
- &mut self,
- project_group_key: &ProjectGroupKey,
- cx: &mut Context<Self>,
- ) {
- self.expanded_groups.remove(project_group_key);
+ fn reset_thread_group_expansion(&mut self, group_id: ProjectGroupId, cx: &mut Context<Self>) {
+ self.expanded_groups.remove(&group_id);
self.serialize(cx);
self.update_entries(cx);
}
- fn collapse_thread_group(
- &mut self,
- project_group_key: &ProjectGroupKey,
- cx: &mut Context<Self>,
- ) {
- match self.expanded_groups.get(project_group_key).copied() {
+ fn collapse_thread_group(&mut self, group_id: ProjectGroupId, cx: &mut Context<Self>) {
+ match self.expanded_groups.get(&group_id).copied() {
Some(batches) if batches > 1 => {
- self.expanded_groups
- .insert(project_group_key.clone(), batches - 1);
+ self.expanded_groups.insert(group_id, batches - 1);
}
Some(_) => {
- self.expanded_groups.remove(project_group_key);
+ self.expanded_groups.remove(&group_id);
}
None => return,
}
@@ -4061,10 +4045,10 @@ impl Sidebar {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(active_key) = self.active_project_group_key(cx) else {
+ let Some(group_id) = self.active_project_group_id(cx) else {
return;
};
- self.expand_thread_group(&active_key, cx);
+ self.expand_thread_group(group_id, cx);
}
fn on_show_fewer_threads(
@@ -4073,10 +4057,10 @@ impl Sidebar {
_window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(active_key) = self.active_project_group_key(cx) else {
+ let Some(group_id) = self.active_project_group_id(cx) else {
return;
};
- self.collapse_thread_group(&active_key, cx);
+ self.collapse_thread_group(group_id, cx);
}
fn on_new_thread(
@@ -4678,16 +4662,11 @@ impl WorkspaceSidebar for Sidebar {
fn serialized_state(&self, _cx: &App) -> Option<String> {
let serialized = SerializedSidebar {
width: Some(f32::from(self.width)),
- collapsed_groups: self
- .collapsed_groups
- .iter()
- .cloned()
- .map(SerializedProjectGroupKey::from)
- .collect(),
+ collapsed_groups: self.collapsed_groups.iter().copied().collect(),
expanded_groups: self
.expanded_groups
.iter()
- .map(|(key, count)| (SerializedProjectGroupKey::from(key.clone()), *count))
+ .map(|(id, count)| (*id, *count))
.collect(),
active_view: match self.view {
SidebarView::ThreadList => SerializedSidebarView::ThreadList,
@@ -4707,16 +4686,8 @@ impl WorkspaceSidebar for Sidebar {
if let Some(width) = serialized.width {
self.width = px(width).clamp(MIN_WIDTH, MAX_WIDTH);
}
- self.collapsed_groups = serialized
- .collapsed_groups
- .into_iter()
- .map(ProjectGroupKey::from)
- .collect();
- self.expanded_groups = serialized
- .expanded_groups
- .into_iter()
- .map(|(s, count)| (ProjectGroupKey::from(s), count))
- .collect();
+ self.collapsed_groups = serialized.collapsed_groups.into_iter().collect();
+ self.expanded_groups = serialized.expanded_groups.into_iter().collect();
if serialized.active_view == SerializedSidebarView::Archive {
cx.defer_in(window, |this, window, cx| {
this.show_archive(window, cx);
@@ -0,0 +1,7 @@
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc e5848f13ac6e2dd83819558a65f5836e5196ff6a2dc5289032e28b4cabf2bf83 # shrinks to TestSidebarInvariantsArgs = TestSidebarInvariantsArgs { __seed: 8911164217610394189, raw_operations: [81, 208] }
@@ -343,11 +343,11 @@ fn visible_entries_as_strings(
match entry {
ListEntry::ProjectHeader {
label,
- key,
+ group_id,
highlight_positions: _,
..
} => {
- let icon = if sidebar.collapsed_groups.contains(key) {
+ let icon = if sidebar.collapsed_groups.contains(group_id) {
">"
} else {
"v"
@@ -412,8 +412,15 @@ async fn test_serialization_round_trip(cx: &mut TestAppContext) {
// Set a custom width, collapse the group, and expand "View More".
sidebar.update_in(cx, |sidebar, window, cx| {
sidebar.set_width(Some(px(420.0)), cx);
- sidebar.toggle_collapse(&project_group_key, window, cx);
- sidebar.expanded_groups.insert(project_group_key.clone(), 2);
+ let group_id = multi_workspace
+ .read(cx)
+ .project_groups()
+ .iter()
+ .find(|g| g.key == project_group_key)
+ .unwrap()
+ .id;
+ sidebar.toggle_collapse(group_id, window, cx);
+ sidebar.expanded_groups.insert(group_id, 2);
});
cx.run_until_parked();
@@ -451,8 +458,15 @@ async fn test_serialization_round_trip(cx: &mut TestAppContext) {
assert_eq!(collapsed1, collapsed2);
assert_eq!(expanded1, expanded2);
assert_eq!(width1, px(420.0));
- assert!(collapsed1.contains(&project_group_key));
- assert_eq!(expanded1.get(&project_group_key), Some(&2));
+ let group_id = multi_workspace.read_with(cx, |mw, _| {
+ mw.project_groups()
+ .iter()
+ .find(|g| g.key == project_group_key)
+ .unwrap()
+ .id
+ });
+ assert!(collapsed1.contains(&group_id));
+ assert_eq!(expanded1.get(&group_id), Some(&2));
}
#[gpui::test]
@@ -686,6 +700,13 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
save_n_test_threads(17, &project, cx).await;
let project_group_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
+ let group_id = multi_workspace.read_with(cx, |mw, _| {
+ mw.project_groups()
+ .iter()
+ .find(|g| g.key == project_group_key)
+ .unwrap()
+ .id
+ });
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -710,13 +731,8 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Expand again by one batch
sidebar.update_in(cx, |s, _window, cx| {
- let current = s
- .expanded_groups
- .get(&project_group_key)
- .copied()
- .unwrap_or(0);
- s.expanded_groups
- .insert(project_group_key.clone(), current + 1);
+ let current = s.expanded_groups.get(&group_id).copied().unwrap_or(0);
+ s.expanded_groups.insert(group_id, current + 1);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -728,13 +744,8 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Expand one more time - should show all 17 threads with Collapse button
sidebar.update_in(cx, |s, _window, cx| {
- let current = s
- .expanded_groups
- .get(&project_group_key)
- .copied()
- .unwrap_or(0);
- s.expanded_groups
- .insert(project_group_key.clone(), current + 1);
+ let current = s.expanded_groups.get(&group_id).copied().unwrap_or(0);
+ s.expanded_groups.insert(group_id, current + 1);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -747,7 +758,7 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
// Click collapse - should go back to showing 5 threads
sidebar.update_in(cx, |s, _window, cx| {
- s.expanded_groups.remove(&project_group_key);
+ s.expanded_groups.remove(&group_id);
s.update_entries(cx);
});
cx.run_until_parked();
@@ -768,6 +779,13 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
save_n_test_threads(1, &project, cx).await;
let project_group_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
+ let group_id = multi_workspace.read_with(cx, |mw, _| {
+ mw.project_groups()
+ .iter()
+ .find(|g| g.key == project_group_key)
+ .unwrap()
+ .id
+ });
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -783,7 +801,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
// Collapse
sidebar.update_in(cx, |s, window, cx| {
- s.toggle_collapse(&project_group_key, window, cx);
+ s.toggle_collapse(group_id, window, cx);
});
cx.run_until_parked();
@@ -797,7 +815,7 @@ async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
// Expand
sidebar.update_in(cx, |s, window, cx| {
- s.toggle_collapse(&project_group_key, window, cx);
+ s.toggle_collapse(group_id, window, cx);
});
cx.run_until_parked();
@@ -822,15 +840,17 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
let expanded_path = PathList::new(&[std::path::PathBuf::from("/expanded")]);
let collapsed_path = PathList::new(&[std::path::PathBuf::from("/collapsed")]);
+ let expanded_group_id = project::ProjectGroupId::new();
+ let collapsed_group_id = project::ProjectGroupId::new();
sidebar.update_in(cx, |s, _window, _cx| {
- s.collapsed_groups
- .insert(project::ProjectGroupKey::new(None, collapsed_path.clone()));
+ s.collapsed_groups.insert(collapsed_group_id);
s.contents
.notified_threads
.insert(acp::SessionId::new(Arc::from("t-5")));
s.contents.entries = vec![
// Expanded project header
ListEntry::ProjectHeader {
+ group_id: expanded_group_id,
key: project::ProjectGroupKey::new(None, expanded_path.clone()),
label: "expanded-project".into(),
highlight_positions: Vec::new(),
@@ -957,11 +977,13 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
}),
// View More entry
ListEntry::ViewMore {
+ group_id: expanded_group_id,
key: project::ProjectGroupKey::new(None, expanded_path.clone()),
is_fully_expanded: false,
},
// Collapsed project header
ListEntry::ProjectHeader {
+ group_id: collapsed_group_id,
key: project::ProjectGroupKey::new(None, collapsed_path.clone()),
label: "collapsed-project".into(),
highlight_positions: Vec::new(),
@@ -2227,9 +2249,16 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo
// visual feedback during mouse interaction instead.
sidebar.update_in(cx, |sidebar, window, cx| {
sidebar.selection = None;
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- let project_group_key = project::ProjectGroupKey::new(None, path_list);
- sidebar.toggle_collapse(&project_group_key, window, cx);
+ let group_id = sidebar
+ .contents
+ .entries
+ .iter()
+ .find_map(|e| match e {
+ ListEntry::ProjectHeader { group_id, .. } => Some(*group_id),
+ _ => None,
+ })
+ .unwrap();
+ sidebar.toggle_collapse(group_id, window, cx);
});
assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None);
@@ -7238,17 +7267,17 @@ async fn test_linked_worktree_workspace_reachable_after_adding_unrelated_project
// Force a full sidebar rebuild with all groups expanded.
sidebar.update_in(cx, |sidebar, _window, cx| {
sidebar.collapsed_groups.clear();
- let group_keys: Vec<project::ProjectGroupKey> = sidebar
+ let group_ids: Vec<project::ProjectGroupId> = sidebar
.contents
.entries
.iter()
.filter_map(|entry| match entry {
- ListEntry::ProjectHeader { key, .. } => Some(key.clone()),
+ ListEntry::ProjectHeader { group_id, .. } => Some(*group_id),
_ => None,
})
.collect();
- for group_key in group_keys {
- sidebar.expanded_groups.insert(group_key, 10_000);
+ for group_id in group_ids {
+ sidebar.expanded_groups.insert(group_id, 10_000);
}
sidebar.update_entries(cx);
});
@@ -7827,10 +7856,10 @@ mod property_test {
// Find a workspace for this project group and create a real
// thread via its agent panel.
let (workspace, project) = multi_workspace.read_with(cx, |mw, cx| {
- let key = mw.project_group_keys().nth(project_group_index).unwrap();
+ let group = mw.project_groups().get(project_group_index).unwrap();
let ws = mw
- .workspaces_for_project_group(key, cx)
- .next()
+ .workspaces_for_project_group(group.id)
+ .and_then(|ws| ws.first())
.unwrap_or(mw.workspace())
.clone();
let project = ws.read(cx).project().clone();
@@ -7954,10 +7983,10 @@ mod property_test {
}
}
Operation::SwitchToProjectGroup { index } => {
- let workspace = multi_workspace.read_with(cx, |mw, cx| {
- let key = mw.project_group_keys().nth(index).unwrap();
- mw.workspaces_for_project_group(key, cx)
- .next()
+ let workspace = multi_workspace.read_with(cx, |mw, _cx| {
+ let group = mw.project_groups().get(index).unwrap();
+ mw.workspaces_for_project_group(group.id)
+ .and_then(|ws| ws.first())
.unwrap_or(mw.workspace())
.clone()
});
@@ -8021,14 +8050,15 @@ mod property_test {
.await;
// Re-scan the main workspace's project so it discovers the new worktree.
- let main_workspace = multi_workspace.read_with(cx, |mw, cx| {
- let key = mw.project_group_keys().nth(project_group_index).unwrap();
- mw.workspaces_for_project_group(key, cx)
- .next()
+ let main_workspace = multi_workspace.read_with(cx, |mw, _cx| {
+ let group = mw.project_groups().get(project_group_index).unwrap();
+ mw.workspaces_for_project_group(group.id)
+ .and_then(|ws| ws.first())
.unwrap()
.clone()
});
- let main_project = main_workspace.read_with(cx, |ws, _| ws.project().clone());
+ let main_project: Entity<project::Project> =
+ main_workspace.read_with(cx, |ws, _| ws.project().clone());
main_project
.update(cx, |p, cx| p.git_scans_complete(cx))
.await;
@@ -8041,12 +8071,15 @@ mod property_test {
Operation::AddWorktreeToProject {
project_group_index,
} => {
- let workspace = multi_workspace.read_with(cx, |mw, cx| {
- let key = mw.project_group_keys().nth(project_group_index).unwrap();
- mw.workspaces_for_project_group(key, cx).next().cloned()
+ let workspace = multi_workspace.read_with(cx, |mw, _cx| {
+ let group = mw.project_groups().get(project_group_index).unwrap();
+ mw.workspaces_for_project_group(group.id)
+ .and_then(|ws| ws.first())
+ .cloned()
});
let Some(workspace) = workspace else { return };
- let project = workspace.read_with(cx, |ws, _| ws.project().clone());
+ let project: Entity<project::Project> =
+ workspace.read_with(cx, |ws, _| ws.project().clone());
let new_path = state.next_workspace_path();
state
@@ -8067,23 +8100,28 @@ mod property_test {
Operation::RemoveWorktreeFromProject {
project_group_index,
} => {
- let workspace = multi_workspace.read_with(cx, |mw, cx| {
- let key = mw.project_group_keys().nth(project_group_index).unwrap();
- mw.workspaces_for_project_group(key, cx).next().cloned()
+ let workspace = multi_workspace.read_with(cx, |mw, _cx| {
+ let group = mw.project_groups().get(project_group_index).unwrap();
+ mw.workspaces_for_project_group(group.id)
+ .and_then(|ws| ws.first())
+ .cloned()
});
let Some(workspace) = workspace else { return };
- let project = workspace.read_with(cx, |ws, _| ws.project().clone());
+ let project: Entity<project::Project> =
+ workspace.read_with(cx, |ws, _| ws.project().clone());
- let worktree_count = project.read_with(cx, |p, cx| p.visible_worktrees(cx).count());
+ let worktree_count = project.read_with(cx, |p: &project::Project, cx| {
+ p.visible_worktrees(cx).count()
+ });
if worktree_count <= 1 {
return;
}
- let worktree_id = project.read_with(cx, |p, cx| {
+ let worktree_id = project.read_with(cx, |p: &project::Project, cx| {
p.visible_worktrees(cx).last().map(|wt| wt.read(cx).id())
});
if let Some(worktree_id) = worktree_id {
- project.update(cx, |project, cx| {
+ project.update(cx, |project: &mut project::Project, cx| {
project.remove_worktree(worktree_id, cx);
});
cx.run_until_parked();
@@ -8095,17 +8133,17 @@ mod property_test {
fn update_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
sidebar.update_in(cx, |sidebar, _window, cx| {
sidebar.collapsed_groups.clear();
- let group_keys: Vec<project::ProjectGroupKey> = sidebar
+ let group_ids: Vec<project::ProjectGroupId> = sidebar
.contents
.entries
.iter()
.filter_map(|entry| match entry {
- ListEntry::ProjectHeader { key, .. } => Some(key.clone()),
+ ListEntry::ProjectHeader { group_id, .. } => Some(*group_id),
_ => None,
})
.collect();
- for group_key in group_keys {
- sidebar.expanded_groups.insert(group_key, 10_000);
+ for group_id in group_ids {
+ sidebar.expanded_groups.insert(group_id, 10_000);
}
sidebar.update_entries(cx);
});
@@ -5,11 +5,11 @@ use gpui::{
ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId,
actions, deferred, px,
};
-use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupKey};
+use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupId, ProjectGroupKey};
use remote::RemoteConnectionOptions;
use settings::Settings;
pub use settings::SidebarSide;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;
use std::future::Future;
use std::path::Path;
use std::path::PathBuf;
@@ -101,14 +101,6 @@ pub enum MultiWorkspaceEvent {
ActiveWorkspaceChanged,
WorkspaceAdded(Entity<Workspace>),
WorkspaceRemoved(EntityId),
- WorktreePathAdded {
- old_main_paths: PathList,
- added_path: PathBuf,
- },
- WorktreePathRemoved {
- old_main_paths: PathList,
- removed_path: PathBuf,
- },
}
pub enum SidebarEvent {
@@ -265,52 +257,19 @@ impl<T: Sidebar> SidebarHandle for Entity<T> {
}
}
-/// Tracks which workspace the user is currently looking at.
-///
-/// `Persistent` workspaces live in the `workspaces` vec and are shown in the
-/// sidebar. `Transient` workspaces exist outside the vec and are discarded
-/// when the user switches away.
-enum ActiveWorkspace {
- /// A persistent workspace, identified by index into the `workspaces` vec.
- Persistent(usize),
- /// A workspace not in the `workspaces` vec that will be discarded on
- /// switch or promoted to persistent when the sidebar is opened.
- Transient(Entity<Workspace>),
-}
-
-impl ActiveWorkspace {
- fn transient_workspace(&self) -> Option<&Entity<Workspace>> {
- match self {
- Self::Transient(workspace) => Some(workspace),
- Self::Persistent(_) => None,
- }
- }
-
- /// Sets the active workspace to transient, returning the previous
- /// transient workspace (if any).
- fn set_transient(&mut self, workspace: Entity<Workspace>) -> Option<Entity<Workspace>> {
- match std::mem::replace(self, Self::Transient(workspace)) {
- Self::Transient(old) => Some(old),
- Self::Persistent(_) => None,
- }
- }
-
- /// Sets the active workspace to persistent at the given index,
- /// returning the previous transient workspace (if any).
- fn set_persistent(&mut self, index: usize) -> Option<Entity<Workspace>> {
- match std::mem::replace(self, Self::Persistent(index)) {
- Self::Transient(workspace) => Some(workspace),
- Self::Persistent(_) => None,
- }
- }
+#[derive(Clone)]
+pub struct ProjectGroup {
+ pub id: ProjectGroupId,
+ pub key: ProjectGroupKey,
+ pub workspaces: Vec<Entity<Workspace>>,
+ pub expanded: bool,
+ pub visible_thread_count: Option<usize>,
}
pub struct MultiWorkspace {
window_id: WindowId,
- workspaces: Vec<Entity<Workspace>>,
- active_workspace: ActiveWorkspace,
- project_group_keys: Vec<ProjectGroupKey>,
- workspace_group_keys: HashMap<EntityId, ProjectGroupKey>,
+ project_groups: Vec<ProjectGroup>,
+ active_workspace: Entity<Workspace>,
sidebar: Option<Box<dyn SidebarHandle>>,
sidebar_open: bool,
sidebar_overlay: Option<AnyView>,
@@ -362,10 +321,8 @@ impl MultiWorkspace {
});
Self {
window_id: window.window_handle().window_id(),
- project_group_keys: Vec::new(),
- workspace_group_keys: HashMap::default(),
- workspaces: Vec::new(),
- active_workspace: ActiveWorkspace::Transient(workspace),
+ project_groups: Vec::new(),
+ active_workspace: workspace,
sidebar: None,
sidebar_open: false,
sidebar_overlay: None,
@@ -482,16 +439,14 @@ impl MultiWorkspace {
pub fn open_sidebar(&mut self, cx: &mut Context<Self>) {
self.sidebar_open = true;
- if let ActiveWorkspace::Transient(workspace) = &self.active_workspace {
- let workspace = workspace.clone();
- let index = self.promote_transient(workspace, cx);
- self.active_workspace = ActiveWorkspace::Persistent(index);
- }
+ self.retain_active_workspace(cx);
let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx));
- for workspace in self.workspaces.iter() {
- workspace.update(cx, |workspace, _cx| {
- workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone());
- });
+ for group in &self.project_groups {
+ for workspace in &group.workspaces {
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone());
+ });
+ }
}
self.serialize(cx);
cx.notify();
@@ -499,10 +454,12 @@ impl MultiWorkspace {
pub fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.sidebar_open = false;
- for workspace in self.workspaces.iter() {
- workspace.update(cx, |workspace, _cx| {
- workspace.set_sidebar_focus_handle(None);
- });
+ for group in &self.project_groups {
+ for workspace in &group.workspaces {
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_sidebar_focus_handle(None);
+ });
+ }
}
let sidebar_has_focus = self
.sidebar
@@ -592,230 +549,190 @@ impl MultiWorkspace {
workspace: &Entity<Workspace>,
cx: &mut Context<Self>,
) {
- let workspace_id = workspace.entity_id();
- let old_key = self.project_group_key_for_workspace(workspace, cx);
let new_key = workspace.read(cx).project_group_key(cx);
- if new_key.path_list().paths().is_empty() || old_key == new_key {
+ if new_key.path_list().paths().is_empty() {
return;
}
- let active_workspace = self.workspace().clone();
-
- self.set_workspace_group_key(workspace, new_key.clone());
-
- let changed_root_paths = workspace.read(cx).root_paths(cx);
- let old_paths = old_key.path_list().paths();
- let new_paths = new_key.path_list().paths();
-
- // Remove workspaces that already had the new key and have the same
- // root paths (true duplicates that this workspace is replacing).
- //
- // NOTE: These are dropped without prompting for unsaved changes because
- // the user explicitly added a folder that makes this workspace
- // identical to the duplicate — they are intentionally overwriting it.
- let duplicate_workspaces: Vec<Entity<Workspace>> = self
- .workspaces
+ // Find which group this workspace is in
+ let group_idx = self
+ .project_groups
.iter()
- .filter(|ws| {
- ws.entity_id() != workspace_id
- && self.project_group_key_for_workspace(ws, cx) == new_key
- && ws.read(cx).root_paths(cx) == changed_root_paths
- })
- .cloned()
- .collect();
+ .position(|g| g.workspaces.contains(workspace));
- if duplicate_workspaces.contains(&active_workspace) {
- // The active workspace is among the duplicates — drop the
- // incoming workspace instead so the user stays where they are.
- self.detach_workspace(workspace, cx);
- self.workspaces.retain(|w| w != workspace);
- } else {
- for ws in &duplicate_workspaces {
- self.detach_workspace(ws, cx);
- self.workspaces.retain(|w| w != ws);
+ if let Some(idx) = group_idx {
+ let old_key = self.project_groups[idx].key.clone();
+ if old_key == new_key {
+ return;
}
- }
-
- // Propagate folder adds/removes to linked worktree siblings
- // (different root paths, same old key) so they stay in the group.
- let group_workspaces: Vec<Entity<Workspace>> = self
- .workspaces
- .iter()
- .filter(|ws| {
- ws.entity_id() != workspace_id
- && self.project_group_key_for_workspace(ws, cx) == old_key
- })
- .cloned()
- .collect();
-
- for workspace in &group_workspaces {
- // Pre-set this to stop later WorktreeAdded events from triggering
- self.set_workspace_group_key(&workspace, new_key.clone());
- let project = workspace.read(cx).project().clone();
-
- for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
- project
- .update(cx, |project, cx| {
- project.find_or_create_worktree(added_path, true, cx)
- })
- .detach_and_log_err(cx);
+ let old_paths = old_key.path_list().paths().to_vec();
+ let new_paths = new_key.path_list().paths().to_vec();
+ let changed_root_paths = workspace.read(cx).root_paths(cx);
+ let workspace_id = workspace.entity_id();
+
+ // Remove true duplicates from the same group
+ let duplicates: Vec<Entity<Workspace>> = self.project_groups[idx]
+ .workspaces
+ .iter()
+ .filter(|ws| {
+ ws.entity_id() != workspace_id
+ && ws.read(cx).root_paths(cx) == changed_root_paths
+ })
+ .cloned()
+ .collect();
+
+ let active = self.active_workspace.clone();
+ if duplicates.contains(&active) {
+ // Active is a duplicate — remove the incoming workspace instead
+ self.project_groups[idx]
+ .workspaces
+ .retain(|w| w != workspace);
+ self.detach_workspace(workspace, cx);
+ } else {
+ for ws in &duplicates {
+ self.project_groups[idx].workspaces.retain(|w| w != ws);
+ self.detach_workspace(ws, cx);
+ }
}
- for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
- project.update(cx, |project, cx| {
- project.remove_worktree_for_main_worktree_path(removed_path, cx);
- });
- }
- }
+ // Propagate folder adds/removes to sibling workspaces in the same group
+ let siblings: Vec<Entity<Workspace>> = self.project_groups[idx]
+ .workspaces
+ .iter()
+ .filter(|ws| ws.entity_id() != workspace_id)
+ .cloned()
+ .collect();
+
+ for sibling in &siblings {
+ let project = sibling.read(cx).project().clone();
+
+ for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(added_path, true, cx)
+ })
+ .detach_and_log_err(cx);
+ }
- // Restore the active workspace after removals may have shifted
- // the index. If the previously active workspace was removed,
- // fall back to the workspace whose key just changed.
- if let ActiveWorkspace::Persistent(_) = &self.active_workspace {
- let target = if self.workspaces.contains(&active_workspace) {
- &active_workspace
- } else {
- workspace
- };
- if let Some(new_index) = self.workspaces.iter().position(|ws| ws == target) {
- self.active_workspace = ActiveWorkspace::Persistent(new_index);
+ for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
+ project.update(cx, |project, cx| {
+ project.remove_worktree_for_main_worktree_path(removed_path, cx);
+ });
+ }
}
- }
-
- self.remove_stale_project_group_keys(cx);
- let old_main_paths = old_key.path_list().clone();
- for added_path in new_paths.iter().filter(|p| !old_paths.contains(p)) {
- cx.emit(MultiWorkspaceEvent::WorktreePathAdded {
- old_main_paths: old_main_paths.clone(),
- added_path: added_path.clone(),
- });
- }
- for removed_path in old_paths.iter().filter(|p| !new_paths.contains(p)) {
- cx.emit(MultiWorkspaceEvent::WorktreePathRemoved {
- old_main_paths: old_main_paths.clone(),
- removed_path: removed_path.clone(),
- });
+ // Update the group's key in-place — the ID stays the same
+ self.project_groups[idx].key = new_key;
}
self.serialize(cx);
cx.notify();
}
- fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
- if project_group_key.path_list().paths().is_empty() {
- return;
- }
- if self.project_group_keys.contains(&project_group_key) {
- return;
- }
- // Store newest first so the vec is in "most recently added"
- self.project_group_keys.insert(0, project_group_key);
- }
-
- pub(crate) fn set_workspace_group_key(
- &mut self,
- workspace: &Entity<Workspace>,
- project_group_key: ProjectGroupKey,
- ) {
- self.workspace_group_keys
- .insert(workspace.entity_id(), project_group_key.clone());
- self.add_project_group_key(project_group_key);
- }
-
pub fn project_group_key_for_workspace(
&self,
workspace: &Entity<Workspace>,
cx: &App,
) -> ProjectGroupKey {
- self.workspace_group_keys
- .get(&workspace.entity_id())
- .cloned()
+ self.group_for_workspace(workspace)
+ .map(|g| g.key.clone())
.unwrap_or_else(|| workspace.read(cx).project_group_key(cx))
}
- fn remove_stale_project_group_keys(&mut self, cx: &App) {
- let workspace_keys: HashSet<ProjectGroupKey> = self
- .workspaces
- .iter()
- .map(|workspace| self.project_group_key_for_workspace(workspace, cx))
- .collect();
- self.project_group_keys
- .retain(|key| workspace_keys.contains(key));
- }
-
- pub fn restore_project_group_keys(&mut self, keys: Vec<ProjectGroupKey>) {
- let mut restored: Vec<ProjectGroupKey> = Vec::with_capacity(keys.len());
- for key in keys {
+ pub fn restore_project_groups(&mut self, groups: Vec<(ProjectGroupId, ProjectGroupKey)>) {
+ let mut restored: Vec<ProjectGroup> = Vec::new();
+ for (id, key) in groups {
if key.path_list().paths().is_empty() {
continue;
}
- if !restored.contains(&key) {
- restored.push(key);
+ if restored.iter().any(|g| g.id == id) {
+ continue;
}
+ restored.push(ProjectGroup {
+ id,
+ key,
+ workspaces: Vec::new(),
+ expanded: true,
+ visible_thread_count: None,
+ });
}
- for existing_key in &self.project_group_keys {
- if !restored.contains(existing_key) {
- restored.push(existing_key.clone());
+ for existing in &self.project_groups {
+ if !restored.iter().any(|g| g.id == existing.id) {
+ restored.push(existing.clone());
}
}
- self.project_group_keys = restored;
+ self.project_groups = restored;
}
pub fn project_group_keys(&self) -> impl Iterator<Item = &ProjectGroupKey> {
- self.project_group_keys.iter()
+ self.project_groups.iter().map(|g| &g.key)
}
- /// Returns the project groups, ordered by most recently added.
- pub fn project_groups(
- &self,
- cx: &App,
- ) -> impl Iterator<Item = (ProjectGroupKey, Vec<Entity<Workspace>>)> {
- let mut groups = self
- .project_group_keys
+ /// Returns the project groups, ordered by most recently added.
+ pub fn project_groups(&self) -> &[ProjectGroup] {
+ &self.project_groups
+ }
+
+ pub fn group(&self, id: ProjectGroupId) -> Option<&ProjectGroup> {
+ self.project_groups.iter().find(|g| g.id == id)
+ }
+
+ pub fn group_mut(&mut self, id: ProjectGroupId) -> Option<&mut ProjectGroup> {
+ self.project_groups.iter_mut().find(|g| g.id == id)
+ }
+
+ pub fn group_for_workspace(&self, workspace: &Entity<Workspace>) -> Option<&ProjectGroup> {
+ self.project_groups
.iter()
- .map(|key| (key.clone(), Vec::new()))
- .collect::<Vec<_>>();
- for workspace in &self.workspaces {
- let key = self.project_group_key_for_workspace(workspace, cx);
- if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) {
- workspaces.push(workspace.clone());
+ .find(|g| g.workspaces.contains(workspace))
+ }
+
+ pub(crate) fn ensure_workspace_in_group(
+ &mut self,
+ workspace: Entity<Workspace>,
+ key: ProjectGroupKey,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(group) = self.project_groups.iter_mut().find(|g| g.key == key) {
+ if !group.workspaces.contains(&workspace) {
+ group.workspaces.push(workspace.clone());
+ cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
}
+ return;
}
- groups.into_iter()
+ let group = ProjectGroup {
+ id: ProjectGroupId::new(),
+ key,
+ expanded: true,
+ visible_thread_count: None,
+ workspaces: vec![workspace.clone()],
+ };
+ self.project_groups.insert(0, group);
+ cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
}
- pub fn workspaces_for_project_group(
- &self,
- project_group_key: &ProjectGroupKey,
- cx: &App,
- ) -> impl Iterator<Item = &Entity<Workspace>> {
- self.workspaces.iter().filter(move |workspace| {
- self.project_group_key_for_workspace(workspace, cx) == *project_group_key
- })
+ pub fn workspaces_for_project_group(&self, id: ProjectGroupId) -> Option<&[Entity<Workspace>]> {
+ self.group(id).map(|g| g.workspaces.as_slice())
}
pub fn remove_folder_from_project_group(
&mut self,
- project_group_key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
path: &Path,
cx: &mut Context<Self>,
) {
- let new_path_list = project_group_key.path_list().without_path(path);
+ let Some(group) = self.group_mut(group_id) else {
+ return;
+ };
+
+ let new_path_list = group.key.path_list().without_path(path);
if new_path_list.is_empty() {
return;
}
- let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
-
- let workspaces: Vec<_> = self
- .workspaces_for_project_group(project_group_key, cx)
- .cloned()
- .collect();
-
- self.add_project_group_key(new_key);
+ group.key = ProjectGroupKey::new(group.key.host(), new_path_list);
+ let workspaces: Vec<_> = group.workspaces.clone();
for workspace in workspaces {
let project = workspace.read(cx).project().clone();
@@ -830,7 +747,7 @@ impl MultiWorkspace {
pub fn prompt_to_add_folders_to_project_group(
&mut self,
- key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -848,12 +765,11 @@ impl MultiWorkspace {
)
});
- let key = key.clone();
cx.spawn_in(window, async move |this, cx| {
if let Some(new_paths) = paths.await.ok().flatten() {
if !new_paths.is_empty() {
this.update(cx, |multi_workspace, cx| {
- multi_workspace.add_folders_to_project_group(&key, new_paths, cx);
+ multi_workspace.add_folders_to_project_group(group_id, new_paths, cx);
})?;
}
}
@@ -864,21 +780,19 @@ impl MultiWorkspace {
pub fn add_folders_to_project_group(
&mut self,
- project_group_key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
new_paths: Vec<PathBuf>,
cx: &mut Context<Self>,
) {
- let mut all_paths: Vec<PathBuf> = project_group_key.path_list().paths().to_vec();
+ let Some(group) = self.group_mut(group_id) else {
+ return;
+ };
+
+ let mut all_paths: Vec<PathBuf> = group.key.path_list().paths().to_vec();
all_paths.extend(new_paths.iter().cloned());
let new_path_list = PathList::new(&all_paths);
- let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
-
- let workspaces: Vec<_> = self
- .workspaces_for_project_group(project_group_key, cx)
- .cloned()
- .collect();
-
- self.add_project_group_key(new_key);
+ group.key = ProjectGroupKey::new(group.key.host(), new_path_list);
+ let workspaces: Vec<_> = group.workspaces.clone();
for workspace in workspaces {
let project = workspace.read(cx).project().clone();
@@ -897,31 +811,25 @@ impl MultiWorkspace {
pub fn remove_project_group(
&mut self,
- key: &ProjectGroupKey,
+ group_id: ProjectGroupId,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<bool>> {
- let workspaces: Vec<_> = self
- .workspaces_for_project_group(key, cx)
- .cloned()
- .collect();
-
- // Compute the neighbor while the key is still in the list.
- let neighbor_key = {
- let pos = self.project_group_keys.iter().position(|k| k == key);
- pos.and_then(|pos| {
- // Keys are in display order, so pos+1 is below
- // and pos-1 is above. Try below first.
- self.project_group_keys.get(pos + 1).or_else(|| {
- pos.checked_sub(1)
- .and_then(|i| self.project_group_keys.get(i))
- })
- })
- .cloned()
- };
+ let pos = self.project_groups.iter().position(|g| g.id == group_id);
+ let workspaces: Vec<_> = pos
+ .map(|p| self.project_groups[p].workspaces.clone())
+ .unwrap_or_default();
+
+ // Compute the neighbor while the group is still in the list.
+ let neighbor_key = pos.and_then(|pos| {
+ self.project_groups
+ .get(pos + 1)
+ .or_else(|| pos.checked_sub(1).and_then(|i| self.project_groups.get(i)))
+ .map(|g| g.key.clone())
+ });
- // Now remove the key.
- self.project_group_keys.retain(|k| k != key);
+ // Now remove the group.
+ self.project_groups.retain(|g| g.id != group_id);
self.remove(
workspaces,
@@ -962,8 +870,9 @@ impl MultiWorkspace {
host: Option<&RemoteConnectionOptions>,
cx: &App,
) -> Option<Entity<Workspace>> {
- self.workspaces
+ self.project_groups
.iter()
+ .flat_map(|g| &g.workspaces)
.find(|ws| {
let key = ws.read(cx).project_group_key(cx);
key.host().as_ref() == host
@@ -1068,12 +977,6 @@ impl MultiWorkspace {
return Task::ready(Ok(workspace));
}
- if let Some(transient) = self.active_workspace.transient_workspace() {
- if transient.read(cx).project_group_key(cx).path_list() == &path_list {
- return Task::ready(Ok(transient.clone()));
- }
- }
-
let paths = path_list.paths().to_vec();
let app_state = self.workspace().read(cx).app_state().clone();
let requesting_window = window.window_handle().downcast::<MultiWorkspace>();
@@ -1097,16 +1000,16 @@ impl MultiWorkspace {
}
pub fn workspace(&self) -> &Entity<Workspace> {
- match &self.active_workspace {
- ActiveWorkspace::Persistent(index) => &self.workspaces[*index],
- ActiveWorkspace::Transient(workspace) => workspace,
- }
+ &self.active_workspace
}
pub fn workspaces(&self) -> impl Iterator<Item = &Entity<Workspace>> {
- self.workspaces
- .iter()
- .chain(self.active_workspace.transient_workspace())
+ let grouped = self.project_groups.iter().flat_map(|g| &g.workspaces);
+ let active = std::iter::once(&self.active_workspace);
+ let mut seen = HashSet::new();
+ grouped
+ .chain(active)
+ .filter(move |ws| seen.insert(ws.entity_id()))
}
/// Adds a workspace to this window as persistent without changing which
@@ -1114,7 +1017,16 @@ impl MultiWorkspace {
/// persistent list regardless of sidebar state — it's used for system-
/// initiated additions like deserialization and worktree discovery.
pub fn add(&mut self, workspace: Entity<Workspace>, window: &Window, cx: &mut Context<Self>) {
- self.insert_workspace(workspace, window, cx);
+ if self.group_for_workspace(&workspace).is_some() {
+ return;
+ }
+ let key = workspace.read(cx).project_group_key(cx);
+ Self::subscribe_to_workspace(&workspace, window, cx);
+ self.sync_sidebar_to_workspace(&workspace, cx);
+ let weak_self = cx.weak_entity();
+ workspace.update(cx, |ws, cx| ws.set_multi_workspace(weak_self, cx));
+ self.ensure_workspace_in_group(workspace, key, cx);
+ cx.notify();
}
/// Ensures the workspace is in the multiworkspace and makes it the active one.
@@ -1124,41 +1036,25 @@ impl MultiWorkspace {
window: &mut Window,
cx: &mut Context<Self>,
) {
- // Re-activating the current workspace is a no-op.
if self.workspace() == &workspace {
self.focus_active_workspace(window, cx);
return;
}
- // Resolve where we're going.
- let new_index = if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
- Some(index)
- } else if self.sidebar_open {
- Some(self.insert_workspace(workspace.clone(), &*window, cx))
- } else {
- None
- };
-
- // Transition the active workspace.
- if let Some(index) = new_index {
- if let Some(old) = self.active_workspace.set_persistent(index) {
- if self.sidebar_open {
- self.promote_transient(old, cx);
- } else {
- self.detach_workspace(&old, cx);
- }
- }
- } else {
+ // If the workspace isn't in any group yet, subscribe and optionally group it
+ if self.group_for_workspace(&workspace).is_none() {
Self::subscribe_to_workspace(&workspace, window, cx);
+ self.sync_sidebar_to_workspace(&workspace, cx);
let weak_self = cx.weak_entity();
- workspace.update(cx, |workspace, cx| {
- workspace.set_multi_workspace(weak_self, cx);
- });
- if let Some(old) = self.active_workspace.set_transient(workspace) {
- self.detach_workspace(&old, cx);
+ workspace.update(cx, |ws, cx| ws.set_multi_workspace(weak_self, cx));
+
+ if self.sidebar_open {
+ let key = workspace.read(cx).project_group_key(cx);
+ self.ensure_workspace_in_group(workspace.clone(), key, cx);
}
}
+ self.active_workspace = workspace;
cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
self.serialize(cx);
self.focus_active_workspace(window, cx);
@@ -1169,77 +1065,42 @@ impl MultiWorkspace {
/// transient, so it is retained across workspace switches even when
/// the sidebar is closed. No-op if the workspace is already persistent.
pub fn retain_active_workspace(&mut self, cx: &mut Context<Self>) {
- if let ActiveWorkspace::Transient(workspace) = &self.active_workspace {
- let workspace = workspace.clone();
- let index = self.promote_transient(workspace, cx);
- self.active_workspace = ActiveWorkspace::Persistent(index);
+ let workspace = self.active_workspace.clone();
+ if self.group_for_workspace(&workspace).is_none() {
+ let key = workspace.read(cx).project_group_key(cx);
+ self.ensure_workspace_in_group(workspace, key, cx);
self.serialize(cx);
cx.notify();
}
}
- /// Promotes a former transient workspace into the persistent list.
- /// Returns the index of the newly inserted workspace.
- fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
- let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
- self.set_workspace_group_key(&workspace, project_group_key);
- self.workspaces.push(workspace.clone());
- cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
- self.workspaces.len() - 1
- }
-
- /// Collapses to a single transient workspace, discarding all persistent
- /// workspaces. Used when multi-workspace is disabled (e.g. disable_ai).
+ /// Collapses to a single workspace, discarding all groups.
+ /// Used when multi-workspace is disabled (e.g. disable_ai).
fn collapse_to_single_workspace(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if self.sidebar_open {
self.close_sidebar(window, cx);
}
- let active = self.workspace().clone();
- for workspace in std::mem::take(&mut self.workspaces) {
- if workspace != active {
- self.detach_workspace(&workspace, cx);
+ let active = self.active_workspace.clone();
+ for group in std::mem::take(&mut self.project_groups) {
+ for workspace in group.workspaces {
+ if workspace != active {
+ self.detach_workspace(&workspace, cx);
+ }
}
}
- self.project_group_keys.clear();
- self.workspace_group_keys.clear();
- self.active_workspace = ActiveWorkspace::Transient(active);
cx.notify();
}
- /// Inserts a workspace into the list if not already present. Returns the
- /// index of the workspace (existing or newly inserted). Does not change
- /// the active workspace index.
- fn insert_workspace(
- &mut self,
- workspace: Entity<Workspace>,
- window: &Window,
- cx: &mut Context<Self>,
- ) -> usize {
- if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
- index
- } else {
- let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
-
- Self::subscribe_to_workspace(&workspace, window, cx);
- self.sync_sidebar_to_workspace(&workspace, cx);
- let weak_self = cx.weak_entity();
- workspace.update(cx, |workspace, cx| {
- workspace.set_multi_workspace(weak_self, cx);
- });
-
- self.set_workspace_group_key(&workspace, project_group_key);
- self.workspaces.push(workspace.clone());
- cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
- cx.notify();
- self.workspaces.len() - 1
- }
- }
-
/// Detaches a workspace: clears session state, DB binding, cached
/// group key, and emits `WorkspaceRemoved`. The DB row is preserved
/// so the workspace still appears in the recent-projects list.
fn detach_workspace(&mut self, workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
- self.workspace_group_keys.remove(&workspace.entity_id());
+ // Remove workspace from its group
+ for group in &mut self.project_groups {
+ group.workspaces.retain(|w| w != workspace);
+ }
+ // Remove empty groups
+ self.project_groups.retain(|g| !g.workspaces.is_empty());
cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
workspace.update(cx, |workspace, _cx| {
workspace.session_id.take();
@@ -1275,9 +1136,13 @@ impl MultiWorkspace {
let state = MultiWorkspaceState {
active_workspace_id: this.workspace().read(cx).database_id(),
project_group_keys: this
- .project_group_keys()
- .cloned()
- .map(Into::into)
+ .project_groups
+ .iter()
+ .map(|g| {
+ crate::persistence::model::SerializedProjectGroup::from_group(
+ g.id, &g.key,
+ )
+ })
.collect::<Vec<_>>(),
sidebar_open: this.sidebar_open,
sidebar_state: this.sidebar.as_ref().and_then(|s| s.serialized_state(cx)),
@@ -1416,41 +1281,18 @@ impl MultiWorkspace {
#[cfg(any(test, feature = "test-support"))]
pub fn assert_project_group_key_integrity(&self, cx: &App) -> anyhow::Result<()> {
- let stored_keys: HashSet<&ProjectGroupKey> = self.project_group_keys().collect();
-
- let workspace_group_keys: HashSet<&ProjectGroupKey> =
- self.workspace_group_keys.values().collect();
- let extra_keys = &workspace_group_keys - &stored_keys;
- anyhow::ensure!(
- extra_keys.is_empty(),
- "workspace_group_keys values not in project_group_keys: {:?}",
- extra_keys,
- );
-
- let cached_ids: HashSet<EntityId> = self.workspace_group_keys.keys().copied().collect();
- let workspace_ids: HashSet<EntityId> =
- self.workspaces.iter().map(|ws| ws.entity_id()).collect();
- anyhow::ensure!(
- cached_ids == workspace_ids,
- "workspace_group_keys entity IDs don't match workspaces.\n\
- only in cache: {:?}\n\
- only in workspaces: {:?}",
- &cached_ids - &workspace_ids,
- &workspace_ids - &cached_ids,
- );
-
- for workspace in self.workspaces() {
- let live_key = workspace.read(cx).project_group_key(cx);
- let cached_key = &self.workspace_group_keys[&workspace.entity_id()];
- anyhow::ensure!(
- *cached_key == live_key,
- "workspace {:?} has live key {:?} but cached key {:?}",
- workspace.entity_id(),
- live_key,
- cached_key,
- );
+        // Every workspace must live in exactly one group, and each group's
+        // stored key must match the live key derived from its workspaces.
+        let mut seen: HashSet<EntityId> = HashSet::new();
+        for group in &self.project_groups {
+            for workspace in &group.workspaces {
+                anyhow::ensure!(
+                    seen.insert(workspace.entity_id()),
+                    "workspace {:?} appears in more than one project group",
+                    workspace.entity_id(),
+                );
+                let live_key = workspace.read(cx).project_group_key(cx);
+                anyhow::ensure!(
+                    group.key == live_key,
+                    "workspace {:?} has live key {:?} but group key {:?}",
+                    workspace.entity_id(),
+                    live_key,
+                    group.key,
+                );
+            }
}
-
Ok(())
}
@@ -1595,37 +1437,18 @@ impl MultiWorkspace {
// Actually remove the workspaces.
this.update_in(cx, |this, _, cx| {
- // Save a handle to the active workspace so we can restore
- // its index after the removals shift the vec around.
- let active_workspace = this.workspace().clone();
-
- let mut removed_workspaces: Vec<Entity<Workspace>> = Vec::new();
-
- this.workspaces.retain(|ws| {
- if workspaces.contains(ws) {
- removed_workspaces.push(ws.clone());
- false
- } else {
- true
+            let mut removed_any = false;
+
+            for workspace in &workspaces {
+                // `detach_workspace` removes the workspace from its group and
+                // prunes any group left empty, so no separate retain() pass or
+                // active-index fixup is needed.
+                // NOTE(review): a workspace that belongs to no group is skipped
+                // entirely — confirm callers can never pass a groupless
+                // workspace that still needs session/DB cleanup.
+                if this.group_for_workspace(workspace).is_some() {
+                    this.detach_workspace(workspace, cx);
+                    removed_any = true;
}
- });
-
- for workspace in &removed_workspaces {
- this.detach_workspace(workspace, cx);
}
- let removed_any = !removed_workspaces.is_empty();
-
if removed_any {
- // Restore the active workspace index after removals.
- if let Some(new_index) = this
- .workspaces
- .iter()
- .position(|ws| ws == &active_workspace)
- {
- this.active_workspace = ActiveWorkspace::Persistent(new_index);
- }
-
this.serialize(cx);
cx.notify();
}
@@ -2568,9 +2568,9 @@ mod tests {
);
// --- Remove the first workspace (index 0, which is not the active one) ---
         multi_workspace.update_in(cx, |mw, window, cx| {
             let ws = mw.workspaces().nth(0).unwrap().clone();
             mw.remove([ws], |_, _, _| unreachable!(), window, cx)
.detach_and_log_err(cx);
});
@@ -4956,7 +4956,13 @@ mod tests {
// --- Remove group B (the middle one). ---
// In the sidebar [C, B, A], "below" B is A.
multi_workspace.update_in(cx, |mw, window, cx| {
- mw.remove_project_group(&key_b, window, cx)
+ let group_id = mw
+ .project_groups()
+ .iter()
+ .find(|g| g.key == key_b)
+ .unwrap()
+ .id;
+ mw.remove_project_group(group_id, window, cx)
.detach_and_log_err(cx);
});
cx.run_until_parked();
@@ -4985,7 +4991,13 @@ mod tests {
// --- Remove group A (the bottom one in sidebar). ---
// Nothing below A, so should fall back upward to C.
multi_workspace.update_in(cx, |mw, window, cx| {
- mw.remove_project_group(&key_a, window, cx)
+ let group_id = mw
+ .project_groups()
+ .iter()
+ .find(|g| g.key == key_a)
+ .unwrap()
+ .id;
+ mw.remove_project_group(group_id, window, cx)
.detach_and_log_err(cx);
});
cx.run_until_parked();
@@ -5004,7 +5016,13 @@ mod tests {
// --- Remove group C (the only one remaining). ---
// Should create an empty workspace.
multi_workspace.update_in(cx, |mw, window, cx| {
- mw.remove_project_group(&key_c, window, cx)
+ let group_id = mw
+ .project_groups()
+ .iter()
+ .find(|g| g.key == key_c)
+ .unwrap()
+ .id;
+ mw.remove_project_group(group_id, window, cx)
.detach_and_log_err(cx);
});
cx.run_until_parked();
@@ -13,7 +13,9 @@ use db::sqlez::{
use gpui::{AsyncWindowContext, Entity, WeakEntity, WindowId};
use language::{Toolchain, ToolchainScope};
-use project::{Project, ProjectGroupKey, debugger::breakpoint_store::SourceBreakpoint};
+use project::{
+ Project, ProjectGroupId, ProjectGroupKey, debugger::breakpoint_store::SourceBreakpoint,
+};
use remote::RemoteConnectionOptions;
use serde::{Deserialize, Serialize};
use std::{
@@ -60,31 +62,40 @@ pub struct SessionWorkspace {
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
-pub struct SerializedProjectGroupKey {
+pub struct SerializedProjectGroup {
+ #[serde(default)]
+ pub id: Option<ProjectGroupId>,
pub path_list: SerializedPathList,
pub(crate) location: SerializedWorkspaceLocation,
}
-impl From<ProjectGroupKey> for SerializedProjectGroupKey {
- fn from(value: ProjectGroupKey) -> Self {
- SerializedProjectGroupKey {
- path_list: value.path_list().serialize(),
- location: match value.host() {
+impl SerializedProjectGroup {
+ pub fn from_group(id: ProjectGroupId, key: &ProjectGroupKey) -> Self {
+ Self {
+ id: Some(id),
+ path_list: key.path_list().serialize(),
+ location: match key.host() {
Some(host) => SerializedWorkspaceLocation::Remote(host),
None => SerializedWorkspaceLocation::Local,
},
}
}
-}
-impl From<SerializedProjectGroupKey> for ProjectGroupKey {
- fn from(value: SerializedProjectGroupKey) -> Self {
- let path_list = PathList::deserialize(&value.path_list);
- let host = match value.location {
+ pub fn into_id_and_key(self) -> (ProjectGroupId, ProjectGroupKey) {
+ let id = self.id.unwrap_or_else(ProjectGroupId::new);
+ let path_list = PathList::deserialize(&self.path_list);
+ let host = match self.location {
SerializedWorkspaceLocation::Local => None,
SerializedWorkspaceLocation::Remote(opts) => Some(opts),
};
- ProjectGroupKey::new(host, path_list)
+ (id, ProjectGroupKey::new(host, path_list))
+ }
+}
+
+/// Convenience conversion for call sites that only need the structural key.
+/// NOTE: this discards the stable `ProjectGroupId` — use `into_id_and_key`
+/// wherever group identity must be preserved.
+impl From<SerializedProjectGroup> for ProjectGroupKey {
+ fn from(value: SerializedProjectGroup) -> Self {
+ let (_, key) = value.into_id_and_key();
+ key
}
}
@@ -93,7 +104,7 @@ impl From<SerializedProjectGroupKey> for ProjectGroupKey {
pub struct MultiWorkspaceState {
pub active_workspace_id: Option<WorkspaceId>,
pub sidebar_open: bool,
- pub project_group_keys: Vec<SerializedProjectGroupKey>,
+ pub project_group_keys: Vec<SerializedProjectGroup>,
#[serde(default)]
pub sidebar_state: Option<String>,
}
@@ -33,8 +33,8 @@ pub use dock::Panel;
pub use multi_workspace::{
CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace,
MultiWorkspaceEvent, NewThread, NextProject, NextThread, PreviousProject, PreviousThread,
- ShowFewerThreads, ShowMoreThreads, Sidebar, SidebarEvent, SidebarHandle, SidebarRenderState,
- SidebarSide, ToggleWorkspaceSidebar, sidebar_side_context_menu,
+ ProjectGroup, ShowFewerThreads, ShowMoreThreads, Sidebar, SidebarEvent, SidebarHandle,
+ SidebarRenderState, SidebarSide, ToggleWorkspaceSidebar, sidebar_side_context_menu,
};
pub use path_list::{PathList, SerializedPathList};
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
@@ -86,7 +86,7 @@ pub use persistence::{
WorkspaceDb, delete_unloaded_items,
model::{
DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace,
- SerializedProjectGroupKey, SerializedWorkspaceLocation, SessionWorkspace,
+ SerializedProjectGroup, SerializedWorkspaceLocation, SessionWorkspace,
},
read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
@@ -8780,12 +8780,9 @@ pub async fn apply_restored_multiworkspace_state(
if !project_group_keys.is_empty() {
// Resolve linked worktree paths to their main repo paths so
// stale keys from previous sessions get normalized and deduped.
- let mut resolved_keys: Vec<ProjectGroupKey> = Vec::new();
- for key in project_group_keys
- .iter()
- .cloned()
- .map(ProjectGroupKey::from)
- {
+ let mut resolved_groups: Vec<(project::ProjectGroupId, ProjectGroupKey)> = Vec::new();
+ for serialized in project_group_keys.iter().cloned() {
+ let (id, key) = serialized.into_id_and_key();
if key.path_list().paths().is_empty() {
continue;
}
@@ -8802,14 +8799,14 @@ pub async fn apply_restored_multiworkspace_state(
}
}
let resolved = ProjectGroupKey::new(key.host(), PathList::new(&resolved_paths));
- if !resolved_keys.contains(&resolved) {
- resolved_keys.push(resolved);
+ if !resolved_groups.iter().any(|(_, k)| *k == resolved) {
+ resolved_groups.push((id, resolved));
}
}
window_handle
.update(cx, |multi_workspace, _window, _cx| {
- multi_workspace.restore_project_group_keys(resolved_keys);
+ multi_workspace.restore_project_groups(resolved_groups);
})
.ok();
}
@@ -9863,7 +9860,7 @@ async fn open_remote_project_inner(
});
if let Some(project_group_key) = provisional_project_group_key.clone() {
- multi_workspace.set_workspace_group_key(&new_workspace, project_group_key);
+ multi_workspace.ensure_workspace_in_group(new_workspace.clone(), project_group_key, cx);
}
multi_workspace.activate(new_workspace.clone(), window, cx);
new_workspace