Remote worktree support — change overview

Created by Anthony Eid

Change summary

crates/agent_ui/src/agent_panel.rs           |   1 
crates/git_ui/src/worktree_picker.rs         |   1 
crates/recent_projects/src/remote_servers.rs |   2 
crates/sidebar/src/sidebar.rs                | 180 +++++++++++++--------
crates/workspace/src/multi_workspace.rs      |  67 ++++++-
crates/workspace/src/workspace.rs            |   7 
plan.md                                      |  79 +++++++++
summary.md                                   |  41 +++++
8 files changed, 299 insertions(+), 79 deletions(-)

Detailed changes

crates/recent_projects/src/remote_servers.rs 🔗

@@ -502,7 +502,7 @@ impl ProjectPicker {
                         .log_err()?;
 
                     let items = open_remote_project_with_existing_connection(
-                        connection, project, paths, app_state, window, cx,
+                        connection, project, paths, app_state, window, None, cx,
                     )
                     .await
                     .log_err();

crates/sidebar/src/sidebar.rs 🔗

@@ -386,6 +386,7 @@ pub struct Sidebar {
     thread_last_message_sent_or_queued: HashMap<acp::SessionId, DateTime<Utc>>,
     thread_switcher: Option<Entity<ThreadSwitcher>>,
     _thread_switcher_subscriptions: Vec<gpui::Subscription>,
+    pending_remote_thread_activation: Option<acp::SessionId>,
     view: SidebarView,
     recent_projects_popover_handle: PopoverMenuHandle<SidebarRecentProjects>,
     project_header_menu_ix: Option<usize>,
@@ -477,6 +478,7 @@ impl Sidebar {
             thread_last_message_sent_or_queued: HashMap::new(),
             thread_switcher: None,
             _thread_switcher_subscriptions: Vec::new(),
+            pending_remote_thread_activation: None,
             view: SidebarView::default(),
             recent_projects_popover_handle: PopoverMenuHandle::default(),
             project_header_menu_ix: None,
@@ -689,10 +691,16 @@ impl Sidebar {
 
     /// Finds an open workspace whose project group key matches the given path list.
     fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option<Entity<Workspace>> {
-        let mw = self.multi_workspace.upgrade()?;
-        let mw = mw.read(cx);
-        mw.workspaces()
-            .find(|ws| ws.read(cx).project_group_key(cx).path_list() == path_list)
+        let multi_workspace = self.multi_workspace.upgrade()?;
+        let multi_workspace = multi_workspace.read(cx);
+        multi_workspace
+            .workspaces()
+            .find(|workspace| {
+                multi_workspace
+                    .project_group_key_for_workspace(workspace, cx)
+                    .path_list()
+                    == path_list
+            })
             .cloned()
     }
 
@@ -749,15 +757,25 @@ impl Sidebar {
         // also appears as a "draft" (no messages yet).
         if let Some(active_ws) = &active_workspace {
             if let Some(panel) = active_ws.read(cx).panel::<AgentPanel>(cx) {
-                if panel.read(cx).active_thread_is_draft(cx)
-                    || panel.read(cx).active_conversation_view().is_none()
-                {
-                    let conversation_parent_id = panel
-                        .read(cx)
-                        .active_conversation_view()
-                        .and_then(|cv| cv.read(cx).parent_id(cx));
-                    let preserving_thread =
-                        if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry {
+                let active_thread_is_draft = panel.read(cx).active_thread_is_draft(cx);
+                let active_conversation_view = panel.read(cx).active_conversation_view();
+
+                if active_thread_is_draft || active_conversation_view.is_none() {
+                    if active_conversation_view.is_none()
+                        && let Some(session_id) = self.pending_remote_thread_activation.clone()
+                    {
+                        self.active_entry = Some(ActiveEntry::Thread {
+                            session_id,
+                            workspace: active_ws.clone(),
+                        });
+                    } else {
+                        let conversation_parent_id =
+                            active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx));
+                        let preserving_thread = if let Some(ActiveEntry::Thread {
+                            session_id,
+                            ..
+                        }) = &self.active_entry
+                        {
                             self.active_entry_workspace() == Some(active_ws)
                                 && conversation_parent_id
                                     .as_ref()
@@ -765,14 +783,16 @@ impl Sidebar {
                         } else {
                             false
                         };
-                    if !preserving_thread {
-                        self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
+                        if !preserving_thread {
+                            self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
+                        }
                     }
-                } else if let Some(session_id) = panel
-                    .read(cx)
-                    .active_conversation_view()
-                    .and_then(|cv| cv.read(cx).parent_id(cx))
+                } else if let Some(session_id) =
+                    active_conversation_view.and_then(|cv| cv.read(cx).parent_id(cx))
                 {
+                    if self.pending_remote_thread_activation.as_ref() == Some(&session_id) {
+                        self.pending_remote_thread_activation = None;
+                    }
                     self.active_entry = Some(ActiveEntry::Thread {
                         session_id,
                         workspace: active_ws.clone(),
@@ -2177,8 +2197,12 @@ impl Sidebar {
         };
 
         if let Some(connection_options) = host {
+            let pending_session_id = metadata.session_id.clone();
+            self.pending_remote_thread_activation = Some(pending_session_id.clone());
+
             let window_handle = window.window_handle().downcast::<MultiWorkspace>();
             let Some(window_handle) = window_handle else {
+                self.pending_remote_thread_activation = None;
                 return;
             };
 
@@ -2191,58 +2215,80 @@ impl Sidebar {
             let paths = path_list.paths().to_vec();
 
             cx.spawn_in(window, async move |this, cx| {
-                let delegate: std::sync::Arc<dyn remote::RemoteClientDelegate> =
-                    std::sync::Arc::new(remote_connection::HeadlessRemoteClientDelegate);
-                let remote_connection =
-                    remote::connect(connection_options.clone(), delegate.clone(), cx).await?;
-
-                let (_cancel_tx, cancel_rx) = futures::channel::oneshot::channel();
-                let session = cx
-                    .update(|_, cx| {
-                        remote::RemoteClient::new(
-                            remote::remote_client::ConnectionIdentifier::setup(),
-                            remote_connection,
-                            cancel_rx,
-                            delegate,
+                let result: anyhow::Result<()> = async {
+                    let delegate: std::sync::Arc<dyn remote::RemoteClientDelegate> =
+                        std::sync::Arc::new(remote_connection::HeadlessRemoteClientDelegate);
+                    let remote_connection =
+                        remote::connect(connection_options.clone(), delegate.clone(), cx).await?;
+
+                    let (_cancel_tx, cancel_rx) = futures::channel::oneshot::channel();
+                    let session = cx
+                        .update(|_, cx| {
+                            remote::RemoteClient::new(
+                                remote::remote_client::ConnectionIdentifier::setup(),
+                                remote_connection,
+                                cancel_rx,
+                                delegate,
+                                cx,
+                            )
+                        })?
+                        .await?
+                        .ok_or_else(|| anyhow::anyhow!("Remote connection was cancelled"))?;
+
+                    let new_project = cx.update(|_, cx| {
+                        project::Project::remote(
+                            session,
+                            app_state.client.clone(),
+                            app_state.node_runtime.clone(),
+                            app_state.user_store.clone(),
+                            app_state.languages.clone(),
+                            app_state.fs.clone(),
+                            true,
                             cx,
                         )
-                    })?
-                    .await?
-                    .ok_or_else(|| anyhow::anyhow!("Remote connection was cancelled"))?;
-
-                let new_project = cx.update(|_, cx| {
-                    project::Project::remote(
-                        session,
-                        app_state.client.clone(),
-                        app_state.node_runtime.clone(),
-                        app_state.user_store.clone(),
-                        app_state.languages.clone(),
-                        app_state.fs.clone(),
-                        true,
+                    })?;
+
+                    let provisional_project_group_key = project::ProjectGroupKey::new(
+                        Some(connection_options.clone()),
+                        metadata.main_worktree_paths.clone(),
+                    );
+
+                    workspace::open_remote_project_with_existing_connection(
+                        connection_options,
+                        new_project,
+                        paths,
+                        app_state,
+                        window_handle,
+                        Some(provisional_project_group_key),
                         cx,
                     )
-                })?;
-
-                workspace::open_remote_project_with_existing_connection(
-                    connection_options,
-                    new_project,
-                    paths,
-                    app_state,
-                    window_handle,
-                    cx,
-                )
-                .await?;
+                    .await?;
+
+                    let workspace = window_handle.update(cx, |multi_workspace, window, cx| {
+                        let workspace = multi_workspace.workspace().clone();
+                        multi_workspace.add(workspace.clone(), window, cx);
+                        workspace
+                    })?;
+
+                    this.update_in(cx, |this, window, cx| {
+                        this.activate_thread(metadata, &workspace, false, window, cx);
+                    })?;
+                    anyhow::Ok(())
+                }
+                .await;
 
-                let workspace = window_handle.update(cx, |multi_workspace, window, cx| {
-                    let workspace = multi_workspace.workspace().clone();
-                    multi_workspace.add(workspace.clone(), window, cx);
-                    workspace
-                })?;
+                if result.is_err() {
+                    this.update(cx, |this, _cx| {
+                        if this.pending_remote_thread_activation.as_ref()
+                            == Some(&pending_session_id)
+                        {
+                            this.pending_remote_thread_activation = None;
+                        }
+                    })
+                    .ok();
+                }
 
-                this.update_in(cx, |this, window, cx| {
-                    this.activate_thread(metadata, &workspace, false, window, cx);
-                })?;
-                anyhow::Ok(())
+                result
             })
             .detach_and_log_err(cx);
         } else {
@@ -3184,8 +3230,8 @@ impl Sidebar {
 
     fn active_project_group_key(&self, cx: &App) -> Option<ProjectGroupKey> {
         let multi_workspace = self.multi_workspace.upgrade()?;
-        let mw = multi_workspace.read(cx);
-        Some(mw.workspace().read(cx).project_group_key(cx))
+        let multi_workspace = multi_workspace.read(cx);
+        Some(multi_workspace.project_group_key_for_workspace(multi_workspace.workspace(), cx))
     }
 
     fn active_project_header_position(&self, cx: &App) -> Option<usize> {

crates/workspace/src/multi_workspace.rs 🔗

@@ -1,4 +1,5 @@
 use anyhow::Result;
+use collections::{HashMap, HashSet};
 use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
 use gpui::PathPromptOptions;
 use gpui::{
@@ -330,6 +331,7 @@ pub struct MultiWorkspace {
     workspaces: Vec<Entity<Workspace>>,
     active_workspace: ActiveWorkspace,
     project_group_keys: Vec<ProjectGroupKey>,
+    provisional_project_group_keys: HashMap<EntityId, ProjectGroupKey>,
     sidebar: Option<Box<dyn SidebarHandle>>,
     sidebar_open: bool,
     sidebar_overlay: Option<AnyView>,
@@ -382,6 +384,7 @@ impl MultiWorkspace {
         Self {
             window_id: window.window_handle().window_id(),
             project_group_keys: Vec::new(),
+            provisional_project_group_keys: HashMap::default(),
             workspaces: Vec::new(),
             active_workspace: ActiveWorkspace::Transient(workspace),
             sidebar: None,
@@ -584,7 +587,10 @@ impl MultiWorkspace {
                 }
                 project::Event::WorktreeUpdatedRootRepoCommonDir(_) => {
                     if let Some(workspace) = workspace.upgrade() {
-                        this.add_project_group_key(workspace.read(cx).project_group_key(cx));
+                        this.maybe_clear_provisional_project_group_key(&workspace, cx);
+                        this.add_project_group_key(
+                            this.project_group_key_for_workspace(&workspace, cx),
+                        );
                         this.remove_stale_project_group_keys(cx);
                         cx.notify();
                     }
@@ -612,11 +618,48 @@ impl MultiWorkspace {
         self.project_group_keys.push(project_group_key);
     }
 
+    pub fn set_provisional_project_group_key(
+        &mut self,
+        workspace: &Entity<Workspace>,
+        project_group_key: ProjectGroupKey,
+    ) {
+        self.provisional_project_group_keys
+            .insert(workspace.entity_id(), project_group_key.clone());
+        self.add_project_group_key(project_group_key);
+    }
+
+    pub fn project_group_key_for_workspace(
+        &self,
+        workspace: &Entity<Workspace>,
+        cx: &App,
+    ) -> ProjectGroupKey {
+        self.provisional_project_group_keys
+            .get(&workspace.entity_id())
+            .cloned()
+            .unwrap_or_else(|| workspace.read(cx).project_group_key(cx))
+    }
+
+    fn maybe_clear_provisional_project_group_key(
+        &mut self,
+        workspace: &Entity<Workspace>,
+        cx: &App,
+    ) {
+        let live_key = workspace.read(cx).project_group_key(cx);
+        if self
+            .provisional_project_group_keys
+            .get(&workspace.entity_id())
+            .is_some_and(|key| *key == live_key)
+        {
+            self.provisional_project_group_keys
+                .remove(&workspace.entity_id());
+        }
+    }
+
     fn remove_stale_project_group_keys(&mut self, cx: &App) {
-        let workspace_keys: std::collections::HashSet<ProjectGroupKey> = self
+        let workspace_keys: HashSet<ProjectGroupKey> = self
             .workspaces
             .iter()
-            .map(|ws| ws.read(cx).project_group_key(cx))
+            .map(|workspace| self.project_group_key_for_workspace(workspace, cx))
             .collect();
         self.project_group_keys
             .retain(|key| workspace_keys.contains(key));
@@ -648,7 +691,7 @@ impl MultiWorkspace {
             .map(|key| (key.clone(), Vec::new()))
             .collect::<Vec<_>>();
         for workspace in &self.workspaces {
-            let key = workspace.read(cx).project_group_key(cx);
+            let key = self.project_group_key_for_workspace(workspace, cx);
             if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) {
                 workspaces.push(workspace.clone());
             }
@@ -661,9 +704,9 @@ impl MultiWorkspace {
         project_group_key: &ProjectGroupKey,
         cx: &App,
     ) -> impl Iterator<Item = &Entity<Workspace>> {
-        self.workspaces
-            .iter()
-            .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key)
+        self.workspaces.iter().filter(move |workspace| {
+            self.project_group_key_for_workspace(workspace, cx) == *project_group_key
+        })
     }
 
     pub fn remove_folder_from_project_group(
@@ -919,7 +962,7 @@ impl MultiWorkspace {
     /// Promotes a former transient workspace into the persistent list.
     /// Returns the index of the newly inserted workspace.
     fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
-        let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+        let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
         self.add_project_group_key(project_group_key);
         self.workspaces.push(workspace.clone());
         cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
@@ -956,7 +999,7 @@ impl MultiWorkspace {
         if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
             index
         } else {
-            let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+            let project_group_key = self.project_group_key_for_workspace(&workspace, cx);
 
             Self::subscribe_to_workspace(&workspace, window, cx);
             self.sync_sidebar_to_workspace(&workspace, cx);
@@ -1230,7 +1273,7 @@ impl MultiWorkspace {
             return false;
         };
 
-        let old_key = workspace.read(cx).project_group_key(cx);
+        let old_key = self.project_group_key_for_workspace(workspace, cx);
 
         if self.workspaces.len() <= 1 {
             let has_worktrees = workspace.read(cx).visible_worktrees(cx).next().is_some();
@@ -1277,6 +1320,8 @@ impl MultiWorkspace {
             cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
         } else {
             let removed_workspace = self.workspaces.remove(index);
+            self.provisional_project_group_keys
+                .remove(&removed_workspace.entity_id());
 
             if let Some(active_index) = self.active_workspace.persistent_index() {
                 if active_index >= self.workspaces.len() {
@@ -1297,7 +1342,7 @@ impl MultiWorkspace {
         let key_still_in_use = self
             .workspaces
             .iter()
-            .any(|ws| ws.read(cx).project_group_key(cx) == old_key);
+            .any(|workspace| self.project_group_key_for_workspace(workspace, cx) == old_key);
 
         if !key_still_in_use {
             self.project_group_keys.retain(|k| k != &old_key);

crates/workspace/src/workspace.rs 🔗

@@ -9693,6 +9693,7 @@ pub fn open_remote_project_with_new_connection(
             serialized_workspace,
             app_state,
             window,
+            None,
             cx,
         )
         .await
@@ -9705,6 +9706,7 @@ pub fn open_remote_project_with_existing_connection(
     paths: Vec<PathBuf>,
     app_state: Arc<AppState>,
     window: WindowHandle<MultiWorkspace>,
+    provisional_project_group_key: Option<ProjectGroupKey>,
     cx: &mut AsyncApp,
 ) -> Task<Result<Vec<Option<Box<dyn ItemHandle>>>>> {
     cx.spawn(async move |cx| {
@@ -9718,6 +9720,7 @@ pub fn open_remote_project_with_existing_connection(
             serialized_workspace,
             app_state,
             window,
+            provisional_project_group_key,
             cx,
         )
         .await
@@ -9731,6 +9734,7 @@ async fn open_remote_project_inner(
     serialized_workspace: Option<SerializedWorkspace>,
     app_state: Arc<AppState>,
     window: WindowHandle<MultiWorkspace>,
+    provisional_project_group_key: Option<ProjectGroupKey>,
     cx: &mut AsyncApp,
 ) -> Result<Vec<Option<Box<dyn ItemHandle>>>> {
     let db = cx.update(|cx| WorkspaceDb::global(cx));
@@ -9791,6 +9795,9 @@ async fn open_remote_project_inner(
             workspace
         });
 
+        if let Some(project_group_key) = provisional_project_group_key.clone() {
+            multi_workspace.set_provisional_project_group_key(&new_workspace, project_group_key);
+        }
         multi_workspace.activate(new_workspace.clone(), window, cx);
         new_workspace
     })?;

plan.md 🔗

@@ -0,0 +1,79 @@
+# Plan: Fix sidebar flicker when remote workspace is added
+
+## Context
+
+Read `summary.md` for all changes made so far. This plan covers the remaining flicker bug.
+
+## The Bug
+
+When a remote workspace is added to the sidebar, the project group briefly flickers (appears as a separate group for 1-2 frames). This happens because:
+
+1. **Server-side `set_snapshot`** in `zed/crates/worktree/src/worktree.rs` (~line 1205) unconditionally recomputes `root_repo_common_dir` from `git_repositories`:
+
+   ```rust
+   new_snapshot.root_repo_common_dir = new_snapshot
+       .local_repo_for_work_directory_path(RelPath::empty())
+       .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone()));
+   ```
+
+   During early scan passes, `.git` hasn't been discovered yet, so this overwrites the correct value (set by `Worktree::local()` during creation) with `None`.
+
+2. The server sends an `UpdateWorktree` message with `root_repo_common_dir = None`.
+
+3. The client's `apply_remote_update` in `zed/crates/worktree/src/worktree.rs` (~line 2437) currently has a partial fix that only updates when `Some`:
+   ```rust
+   if let Some(dir) = update.root_repo_common_dir.map(...) {
+       self.root_repo_common_dir = Some(dir);
+   }
+   ```
+   This prevents the client from clearing it, but the real fix should be server-side.
+
+## What To Do
+
+### Step 1: Add flicker detection to the existing test
+
+Extend `test_clicking_closed_remote_thread_opens_remote_workspace` in `zed/crates/sidebar/src/sidebar_tests.rs` to catch transient flicker. Use the `observe_self` pattern from `test_clicking_worktree_thread_does_not_briefly_render_as_separate_project` (line ~3326-3397), which installs an observer that fires on **every notification** and panics if more than one project header ever appears:
+
+```rust
+sidebar
+    .update(cx, |_, cx| cx.observe_self(assert_sidebar_state))
+    .detach();
+```
+
+Add this observer BEFORE the stale key injection / workspace addition steps. The callback should assert that there is never more than one project group header at any point during the test. This catches the case where an `UpdateWorktree` message with `root_repo_common_dir = None` temporarily creates a wrong project group key.
+
+Since the full remote mock connection is hard to set up for a second connection, an alternative approach is to simulate the `UpdateWorktree` message arriving with `root_repo_common_dir = None` by directly calling the worktree's update mechanism on the existing project. Alternatively, test at a lower level by verifying that `set_snapshot` doesn't clear `root_repo_common_dir`.
+
+### Step 2: Fix the server-side root cause
+
+In `zed/crates/worktree/src/worktree.rs`, find `set_snapshot` (~line 1200-1210). Change the `root_repo_common_dir` recomputation to not downgrade once set:
+
+```rust
+// Before (overwrites unconditionally):
+new_snapshot.root_repo_common_dir = new_snapshot
+    .local_repo_for_work_directory_path(RelPath::empty())
+    .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone()));
+
+// After (preserve existing value if scan hasn't discovered repo yet).
+// Note: use `or_else` rather than `or` so the fallback clone is only
+// evaluated when the scan hasn't found the repo yet:
+new_snapshot.root_repo_common_dir = new_snapshot
+    .local_repo_for_work_directory_path(RelPath::empty())
+    .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone()))
+    .or_else(|| self.snapshot.root_repo_common_dir.clone());
+```
+
+This ensures the value discovered by `Worktree::local()` during creation is preserved until the scanner finds the repo and confirms/updates it.
+
+### Step 3: Verify the client-side guard is still useful
+
+The `apply_remote_update` change (only update when `Some`) is a defense-in-depth measure. With the server fix, the server should never send `None` after having the correct value. But keeping the client guard is good practice. Verify the test passes with both fixes.
+
+### Step 4: Update `summary.md`
+
+Add the flicker fix to the summary of changes.
+
+## Important Notes
+
+- Use sub-agents for research tasks to keep context manageable
+- The key test pattern is `cx.observe_self(callback)` which fires on every `cx.notify()` — this catches transient states that `run_until_parked` would miss
+- Read `test_clicking_worktree_thread_does_not_briefly_render_as_separate_project` (~line 3262-3397) for the full example of this testing pattern
+- After all changes, run `cargo check` on all affected packages and run the sidebar + agent_ui tests

summary.md 🔗

@@ -0,0 +1,41 @@
+# Remote Worktree Support — Summary of Changes
+
+## Problem
+The agent panel's "create new thread in worktree" feature only supported local projects. Remote (SSH/WSL/Docker) projects need the same capability, plus correct sidebar integration.
+
+## Changes Made
+
+### 1. `HeadlessRemoteClientDelegate` (`remote_connection/src/remote_connection.rs`)
+New public struct implementing `RemoteClientDelegate` without UI. Forwards binary downloads to `AutoUpdater`, drops password prompts with a log warning.
+
+### 2. Remote worktree workspace creation (`agent_ui/src/agent_panel.rs`)
+- `handle_worktree_requested`: extracts `remote_connection_options` from project, fails early if disconnected
+- `open_worktree_workspace_and_start_thread`: new remote branch using `remote::connect()` → `RemoteClient::new()` → `Project::remote()` → `open_remote_project_with_existing_connection()` + `multi_workspace.add()`
+
+### 3. Sidebar remote thread support (`sidebar/src/sidebar.rs`)
+- `ThreadEntryWorkspace::Closed` now carries `host: Option<RemoteConnectionOptions>`
+- `open_workspace_and_activate_thread`: branches on `host` — remote uses headless delegate flow, local unchanged
+- All pattern match sites updated, `activate_archived_thread` looks up host from project group keys
+- Worktree tooltip says "Remote" vs "Local" (`ui/src/components/ai/thread_item.rs`)
+
+### 4. Proto: `root_repo_common_dir` in `WorktreeMetadata` + `AddWorktreeResponse`
+- `proto/worktree.proto`: added `optional string root_repo_common_dir` to both messages
+- `remote_server/headless_project.rs`: includes value in `AddWorktreeResponse`
+- `worktree/worktree.rs`: `Worktree::remote()` sets it from metadata; `metadata_proto()` includes it; `apply_remote_update` only updates when `Some` (never clears)
+- `project/worktree_store.rs`: passes through in `create_remote_worktree`, `worktree_metadata_protos`; emits new `WorktreeUpdatedRootRepoCommonDir` event
+- `project/project.rs`: new `Event::WorktreeUpdatedRootRepoCommonDir`, forwarded from worktree store
+
+### 5. Stale key cleanup (`workspace/src/multi_workspace.rs`)
+- `subscribe_to_workspace`: handles `WorktreeUpdatedRootRepoCommonDir` — adds correct key, removes stale keys, notifies
+- New `remove_stale_project_group_keys()` method
+
+### 6. Dependency changes
+- `agent_ui/Cargo.toml`: added `remote`, `remote_connection` to deps; added remote test infra to dev-deps
+- `sidebar/Cargo.toml`: added `remote_connection`, `futures` to deps; added remote test infra to dev-deps
+
+### 7. Tests
+- `agent_ui`: `test_worktree_creation_for_remote_project` — verifies remote code path is taken
+- `sidebar`: `test_clicking_closed_remote_thread_opens_remote_workspace` — verifies grouping and stale key cleanup
+
+## What's Left
+See `plan.md`.