Stop following when project is unshared (#4010)

Created by Conrad Irwin

Before this change, the views would continue to update behind the "disconnected" dialog, which was disconcerting.

Release Notes:

- Fixed an edge case where following didn't stop when a project was unshared

Change summary

crates/call/src/room.rs                    |  7 +
crates/collab/src/tests/following_tests.rs | 58 +++++++++++++++-
crates/collab/src/tests/test_server.rs     | 64 ++++++++++++++++++
crates/project/src/project.rs              | 82 ++++++++++++-----------
crates/workspace/src/workspace.rs          |  5 +
5 files changed, 170 insertions(+), 46 deletions(-)

Detailed changes

crates/call/src/room.rs

@@ -1225,7 +1225,12 @@ impl Room {
         };
 
         self.client.send(proto::UnshareProject { project_id })?;
-        project.update(cx, |this, cx| this.unshare(cx))
+        project.update(cx, |this, cx| this.unshare(cx))?;
+
+        if self.local_participant.active_project == Some(project.downgrade()) {
+            self.set_location(Some(&project), cx).detach_and_log_err(cx);
+        }
+        Ok(())
     }
 
     pub(crate) fn set_location(
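
The room-side half of the fix: after unsharing, if the unshared project is the one the local participant has active, the room re-broadcasts its location so guests see an unshared project rather than a stale shared one they could keep following. A rough, self-contained model of that idea (the types here are illustrative, not the real call::Room API):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Location {
        SharedProject { project_id: u64 },
        UnsharedProject,
    }

    struct Room {
        // Project the local participant currently has active, if any.
        active_project: Option<u64>,
        // Location currently broadcast to the other participants.
        broadcast_location: Location,
    }

    impl Room {
        fn unshare_project(&mut self, project_id: u64) {
            // The real code first notifies the server (proto::UnshareProject)
            // and tears down the project's shared state; elided here.
            if self.active_project == Some(project_id) {
                // Re-announce the location so remote participants now see an
                // unshared project instead of a followable shared one.
                self.broadcast_location = Location::UnsharedProject;
            }
        }
    }

    fn main() {
        let mut room = Room {
            active_project: Some(42),
            broadcast_location: Location::SharedProject { project_id: 42 },
        };
        room.unshare_project(42);
        assert_eq!(room.broadcast_location, Location::UnsharedProject);
        println!("ok");
    }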

crates/collab/src/tests/following_tests.rs

@@ -1,5 +1,5 @@
 use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
-use call::ActiveCall;
+use call::{ActiveCall, ParticipantLocation};
 use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
 use editor::{Editor, ExcerptRange, MultiBuffer};
 use gpui::{
@@ -1568,6 +1568,59 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
     });
 }
 
+#[gpui::test]
+async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
+    let (client_a, client_b, channel_id) = TestServer::start2(cx_a, cx_b).await;
+
+    let (workspace_a, cx_a) = client_a.build_test_workspace(cx_a).await;
+    client_a
+        .host_workspace(&workspace_a, channel_id, cx_a)
+        .await;
+    let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await;
+
+    cx_a.simulate_keystrokes("cmd-p 2 enter");
+    cx_a.run_until_parked();
+
+    let editor_a = workspace_a.update(cx_a, |workspace, cx| {
+        workspace.active_item_as::<Editor>(cx).unwrap()
+    });
+    let editor_b = workspace_b.update(cx_b, |workspace, cx| {
+        workspace.active_item_as::<Editor>(cx).unwrap()
+    });
+
+    // b should follow a to position 1
+    editor_a.update(cx_a, |editor, cx| {
+        editor.change_selections(None, cx, |s| s.select_ranges([1..1]))
+    });
+    cx_a.run_until_parked();
+    editor_b.update(cx_b, |editor, cx| {
+        assert_eq!(editor.selections.ranges(cx), vec![1..1])
+    });
+
+    // a unshares the project
+    cx_a.update(|cx| {
+        let project = workspace_a.read(cx).project().clone();
+        ActiveCall::global(cx).update(cx, |call, cx| {
+            call.unshare_project(project, cx).unwrap();
+        })
+    });
+    cx_a.run_until_parked();
+
+    // b should not follow a to position 2
+    editor_a.update(cx_a, |editor, cx| {
+        editor.change_selections(None, cx, |s| s.select_ranges([2..2]))
+    });
+    cx_a.run_until_parked();
+    editor_b.update(cx_b, |editor, cx| {
+        assert_eq!(editor.selections.ranges(cx), vec![1..1])
+    });
+    cx_b.update(|cx| {
+        let room = ActiveCall::global(cx).read(cx).room().unwrap().read(cx);
+        let participant = room.remote_participants().get(&client_a.id()).unwrap();
+        assert_eq!(participant.location, ParticipantLocation::UnsharedProject)
+    })
+}
+
 #[gpui::test]
 async fn test_following_into_excluded_file(
     mut cx_a: &mut TestAppContext,
@@ -1593,9 +1646,6 @@ async fn test_following_into_excluded_file(
     let active_call_b = cx_b.read(ActiveCall::global);
     let peer_id_a = client_a.peer_id().unwrap();
 
-    cx_a.update(editor::init);
-    cx_b.update(editor::init);
-
     client_a
         .fs()
         .insert_tree(

crates/collab/src/tests/test_server.rs

@@ -113,6 +113,20 @@ impl TestServer {
         }
     }
 
+    pub async fn start2(
+        cx_a: &mut TestAppContext,
+        cx_b: &mut TestAppContext,
+    ) -> (TestClient, TestClient, u64) {
+        let mut server = Self::start(cx_a.executor()).await;
+        let client_a = server.create_client(cx_a, "user_a").await;
+        let client_b = server.create_client(cx_b, "user_b").await;
+        let channel_id = server
+            .make_channel("a", None, (&client_a, cx_a), &mut [(&client_b, cx_b)])
+            .await;
+
+        (client_a, client_b, channel_id)
+    }
+
     pub async fn reset(&self) {
         self.app_state.db.reset();
         let epoch = self
@@ -619,14 +633,49 @@ impl TestClient {
                 "/a",
                 json!({
                     "1.txt": "one\none\none",
-                    "2.txt": "two\ntwo\ntwo",
-                    "3.txt": "three\nthree\nthree",
+                    "2.js": "function two() { return 2; }",
+                    "3.rs": "mod test",
                 }),
             )
             .await;
         self.build_local_project("/a", cx).await.0
     }
 
+    pub async fn host_workspace(
+        &self,
+        workspace: &View<Workspace>,
+        channel_id: u64,
+        cx: &mut VisualTestContext,
+    ) {
+        cx.update(|cx| {
+            let active_call = ActiveCall::global(cx);
+            active_call.update(cx, |call, cx| call.join_channel(channel_id, cx))
+        })
+        .await
+        .unwrap();
+        cx.update(|cx| {
+            let active_call = ActiveCall::global(cx);
+            let project = workspace.read(cx).project().clone();
+            active_call.update(cx, |call, cx| call.share_project(project, cx))
+        })
+        .await
+        .unwrap();
+        cx.executor().run_until_parked();
+    }
+
+    pub async fn join_workspace<'a>(
+        &'a self,
+        channel_id: u64,
+        cx: &'a mut TestAppContext,
+    ) -> (View<Workspace>, &'a mut VisualTestContext) {
+        cx.update(|cx| workspace::join_channel(channel_id, self.app_state.clone(), None, cx))
+            .await
+            .unwrap();
+        cx.run_until_parked();
+
+        self.active_workspace(cx)
+    }
+
     pub fn build_empty_local_project(&self, cx: &mut TestAppContext) -> Model<Project> {
         cx.update(|cx| {
             Project::local(
@@ -670,6 +719,17 @@ impl TestClient {
         })
     }
 
+    pub async fn build_test_workspace<'a>(
+        &'a self,
+        cx: &'a mut TestAppContext,
+    ) -> (View<Workspace>, &'a mut VisualTestContext) {
+        let project = self.build_test_project(cx).await;
+        cx.add_window_view(|cx| {
+            cx.activate_window();
+            Workspace::new(0, project.clone(), self.app_state.clone(), cx)
+        })
+    }
+
     pub fn active_workspace<'a>(
         &'a self,
         cx: &'a mut TestAppContext,

crates/project/src/project.rs

@@ -130,7 +130,7 @@ pub struct Project {
     next_diagnostic_group_id: usize,
     user_store: Model<UserStore>,
     fs: Arc<dyn Fs>,
-    client_state: Option<ProjectClientState>,
+    client_state: ProjectClientState,
     collaborators: HashMap<proto::PeerId, Collaborator>,
     client_subscriptions: Vec<client::Subscription>,
     _subscriptions: Vec<gpui::Subscription>,
@@ -254,8 +254,10 @@ enum WorktreeHandle {
     Weak(WeakModel<Worktree>),
 }
 
+#[derive(Debug)]
 enum ProjectClientState {
-    Local {
+    Local,
+    Shared {
         remote_id: u64,
         updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
         _send_updates: Task<Result<()>>,
@@ -657,7 +659,7 @@ impl Project {
                 local_buffer_ids_by_entry_id: Default::default(),
                 buffer_snapshots: Default::default(),
                 join_project_response_message_id: 0,
-                client_state: None,
+                client_state: ProjectClientState::Local,
                 opened_buffer: watch::channel(),
                 client_subscriptions: Vec::new(),
                 _subscriptions: vec![
@@ -756,12 +758,12 @@ impl Project {
                     cx.on_app_quit(Self::shutdown_language_servers),
                 ],
                 client: client.clone(),
-                client_state: Some(ProjectClientState::Remote {
+                client_state: ProjectClientState::Remote {
                     sharing_has_stopped: false,
                     capability: Capability::ReadWrite,
                     remote_id,
                     replica_id,
-                }),
+                },
                 supplementary_language_servers: HashMap::default(),
                 language_servers: Default::default(),
                 language_server_ids: Default::default(),
@@ -828,16 +830,16 @@ impl Project {
 
     fn release(&mut self, cx: &mut AppContext) {
         match &self.client_state {
-            Some(ProjectClientState::Local { .. }) => {
+            ProjectClientState::Local => {}
+            ProjectClientState::Shared { .. } => {
                 let _ = self.unshare_internal(cx);
             }
-            Some(ProjectClientState::Remote { remote_id, .. }) => {
+            ProjectClientState::Remote { remote_id, .. } => {
                 let _ = self.client.send(proto::LeaveProject {
                     project_id: *remote_id,
                 });
                 self.disconnected_from_host_internal(cx);
             }
-            _ => {}
         }
     }
 
@@ -1058,21 +1060,22 @@ impl Project {
     }
 
     pub fn remote_id(&self) -> Option<u64> {
-        match self.client_state.as_ref()? {
-            ProjectClientState::Local { remote_id, .. }
-            | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
+        match self.client_state {
+            ProjectClientState::Local => None,
+            ProjectClientState::Shared { remote_id, .. }
+            | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
         }
     }
 
     pub fn replica_id(&self) -> ReplicaId {
-        match &self.client_state {
-            Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
+        match self.client_state {
+            ProjectClientState::Remote { replica_id, .. } => replica_id,
             _ => 0,
         }
     }
 
     fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
-        if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
+        if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
             updates_tx
                 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                 .ok();
@@ -1362,7 +1365,7 @@ impl Project {
     }
 
     pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
-        if self.client_state.is_some() {
+        if !matches!(self.client_state, ProjectClientState::Local) {
             return Err(anyhow!("project was already shared"));
         }
         self.client_subscriptions.push(
@@ -1423,7 +1426,7 @@ impl Project {
 
         let (updates_tx, mut updates_rx) = mpsc::unbounded();
         let client = self.client.clone();
-        self.client_state = Some(ProjectClientState::Local {
+        self.client_state = ProjectClientState::Shared {
             remote_id: project_id,
             updates_tx,
             _send_updates: cx.spawn(move |this, mut cx| async move {
@@ -1508,7 +1511,7 @@ impl Project {
                 }
                 Ok(())
             }),
-        });
+        };
 
         self.metadata_changed(cx);
         cx.emit(Event::RemoteIdChanged(Some(project_id)));
@@ -1578,7 +1581,8 @@ impl Project {
             return Err(anyhow!("attempted to unshare a remote project"));
         }
 
-        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
+        if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
+            self.client_state = ProjectClientState::Local;
             self.collaborators.clear();
             self.shared_buffers.clear();
             self.client_subscriptions.clear();
@@ -1629,23 +1633,23 @@ impl Project {
             } else {
                 Capability::ReadOnly
             };
-        if let Some(ProjectClientState::Remote { capability, .. }) = &mut self.client_state {
+        if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
             if *capability == new_capability {
                 return;
             }
 
             *capability = new_capability;
-        }
-        for buffer in self.opened_buffers() {
-            buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
+            for buffer in self.opened_buffers() {
+                buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
+            }
         }
     }
 
     fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
-        if let Some(ProjectClientState::Remote {
+        if let ProjectClientState::Remote {
             sharing_has_stopped,
             ..
-        }) = &mut self.client_state
+        } = &mut self.client_state
         {
             *sharing_has_stopped = true;
 
@@ -1684,18 +1688,18 @@ impl Project {
 
     pub fn is_disconnected(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Remote {
+            ProjectClientState::Remote {
                 sharing_has_stopped,
                 ..
-            }) => *sharing_has_stopped,
+            } => *sharing_has_stopped,
             _ => false,
         }
     }
 
     pub fn capability(&self) -> Capability {
         match &self.client_state {
-            Some(ProjectClientState::Remote { capability, .. }) => *capability,
-            Some(ProjectClientState::Local { .. }) | None => Capability::ReadWrite,
+            ProjectClientState::Remote { capability, .. } => *capability,
+            ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
         }
     }
 
@@ -1705,8 +1709,8 @@ impl Project {
 
     pub fn is_local(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Remote { .. }) => false,
-            _ => true,
+            ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
+            ProjectClientState::Remote { .. } => false,
         }
     }
 
@@ -6165,8 +6169,8 @@ impl Project {
 
     pub fn is_shared(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Local { .. }) => true,
-            _ => false,
+            ProjectClientState::Shared { .. } => true,
+            ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
         }
     }
 
@@ -7954,7 +7958,7 @@ impl Project {
         cx: &mut AppContext,
     ) -> u64 {
         let buffer_id = buffer.read(cx).remote_id();
-        if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
+        if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
             updates_tx
                 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
                 .ok();
@@ -8003,21 +8007,21 @@ impl Project {
     }
 
     fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let project_id = match self.client_state.as_ref() {
-            Some(ProjectClientState::Remote {
+        let project_id = match self.client_state {
+            ProjectClientState::Remote {
                 sharing_has_stopped,
                 remote_id,
                 ..
-            }) => {
-                if *sharing_has_stopped {
+            } => {
+                if sharing_has_stopped {
                     return Task::ready(Err(anyhow!(
                         "can't synchronize remote buffers on a readonly project"
                     )));
                 } else {
-                    *remote_id
+                    remote_id
                 }
             }
-            Some(ProjectClientState::Local { .. }) | None => {
+            ProjectClientState::Shared { .. } | ProjectClientState::Local => {
                 return Task::ready(Err(anyhow!(
                     "can't synchronize remote buffers on a local project"
                 )))
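
Most of the project.rs churn is mechanical: the old Option<ProjectClientState>, in which the Local variant doubled as "shared", becomes a single non-optional enum where plain-local, shared, and remote are all explicit states. A simplified, self-contained illustration of the shape of that refactor (the field set is trimmed down; this is not the real struct):

    #[derive(Debug)]
    enum ClientState {
        Local,
        Shared { remote_id: u64 },
        Remote { remote_id: u64, sharing_has_stopped: bool },
    }

    impl ClientState {
        fn remote_id(&self) -> Option<u64> {
            match self {
                ClientState::Local => None,
                ClientState::Shared { remote_id } | ClientState::Remote { remote_id, .. } => {
                    Some(*remote_id)
                }
            }
        }

        fn is_shared(&self) -> bool {
            matches!(self, ClientState::Shared { .. })
        }
    }

    fn main() {
        // Sharing moves Local -> Shared; unsharing returns to the explicit
        // Local state instead of setting an Option back to None.
        let mut state = ClientState::Shared { remote_id: 7 };
        assert!(state.is_shared());
        assert_eq!(state.remote_id(), Some(7));
        state = ClientState::Local;
        assert!(!state.is_shared());
        assert_eq!(state.remote_id(), None);
        println!("ok: {:?}", state);
    }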

crates/workspace/src/workspace.rs

@@ -512,6 +512,11 @@ impl Workspace {
 
                 project::Event::DisconnectedFromHost => {
                     this.update_window_edited(cx);
+                    let panes_to_unfollow: Vec<View<Pane>> =
+                        this.follower_states.keys().map(|k| k.clone()).collect();
+                    for pane in panes_to_unfollow {
+                        this.unfollow(&pane, cx);
+                    }
                     cx.disable_focus();
                 }