Remove the `2` suffix from the `client`, `call`, and `channel` crates

Created by Max Brunsfeld and Mikayla

Co-authored-by: Mikayla <mikayla@zed.dev>

Change summary

Cargo.lock                                         |  160 -
Cargo.toml                                         |    3 
crates/assistant/Cargo.toml                        |    2 
crates/auto_update/Cargo.toml                      |    2 
crates/call/Cargo.toml                             |   18 
crates/call/src/call.rs                            |  136 
crates/call/src/call_settings.rs                   |   13 
crates/call/src/participant.rs                     |   25 
crates/call/src/room.rs                            |  336 +-
crates/call2/Cargo.toml                            |   54 
crates/call2/src/call2.rs                          |  543 -----
crates/call2/src/call_settings.rs                  |   32 
crates/call2/src/participant.rs                    |   52 
crates/call2/src/room.rs                           | 1599 ---------------
crates/channel/Cargo.toml                          |   16 
crates/channel/src/channel.rs                      |    4 
crates/channel/src/channel_buffer.rs               |   78 
crates/channel/src/channel_chat.rs                 |  123 
crates/channel/src/channel_store.rs                |  183 
crates/channel/src/channel_store/channel_index.rs  |    8 
crates/channel/src/channel_store_tests.rs          |   36 
crates/channel2/Cargo.toml                         |   54 
crates/channel2/src/channel2.rs                    |   23 
crates/channel2/src/channel_buffer.rs              |  257 --
crates/channel2/src/channel_chat.rs                |  645 ------
crates/channel2/src/channel_store.rs               | 1022 ---------
crates/channel2/src/channel_store/channel_index.rs |  184 -
crates/channel2/src/channel_store_tests.rs         |  380 ---
crates/client/Cargo.toml                           |   16 
crates/client/src/client.rs                        |  526 ++--
crates/client/src/telemetry.rs                     |   72 
crates/client/src/test.rs                          |   21 
crates/client/src/user.rs                          |  210 -
crates/client2/Cargo.toml                          |   53 
crates/client2/src/client2.rs                      | 1675 ----------------
crates/client2/src/telemetry.rs                    |  515 ----
crates/client2/src/test.rs                         |  214 --
crates/client2/src/user.rs                         |  694 ------
crates/collab2/Cargo.toml                          |    6 
crates/collab_ui/Cargo.toml                        |   10 
crates/diagnostics/Cargo.toml                      |    2 
crates/editor/Cargo.toml                           |    2 
crates/feedback/Cargo.toml                         |    2 
crates/language/Cargo.toml                         |    2 
crates/language_tools/Cargo.toml                   |    2 
crates/multi_buffer/Cargo.toml                     |    2 
crates/notifications2/Cargo.toml                   |    6 
crates/prettier2/Cargo.toml                        |    2 
crates/project/Cargo.toml                          |    4 
crates/project_panel/Cargo.toml                    |    2 
crates/search/Cargo.toml                           |    2 
crates/semantic_index2/Cargo.toml                  |    2 
crates/terminal_view/Cargo.toml                    |    2 
crates/theme_selector/Cargo.toml                   |    2 
crates/welcome/Cargo.toml                          |    2 
crates/workspace/Cargo.toml                        |    8 
crates/zed/Cargo.toml                              |    8 
57 files changed, 952 insertions(+), 9100 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -305,7 +305,7 @@ dependencies = [
  "ai",
  "anyhow",
  "chrono",
- "client2",
+ "client",
  "collections",
  "ctor",
  "editor",
@@ -687,7 +687,7 @@ name = "auto_update"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "db2",
  "gpui2",
  "isahc",
@@ -1116,37 +1116,11 @@ checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
 [[package]]
 name = "call"
 version = "0.1.0"
-dependencies = [
- "anyhow",
- "async-broadcast",
- "audio",
- "client",
- "collections",
- "fs",
- "futures 0.3.28",
- "gpui",
- "language",
- "live_kit_client",
- "log",
- "media",
- "postage",
- "project",
- "schemars",
- "serde",
- "serde_derive",
- "serde_json",
- "settings",
- "util",
-]
-
-[[package]]
-name = "call2"
-version = "0.1.0"
 dependencies = [
  "anyhow",
  "async-broadcast",
  "audio2",
- "client2",
+ "client",
  "collections",
  "fs2",
  "futures 0.3.28",
@@ -1236,43 +1210,6 @@ dependencies = [
  "client",
  "clock",
  "collections",
- "db",
- "feature_flags",
- "futures 0.3.28",
- "gpui",
- "image",
- "language",
- "lazy_static",
- "log",
- "parking_lot 0.11.2",
- "postage",
- "rand 0.8.5",
- "rpc",
- "schemars",
- "serde",
- "serde_derive",
- "settings",
- "smallvec",
- "smol",
- "sum_tree",
- "tempfile",
- "text",
- "thiserror",
- "time",
- "tiny_http",
- "url",
- "util",
- "uuid 1.4.1",
-]
-
-[[package]]
-name = "channel2"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "client2",
- "clock",
- "collections",
  "db2",
  "feature_flags",
  "futures 0.3.28",
@@ -1441,43 +1378,6 @@ dependencies = [
 [[package]]
 name = "client"
 version = "0.1.0"
-dependencies = [
- "anyhow",
- "async-recursion 0.3.2",
- "async-tungstenite",
- "chrono",
- "collections",
- "db",
- "feature_flags",
- "futures 0.3.28",
- "gpui",
- "image",
- "lazy_static",
- "log",
- "parking_lot 0.11.2",
- "postage",
- "rand 0.8.5",
- "rpc",
- "schemars",
- "serde",
- "serde_derive",
- "settings",
- "smol",
- "sum_tree",
- "sysinfo",
- "tempfile",
- "text",
- "thiserror",
- "time",
- "tiny_http",
- "url",
- "util",
- "uuid 1.4.1",
-]
-
-[[package]]
-name = "client2"
-version = "0.1.0"
 dependencies = [
  "anyhow",
  "async-recursion 0.3.2",
@@ -1641,10 +1541,10 @@ dependencies = [
  "axum",
  "axum-extra",
  "base64 0.13.1",
- "call2",
- "channel2",
+ "call",
+ "channel",
  "clap 3.2.25",
- "client2",
+ "client",
  "clock",
  "collab_ui",
  "collections",
@@ -1709,9 +1609,9 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "auto_update",
- "call2",
- "channel2",
- "client2",
+ "call",
+ "channel",
+ "client",
  "clock",
  "collections",
  "db2",
@@ -2433,7 +2333,7 @@ name = "diagnostics"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "collections",
  "editor",
  "futures 0.3.28",
@@ -2594,7 +2494,7 @@ version = "0.1.0"
 dependencies = [
  "aho-corasick",
  "anyhow",
- "client2",
+ "client",
  "clock",
  "collections",
  "convert_case 0.6.0",
@@ -2823,7 +2723,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "bitflags 2.4.1",
- "client2",
+ "client",
  "db2",
  "editor",
  "futures 0.3.28",
@@ -4187,7 +4087,7 @@ dependencies = [
  "anyhow",
  "async-broadcast",
  "async-trait",
- "client2",
+ "client",
  "clock",
  "collections",
  "ctor",
@@ -4257,7 +4157,7 @@ name = "language_tools"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "collections",
  "editor",
  "env_logger",
@@ -4856,7 +4756,7 @@ version = "0.1.0"
 dependencies = [
  "aho-corasick",
  "anyhow",
- "client2",
+ "client",
  "clock",
  "collections",
  "convert_case 0.6.0",
@@ -5071,8 +4971,8 @@ name = "notifications2"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "channel2",
- "client2",
+ "channel",
+ "client",
  "clock",
  "collections",
  "db2",
@@ -5903,7 +5803,7 @@ name = "prettier2"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "collections",
  "fs2",
  "futures 0.3.28",
@@ -6010,7 +5910,7 @@ dependencies = [
  "anyhow",
  "async-trait",
  "backtrace",
- "client2",
+ "client",
  "clock",
  "collections",
  "copilot",
@@ -6062,7 +5962,7 @@ name = "project_panel"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "collections",
  "db2",
  "editor",
@@ -7265,7 +7165,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "bitflags 1.3.2",
- "client2",
+ "client",
  "collections",
  "editor",
  "futures 0.3.28",
@@ -7373,7 +7273,7 @@ dependencies = [
  "ai",
  "anyhow",
  "async-trait",
- "client2",
+ "client",
  "collections",
  "ctor",
  "env_logger",
@@ -8553,7 +8453,7 @@ name = "terminal_view"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "db2",
  "dirs 4.0.0",
  "editor",
@@ -8701,7 +8601,7 @@ dependencies = [
 name = "theme_selector"
 version = "0.1.0"
 dependencies = [
- "client2",
+ "client",
  "editor",
  "feature_flags",
  "fs2",
@@ -10215,7 +10115,7 @@ name = "welcome"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "client2",
+ "client",
  "db2",
  "editor",
  "fs2",
@@ -10482,8 +10382,8 @@ dependencies = [
  "anyhow",
  "async-recursion 1.0.5",
  "bincode",
- "call2",
- "client2",
+ "call",
+ "client",
  "collections",
  "db2",
  "env_logger",
@@ -10607,11 +10507,11 @@ dependencies = [
  "auto_update",
  "backtrace",
  "breadcrumbs",
- "call2",
- "channel2",
+ "call",
+ "channel",
  "chrono",
  "cli",
- "client2",
+ "client",
  "collab_ui",
  "collections",
  "command_palette",

Cargo.toml 🔗

@@ -8,12 +8,9 @@ members = [
     "crates/auto_update",
     "crates/breadcrumbs",
     "crates/call",
-    "crates/call2",
     "crates/channel",
-    "crates/channel2",
     "crates/cli",
     "crates/client",
-    "crates/client2",
     "crates/clock",
     "crates/collab",
     "crates/collab2",

crates/assistant/Cargo.toml 🔗

@@ -10,7 +10,7 @@ doctest = false
 
 [dependencies]
 ai = { path = "../ai" }
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 collections = { path = "../collections"}
 editor = { path = "../editor" }
 fs = { package = "fs2", path = "../fs2" }

crates/auto_update/Cargo.toml 🔗

@@ -10,7 +10,7 @@ doctest = false
 
 [dependencies]
 db = { package = "db2", path = "../db2" }
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 gpui = { package = "gpui2", path = "../gpui2" }
 menu = { package = "menu2", path = "../menu2" }
 project = { path = "../project" }

crates/call/Cargo.toml 🔗

@@ -19,34 +19,36 @@ test-support = [
 ]
 
 [dependencies]
-audio = { path = "../audio" }
+audio = { package = "audio2", path = "../audio2" }
 client = { path = "../client" }
 collections = { path = "../collections" }
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
 log.workspace = true
-live_kit_client = { path = "../live_kit_client" }
-fs = { path = "../fs" }
+live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
+fs = { package = "fs2", path = "../fs2" }
 language = { path = "../language" }
 media = { path = "../media" }
 project = { path = "../project" }
-settings = { path = "../settings" }
+settings = { package = "settings2", path = "../settings2" }
 util = { path = "../util" }
 
 anyhow.workspace = true
 async-broadcast = "0.4"
 futures.workspace = true
+image = "0.23"
 postage.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 serde_derive.workspace = true
+smallvec.workspace = true
 
 [dev-dependencies]
 client = { path = "../client", features = ["test-support"] }
-fs = { path = "../fs", features = ["test-support"] }
+fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
-live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

crates/call/src/call.rs 🔗

@@ -9,31 +9,25 @@ use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, Z
 use collections::HashSet;
 use futures::{channel::oneshot, future::Shared, Future, FutureExt};
 use gpui::{
-    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
-    WeakModelHandle,
+    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
+    WeakModel,
 };
 use postage::watch;
 use project::Project;
+use room::Event;
+use settings::Settings;
 use std::sync::Arc;
 
 pub use participant::ParticipantLocation;
 pub use room::Room;
 
-pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
-    settings::register::<CallSettings>(cx);
+pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
+    CallSettings::register(cx);
 
-    let active_call = cx.add_model(|cx| ActiveCall::new(client, user_store, cx));
+    let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
     cx.set_global(active_call);
 }
 
-#[derive(Clone)]
-pub struct IncomingCall {
-    pub room_id: u64,
-    pub calling_user: Arc<User>,
-    pub participants: Vec<Arc<User>>,
-    pub initial_project: Option<proto::ParticipantProject>,
-}
-
 pub struct OneAtATime {
     cancel: Option<oneshot::Sender<()>>,
 }
@@ -65,43 +59,44 @@ impl OneAtATime {
     }
 }
 
+#[derive(Clone)]
+pub struct IncomingCall {
+    pub room_id: u64,
+    pub calling_user: Arc<User>,
+    pub participants: Vec<Arc<User>>,
+    pub initial_project: Option<proto::ParticipantProject>,
+}
+
 /// Singleton global maintaining the user's participation in a room across workspaces.
 pub struct ActiveCall {
-    room: Option<(ModelHandle<Room>, Vec<Subscription>)>,
-    pending_room_creation: Option<Shared<Task<Result<ModelHandle<Room>, Arc<anyhow::Error>>>>>,
+    room: Option<(Model<Room>, Vec<Subscription>)>,
+    pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
+    location: Option<WeakModel<Project>>,
     _join_debouncer: OneAtATime,
-    location: Option<WeakModelHandle<Project>>,
     pending_invites: HashSet<u64>,
     incoming_call: (
         watch::Sender<Option<IncomingCall>>,
         watch::Receiver<Option<IncomingCall>>,
     ),
     client: Arc<Client>,
-    user_store: ModelHandle<UserStore>,
+    user_store: Model<UserStore>,
     _subscriptions: Vec<client::Subscription>,
 }
 
-impl Entity for ActiveCall {
-    type Event = room::Event;
-}
+impl EventEmitter<Event> for ActiveCall {}
 
 impl ActiveCall {
-    fn new(
-        client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
+    fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
         Self {
             room: None,
             pending_room_creation: None,
             location: None,
             pending_invites: Default::default(),
             incoming_call: watch::channel(),
-
             _join_debouncer: OneAtATime { cancel: None },
             _subscriptions: vec![
-                client.add_request_handler(cx.handle(), Self::handle_incoming_call),
-                client.add_message_handler(cx.handle(), Self::handle_call_canceled),
+                client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
+                client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
             ],
             client,
             user_store,
@@ -113,35 +108,35 @@ impl ActiveCall {
     }
 
     async fn handle_incoming_call(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         envelope: TypedEnvelope<proto::IncomingCall>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
     ) -> Result<proto::Ack> {
-        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
+        let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
         let call = IncomingCall {
             room_id: envelope.payload.room_id,
             participants: user_store
                 .update(&mut cx, |user_store, cx| {
                     user_store.get_users(envelope.payload.participant_user_ids, cx)
-                })
+                })?
                 .await?,
             calling_user: user_store
                 .update(&mut cx, |user_store, cx| {
                     user_store.get_user(envelope.payload.calling_user_id, cx)
-                })
+                })?
                 .await?,
             initial_project: envelope.payload.initial_project,
         };
         this.update(&mut cx, |this, _| {
             *this.incoming_call.0.borrow_mut() = Some(call);
-        });
+        })?;
 
         Ok(proto::Ack {})
     }
 
     async fn handle_call_canceled(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         envelope: TypedEnvelope<proto::CallCanceled>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -154,18 +149,18 @@ impl ActiveCall {
             {
                 incoming_call.take();
             }
-        });
+        })?;
         Ok(())
     }
 
-    pub fn global(cx: &AppContext) -> ModelHandle<Self> {
-        cx.global::<ModelHandle<Self>>().clone()
+    pub fn global(cx: &AppContext) -> Model<Self> {
+        cx.global::<Model<Self>>().clone()
     }
 
     pub fn invite(
         &mut self,
         called_user_id: u64,
-        initial_project: Option<ModelHandle<Project>>,
+        initial_project: Option<Model<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         if !self.pending_invites.insert(called_user_id) {
@@ -184,21 +179,21 @@ impl ActiveCall {
         };
 
         let invite = if let Some(room) = room {
-            cx.spawn_weak(|_, mut cx| async move {
+            cx.spawn(move |_, mut cx| async move {
                 let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
 
                 let initial_project_id = if let Some(initial_project) = initial_project {
                     Some(
-                        room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))
+                        room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
                             .await?,
                     )
                 } else {
                     None
                 };
 
-                room.update(&mut cx, |room, cx| {
+                room.update(&mut cx, move |room, cx| {
                     room.call(called_user_id, initial_project_id, cx)
-                })
+                })?
                 .await?;
 
                 anyhow::Ok(())
@@ -207,7 +202,7 @@ impl ActiveCall {
             let client = self.client.clone();
             let user_store = self.user_store.clone();
             let room = cx
-                .spawn(|this, mut cx| async move {
+                .spawn(move |this, mut cx| async move {
                     let create_room = async {
                         let room = cx
                             .update(|cx| {
@@ -218,31 +213,31 @@ impl ActiveCall {
                                     user_store,
                                     cx,
                                 )
-                            })
+                            })?
                             .await?;
 
-                        this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
+                        this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
                             .await?;
 
                         anyhow::Ok(room)
                     };
 
                     let room = create_room.await;
-                    this.update(&mut cx, |this, _| this.pending_room_creation = None);
+                    this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
                     room.map_err(Arc::new)
                 })
                 .shared();
             self.pending_room_creation = Some(room.clone());
-            cx.foreground().spawn(async move {
+            cx.background_executor().spawn(async move {
                 room.await.map_err(|err| anyhow!("{:?}", err))?;
                 anyhow::Ok(())
             })
         };
 
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let result = invite.await;
             if result.is_ok() {
-                this.update(&mut cx, |this, cx| this.report_call_event("invite", cx));
+                this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
             } else {
                 // TODO: Resport collaboration error
             }
@@ -250,7 +245,7 @@ impl ActiveCall {
             this.update(&mut cx, |this, cx| {
                 this.pending_invites.remove(&called_user_id);
                 cx.notify();
-            });
+            })?;
             result
         })
     }
@@ -267,7 +262,7 @@ impl ActiveCall {
         };
 
         let client = self.client.clone();
-        cx.foreground().spawn(async move {
+        cx.background_executor().spawn(async move {
             client
                 .request(proto::CancelCall {
                     room_id,
@@ -306,11 +301,11 @@ impl ActiveCall {
 
         cx.spawn(|this, mut cx| async move {
             let room = join.await?;
-            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
+            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
                 .await?;
             this.update(&mut cx, |this, cx| {
                 this.report_call_event("accept incoming", cx)
-            });
+            })?;
             Ok(())
         })
     }
@@ -333,7 +328,7 @@ impl ActiveCall {
         &mut self,
         channel_id: u64,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Option<ModelHandle<Room>>>> {
+    ) -> Task<Result<Option<Model<Room>>>> {
         if let Some(room) = self.room().cloned() {
             if room.read(cx).channel_id() == Some(channel_id) {
                 return Task::ready(Ok(Some(room)));
@@ -352,13 +347,13 @@ impl ActiveCall {
             Room::join_channel(channel_id, client, user_store, cx).await
         });
 
-        cx.spawn(move |this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             let room = join.await?;
-            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
+            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
                 .await?;
             this.update(&mut cx, |this, cx| {
                 this.report_call_event("join channel", cx)
-            });
+            })?;
             Ok(room)
         })
     }
@@ -366,6 +361,7 @@ impl ActiveCall {
     pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
         cx.notify();
         self.report_call_event("hang up", cx);
+
         Audio::end_call(cx);
         if let Some((room, _)) = self.room.take() {
             room.update(cx, |room, cx| room.leave(cx))
@@ -376,7 +372,7 @@ impl ActiveCall {
 
     pub fn share_project(
         &mut self,
-        project: ModelHandle<Project>,
+        project: Model<Project>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<u64>> {
         if let Some((room, _)) = self.room.as_ref() {
@@ -389,7 +385,7 @@ impl ActiveCall {
 
     pub fn unshare_project(
         &mut self,
-        project: ModelHandle<Project>,
+        project: Model<Project>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         if let Some((room, _)) = self.room.as_ref() {
@@ -400,13 +396,13 @@ impl ActiveCall {
         }
     }
 
-    pub fn location(&self) -> Option<&WeakModelHandle<Project>> {
+    pub fn location(&self) -> Option<&WeakModel<Project>> {
         self.location.as_ref()
     }
 
     pub fn set_location(
         &mut self,
-        project: Option<&ModelHandle<Project>>,
+        project: Option<&Model<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         if project.is_some() || !*ZED_ALWAYS_ACTIVE {
@@ -420,7 +416,7 @@ impl ActiveCall {
 
     fn set_room(
         &mut self,
-        room: Option<ModelHandle<Room>>,
+        room: Option<Model<Room>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
@@ -441,7 +437,10 @@ impl ActiveCall {
                         cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
                     ];
                     self.room = Some((room.clone(), subscriptions));
-                    let location = self.location.and_then(|location| location.upgrade(cx));
+                    let location = self
+                        .location
+                        .as_ref()
+                        .and_then(|location| location.upgrade());
                     room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
                 }
             } else {
@@ -453,7 +452,7 @@ impl ActiveCall {
         }
     }
 
-    pub fn room(&self) -> Option<&ModelHandle<Room>> {
+    pub fn room(&self) -> Option<&Model<Room>> {
         self.room.as_ref().map(|(room, _)| room)
     }
 
@@ -465,7 +464,7 @@ impl ActiveCall {
         &self.pending_invites
     }
 
-    pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
+    pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
         if let Some(room) = self.room() {
             let room = room.read(cx);
             report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
@@ -478,10 +477,10 @@ pub fn report_call_event_for_room(
     room_id: u64,
     channel_id: Option<u64>,
     client: &Arc<Client>,
-    cx: &AppContext,
+    cx: &mut AppContext,
 ) {
     let telemetry = client.telemetry();
-    let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
+    let telemetry_settings = *TelemetrySettings::get_global(cx);
 
     telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
 }
@@ -495,7 +494,8 @@ pub fn report_call_event_for_channel(
     let room = ActiveCall::global(cx).read(cx).room();
 
     let telemetry = client.telemetry();
-    let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
+
+    let telemetry_settings = *TelemetrySettings::get_global(cx);
 
     telemetry.report_call_event(
         telemetry_settings,

crates/call/src/call_settings.rs 🔗

@@ -1,6 +1,8 @@
+use anyhow::Result;
+use gpui::AppContext;
 use schemars::JsonSchema;
 use serde_derive::{Deserialize, Serialize};
-use settings::Setting;
+use settings::Settings;
 
 #[derive(Deserialize, Debug)]
 pub struct CallSettings {
@@ -12,7 +14,7 @@ pub struct CallSettingsContent {
     pub mute_on_join: Option<bool>,
 }
 
-impl Setting for CallSettings {
+impl Settings for CallSettings {
     const KEY: Option<&'static str> = Some("calls");
 
     type FileContent = CallSettingsContent;
@@ -20,8 +22,11 @@ impl Setting for CallSettings {
     fn load(
         default_value: &Self::FileContent,
         user_values: &[&Self::FileContent],
-        _: &gpui::AppContext,
-    ) -> anyhow::Result<Self> {
+        _cx: &mut AppContext,
+    ) -> Result<Self>
+    where
+        Self: Sized,
+    {
         Self::load_via_json_merge(default_value, user_values)
     }
 }

crates/call/src/participant.rs 🔗

@@ -2,11 +2,11 @@ use anyhow::{anyhow, Result};
 use client::ParticipantIndex;
 use client::{proto, User};
 use collections::HashMap;
-use gpui::WeakModelHandle;
+use gpui::WeakModel;
 pub use live_kit_client::Frame;
-use live_kit_client::RemoteAudioTrack;
+pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
 use project::Project;
-use std::{fmt, sync::Arc};
+use std::sync::Arc;
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum ParticipantLocation {
@@ -35,7 +35,7 @@ impl ParticipantLocation {
 #[derive(Clone, Default)]
 pub struct LocalParticipant {
     pub projects: Vec<proto::ParticipantProject>,
-    pub active_project: Option<WeakModelHandle<Project>>,
+    pub active_project: Option<WeakModel<Project>>,
 }
 
 #[derive(Clone, Debug)]
@@ -50,20 +50,3 @@ pub struct RemoteParticipant {
     pub video_tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
     pub audio_tracks: HashMap<live_kit_client::Sid, Arc<RemoteAudioTrack>>,
 }
-
-#[derive(Clone)]
-pub struct RemoteVideoTrack {
-    pub(crate) live_kit_track: Arc<live_kit_client::RemoteVideoTrack>,
-}
-
-impl fmt::Debug for RemoteVideoTrack {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("RemoteVideoTrack").finish()
-    }
-}
-
-impl RemoteVideoTrack {
-    pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
-        self.live_kit_track.frames()
-    }
-}

crates/call/src/room.rs 🔗

@@ -1,6 +1,6 @@
 use crate::{
     call_settings::CallSettings,
-    participant::{LocalParticipant, ParticipantLocation, RemoteParticipant, RemoteVideoTrack},
+    participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
 };
 use anyhow::{anyhow, Result};
 use audio::{Audio, Sound};
@@ -11,7 +11,9 @@ use client::{
 use collections::{BTreeMap, HashMap, HashSet};
 use fs::Fs;
 use futures::{FutureExt, StreamExt};
-use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
+use gpui::{
+    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
+};
 use language::LanguageRegistry;
 use live_kit_client::{
     LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RemoteAudioTrackUpdate,
@@ -19,7 +21,8 @@ use live_kit_client::{
 };
 use postage::{sink::Sink, stream::Stream, watch};
 use project::Project;
-use std::{future::Future, mem, pin::Pin, sync::Arc, time::Duration};
+use settings::Settings as _;
+use std::{future::Future, mem, sync::Arc, time::Duration};
 use util::{post_inc, ResultExt, TryFutureExt};
 
 pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -54,11 +57,11 @@ pub enum Event {
 
 pub struct Room {
     id: u64,
-    pub channel_id: Option<u64>,
+    channel_id: Option<u64>,
     live_kit: Option<LiveKitRoom>,
     status: RoomStatus,
-    shared_projects: HashSet<WeakModelHandle<Project>>,
-    joined_projects: HashSet<WeakModelHandle<Project>>,
+    shared_projects: HashSet<WeakModel<Project>>,
+    joined_projects: HashSet<WeakModel<Project>>,
     local_participant: LocalParticipant,
     remote_participants: BTreeMap<u64, RemoteParticipant>,
     pending_participants: Vec<Arc<User>>,
@@ -66,39 +69,17 @@ pub struct Room {
     pending_call_count: usize,
     leave_when_empty: bool,
     client: Arc<Client>,
-    user_store: ModelHandle<UserStore>,
+    user_store: Model<UserStore>,
     follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec<PeerId>>,
-    subscriptions: Vec<client::Subscription>,
+    client_subscriptions: Vec<client::Subscription>,
+    _subscriptions: Vec<gpui::Subscription>,
     room_update_completed_tx: watch::Sender<Option<()>>,
     room_update_completed_rx: watch::Receiver<Option<()>>,
     pending_room_update: Option<Task<()>>,
     maintain_connection: Option<Task<Option<()>>>,
 }
 
-impl Entity for Room {
-    type Event = Event;
-
-    fn release(&mut self, cx: &mut AppContext) {
-        if self.status.is_online() {
-            self.leave_internal(cx).detach_and_log_err(cx);
-        }
-    }
-
-    fn app_will_quit(&mut self, cx: &mut AppContext) -> Option<Pin<Box<dyn Future<Output = ()>>>> {
-        if self.status.is_online() {
-            let leave = self.leave_internal(cx);
-            Some(
-                cx.background()
-                    .spawn(async move {
-                        leave.await.log_err();
-                    })
-                    .boxed(),
-            )
-        } else {
-            None
-        }
-    }
-}
+impl EventEmitter<Event> for Room {}
 
 impl Room {
     pub fn channel_id(&self) -> Option<u64> {
@@ -121,16 +102,12 @@ impl Room {
         }
     }
 
-    pub fn can_publish(&self) -> bool {
-        self.live_kit.as_ref().is_some_and(|room| room.can_publish)
-    }
-
     fn new(
         id: u64,
         channel_id: Option<u64>,
         live_kit_connection_info: Option<proto::LiveKitConnectionInfo>,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
         let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
@@ -138,69 +115,75 @@ impl Room {
             let mut status = room.status();
             // Consume the initial status of the room.
             let _ = status.try_recv();
-            let _maintain_room = cx.spawn_weak(|this, mut cx| async move {
+            let _maintain_room = cx.spawn(|this, mut cx| async move {
                 while let Some(status) = status.next().await {
-                    let this = if let Some(this) = this.upgrade(&cx) {
+                    let this = if let Some(this) = this.upgrade() {
                         this
                     } else {
                         break;
                     };
 
                     if status == live_kit_client::ConnectionState::Disconnected {
-                        this.update(&mut cx, |this, cx| this.leave(cx).log_err());
+                        this.update(&mut cx, |this, cx| this.leave(cx).log_err())
+                            .ok();
                         break;
                     }
                 }
             });
 
-            let mut track_video_changes = room.remote_video_track_updates();
-            let _maintain_video_tracks = cx.spawn_weak(|this, mut cx| async move {
-                while let Some(track_change) = track_video_changes.next().await {
-                    let this = if let Some(this) = this.upgrade(&cx) {
-                        this
-                    } else {
-                        break;
-                    };
+            let _maintain_video_tracks = cx.spawn({
+                let room = room.clone();
+                move |this, mut cx| async move {
+                    let mut track_video_changes = room.remote_video_track_updates();
+                    while let Some(track_change) = track_video_changes.next().await {
+                        let this = if let Some(this) = this.upgrade() {
+                            this
+                        } else {
+                            break;
+                        };
 
-                    this.update(&mut cx, |this, cx| {
-                        this.remote_video_track_updated(track_change, cx).log_err()
-                    });
+                        this.update(&mut cx, |this, cx| {
+                            this.remote_video_track_updated(track_change, cx).log_err()
+                        })
+                        .ok();
+                    }
                 }
             });
 
-            let mut track_audio_changes = room.remote_audio_track_updates();
-            let _maintain_audio_tracks = cx.spawn_weak(|this, mut cx| async move {
-                while let Some(track_change) = track_audio_changes.next().await {
-                    let this = if let Some(this) = this.upgrade(&cx) {
-                        this
-                    } else {
-                        break;
-                    };
+            let _maintain_audio_tracks = cx.spawn({
+                let room = room.clone();
+                |this, mut cx| async move {
+                    let mut track_audio_changes = room.remote_audio_track_updates();
+                    while let Some(track_change) = track_audio_changes.next().await {
+                        let this = if let Some(this) = this.upgrade() {
+                            this
+                        } else {
+                            break;
+                        };
 
-                    this.update(&mut cx, |this, cx| {
-                        this.remote_audio_track_updated(track_change, cx).log_err()
-                    });
+                        this.update(&mut cx, |this, cx| {
+                            this.remote_audio_track_updated(track_change, cx).log_err()
+                        })
+                        .ok();
+                    }
                 }
             });
 
             let connect = room.connect(&connection_info.server_url, &connection_info.token);
-            if connection_info.can_publish {
-                cx.spawn(|this, mut cx| async move {
-                    connect.await?;
+            cx.spawn(|this, mut cx| async move {
+                connect.await?;
 
-                    if !cx.read(Self::mute_on_join) {
-                        this.update(&mut cx, |this, cx| this.share_microphone(cx))
-                            .await?;
-                    }
+                if !cx.update(|cx| Self::mute_on_join(cx))? {
+                    this.update(&mut cx, |this, cx| this.share_microphone(cx))?
+                        .await?;
+                }
 
-                    anyhow::Ok(())
-                })
-                .detach_and_log_err(cx);
-            }
+                anyhow::Ok(())
+            })
+            .detach_and_log_err(cx);
 
             Some(LiveKitRoom {
                 room,
-                can_publish: connection_info.can_publish,
                 screen_track: LocalTrack::None,
                 microphone_track: LocalTrack::None,
                 next_publish_id: 0,
@@ -214,8 +197,10 @@ impl Room {
             None
         };
 
-        let maintain_connection =
-            cx.spawn_weak(|this, cx| Self::maintain_connection(this, client.clone(), cx).log_err());
+        let maintain_connection = cx.spawn({
+            let client = client.clone();
+            move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
+        });
 
         Audio::play_sound(Sound::Joined, cx);
 
@@ -233,7 +218,13 @@ impl Room {
             remote_participants: Default::default(),
             pending_participants: Default::default(),
             pending_call_count: 0,
-            subscriptions: vec![client.add_message_handler(cx.handle(), Self::handle_room_updated)],
+            client_subscriptions: vec![
+                client.add_message_handler(cx.weak_model(), Self::handle_room_updated)
+            ],
+            _subscriptions: vec![
+                cx.on_release(Self::released),
+                cx.on_app_quit(Self::app_will_quit),
+            ],
             leave_when_empty: false,
             pending_room_update: None,
             client,
@@ -247,15 +238,15 @@ impl Room {
 
     pub(crate) fn create(
         called_user_id: u64,
-        initial_project: Option<ModelHandle<Project>>,
+        initial_project: Option<Model<Project>>,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         cx: &mut AppContext,
-    ) -> Task<Result<ModelHandle<Self>>> {
-        cx.spawn(|mut cx| async move {
+    ) -> Task<Result<Model<Self>>> {
+        cx.spawn(move |mut cx| async move {
             let response = client.request(proto::CreateRoom {}).await?;
             let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
-            let room = cx.add_model(|cx| {
+            let room = cx.new_model(|cx| {
                 Self::new(
                     room_proto.id,
                     None,
@@ -264,13 +255,13 @@ impl Room {
                     user_store,
                     cx,
                 )
-            });
+            })?;
 
             let initial_project_id = if let Some(initial_project) = initial_project {
                 let initial_project_id = room
                     .update(&mut cx, |room, cx| {
                         room.share_project(initial_project.clone(), cx)
-                    })
+                    })?
                     .await?;
                 Some(initial_project_id)
             } else {
@@ -281,7 +272,7 @@ impl Room {
                 .update(&mut cx, |room, cx| {
                     room.leave_when_empty = true;
                     room.call(called_user_id, initial_project_id, cx)
-                })
+                })?
                 .await
             {
                 Ok(()) => Ok(room),
@@ -293,9 +284,9 @@ impl Room {
     pub(crate) async fn join_channel(
         channel_id: u64,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         cx: AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
+    ) -> Result<Model<Self>> {
         Self::from_join_response(
             client.request(proto::JoinChannel { channel_id }).await?,
             client,
@@ -307,9 +298,9 @@ impl Room {
     pub(crate) async fn join(
         room_id: u64,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         cx: AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
+    ) -> Result<Model<Self>> {
         Self::from_join_response(
             client.request(proto::JoinRoom { id: room_id }).await?,
             client,
@@ -318,18 +309,41 @@ impl Room {
         )
     }
 
+    fn released(&mut self, cx: &mut AppContext) {
+        if self.status.is_online() {
+            self.leave_internal(cx).detach_and_log_err(cx);
+        }
+    }
+
+    fn app_will_quit(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
+        let task = if self.status.is_online() {
+            let leave = self.leave_internal(cx);
+            Some(cx.background_executor().spawn(async move {
+                leave.await.log_err();
+            }))
+        } else {
+            None
+        };
+
+        async move {
+            if let Some(task) = task {
+                task.await;
+            }
+        }
+    }
+
     pub fn mute_on_join(cx: &AppContext) -> bool {
-        settings::get::<CallSettings>(cx).mute_on_join || client::IMPERSONATE_LOGIN.is_some()
+        CallSettings::get_global(cx).mute_on_join || client::IMPERSONATE_LOGIN.is_some()
     }
 
     fn from_join_response(
         response: proto::JoinRoomResponse,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         mut cx: AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
+    ) -> Result<Model<Self>> {
         let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
-        let room = cx.add_model(|cx| {
+        let room = cx.new_model(|cx| {
             Self::new(
                 room_proto.id,
                 response.channel_id,
@@ -338,12 +352,12 @@ impl Room {
                 user_store,
                 cx,
             )
-        });
+        })?;
         room.update(&mut cx, |room, cx| {
             room.leave_when_empty = room.channel_id.is_none();
             room.apply_room_update(room_proto, cx)?;
             anyhow::Ok(())
-        })?;
+        })??;
         Ok(room)
     }
 
@@ -372,7 +386,7 @@ impl Room {
         self.clear_state(cx);
 
         let leave_room = self.client.request(proto::LeaveRoom {});
-        cx.background().spawn(async move {
+        cx.background_executor().spawn(async move {
             leave_room.await?;
             anyhow::Ok(())
         })
@@ -380,14 +394,14 @@ impl Room {
 
     pub(crate) fn clear_state(&mut self, cx: &mut AppContext) {
         for project in self.shared_projects.drain() {
-            if let Some(project) = project.upgrade(cx) {
+            if let Some(project) = project.upgrade() {
                 project.update(cx, |project, cx| {
                     project.unshare(cx).log_err();
                 });
             }
         }
         for project in self.joined_projects.drain() {
-            if let Some(project) = project.upgrade(cx) {
+            if let Some(project) = project.upgrade() {
                 project.update(cx, |project, cx| {
                     project.disconnected_from_host(cx);
                     project.close(cx);
@@ -399,14 +413,14 @@ impl Room {
         self.remote_participants.clear();
         self.pending_participants.clear();
         self.participant_user_ids.clear();
-        self.subscriptions.clear();
+        self.client_subscriptions.clear();
         self.live_kit.take();
         self.pending_room_update.take();
         self.maintain_connection.take();
     }
 
     async fn maintain_connection(
-        this: WeakModelHandle<Self>,
+        this: WeakModel<Self>,
         client: Arc<Client>,
         mut cx: AsyncAppContext,
     ) -> Result<()> {
@@ -418,32 +432,33 @@ impl Room {
             if !is_connected || client_status.next().await.is_some() {
                 log::info!("detected client disconnection");
 
-                this.upgrade(&cx)
+                this.upgrade()
                     .ok_or_else(|| anyhow!("room was dropped"))?
                     .update(&mut cx, |this, cx| {
                         this.status = RoomStatus::Rejoining;
                         cx.notify();
-                    });
+                    })?;
 
                 // Wait for client to re-establish a connection to the server.
                 {
-                    let mut reconnection_timeout = cx.background().timer(RECONNECT_TIMEOUT).fuse();
+                    let mut reconnection_timeout =
+                        cx.background_executor().timer(RECONNECT_TIMEOUT).fuse();
                     let client_reconnection = async {
                         let mut remaining_attempts = 3;
                         while remaining_attempts > 0 {
                             if client_status.borrow().is_connected() {
                                 log::info!("client reconnected, attempting to rejoin room");
 
-                                let Some(this) = this.upgrade(&cx) else { break };
-                                if this
-                                    .update(&mut cx, |this, cx| this.rejoin(cx))
-                                    .await
-                                    .log_err()
-                                    .is_some()
-                                {
-                                    return true;
-                                } else {
-                                    remaining_attempts -= 1;
+                                let Some(this) = this.upgrade() else { break };
+                                match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
+                                    Ok(task) => {
+                                        if task.await.log_err().is_some() {
+                                            return true;
+                                        } else {
+                                            remaining_attempts -= 1;
+                                        }
+                                    }
+                                    Err(_app_dropped) => return false,
                                 }
                             } else if client_status.borrow().is_signed_out() {
                                 return false;
@@ -482,9 +497,9 @@ impl Room {
         // The client failed to re-establish a connection to the server
         // or an error occurred while trying to re-join the room. Either way
         // we leave the room and return an error.
-        if let Some(this) = this.upgrade(&cx) {
+        if let Some(this) = this.upgrade() {
             log::info!("reconnection failed, leaving room");
-            let _ = this.update(&mut cx, |this, cx| this.leave(cx));
+            let _ = this.update(&mut cx, |this, cx| this.leave(cx))?;
         }
         Err(anyhow!(
             "can't reconnect to room: client failed to re-establish connection"
@@ -496,7 +511,7 @@ impl Room {
         let mut reshared_projects = Vec::new();
         let mut rejoined_projects = Vec::new();
         self.shared_projects.retain(|project| {
-            if let Some(handle) = project.upgrade(cx) {
+            if let Some(handle) = project.upgrade() {
                 let project = handle.read(cx);
                 if let Some(project_id) = project.remote_id() {
                     projects.insert(project_id, handle.clone());
@@ -510,14 +525,14 @@ impl Room {
             false
         });
         self.joined_projects.retain(|project| {
-            if let Some(handle) = project.upgrade(cx) {
+            if let Some(handle) = project.upgrade() {
                 let project = handle.read(cx);
                 if let Some(project_id) = project.remote_id() {
                     projects.insert(project_id, handle.clone());
                     rejoined_projects.push(proto::RejoinProject {
                         id: project_id,
                         worktrees: project
-                            .worktrees(cx)
+                            .worktrees()
                             .map(|worktree| {
                                 let worktree = worktree.read(cx);
                                 proto::RejoinWorktree {
@@ -565,7 +580,7 @@ impl Room {
                 }
 
                 anyhow::Ok(())
-            })
+            })?
         })
     }
 
@@ -643,7 +658,7 @@ impl Room {
     }
 
     async fn handle_room_updated(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         envelope: TypedEnvelope<proto::RoomUpdated>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -652,7 +667,7 @@ impl Room {
             .payload
             .room
             .ok_or_else(|| anyhow!("invalid room"))?;
-        this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))
+        this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
     }
 
     fn apply_room_update(
@@ -733,7 +748,7 @@ impl Room {
 
                         for unshared_project_id in old_projects.difference(&new_projects) {
                             this.joined_projects.retain(|project| {
-                                if let Some(project) = project.upgrade(cx) {
+                                if let Some(project) = project.upgrade() {
                                     project.update(cx, |project, cx| {
                                         if project.remote_id() == Some(*unshared_project_id) {
                                             project.disconnected_from_host(cx);
@@ -876,7 +891,8 @@ impl Room {
                 this.check_invariants();
                 this.room_update_completed_tx.try_send(Some(())).ok();
                 cx.notify();
-            });
+            })
+            .ok();
         }));
 
         cx.notify();
@@ -907,12 +923,7 @@ impl Room {
                     .remote_participants
                     .get_mut(&user_id)
                     .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
-                participant.video_tracks.insert(
-                    track_id.clone(),
-                    Arc::new(RemoteVideoTrack {
-                        live_kit_track: track,
-                    }),
-                );
+                participant.video_tracks.insert(track_id.clone(), track);
                 cx.emit(Event::RemoteVideoTracksChanged {
                     participant_id: participant.peer_id,
                 });
@@ -991,7 +1002,6 @@ impl Room {
                     .remote_participants
                     .get_mut(&user_id)
                     .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
-
                 participant.audio_tracks.insert(track_id.clone(), track);
                 participant.muted = publication.is_muted();
 
@@ -1053,7 +1063,7 @@ impl Room {
         let client = self.client.clone();
         let room_id = self.id;
         self.pending_call_count += 1;
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let result = client
                 .request(proto::Call {
                     room_id,
@@ -1066,7 +1076,7 @@ impl Room {
                 if this.should_leave() {
                     this.leave(cx).detach_and_log_err(cx);
                 }
-            });
+            })?;
             result?;
             Ok(())
         })
@@ -1078,31 +1088,31 @@ impl Room {
         language_registry: Arc<LanguageRegistry>,
         fs: Arc<dyn Fs>,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<Project>>> {
+    ) -> Task<Result<Model<Project>>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
         cx.emit(Event::RemoteProjectJoined { project_id: id });
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let project =
                 Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
 
             this.update(&mut cx, |this, cx| {
                 this.joined_projects.retain(|project| {
-                    if let Some(project) = project.upgrade(cx) {
+                    if let Some(project) = project.upgrade() {
                         !project.read(cx).is_read_only()
                     } else {
                         false
                     }
                 });
                 this.joined_projects.insert(project.downgrade());
-            });
+            })?;
             Ok(project)
         })
     }
 
     pub(crate) fn share_project(
         &mut self,
-        project: ModelHandle<Project>,
+        project: Model<Project>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<u64>> {
         if let Some(project_id) = project.read(cx).remote_id() {
@@ -1118,7 +1128,7 @@ impl Room {
 
             project.update(&mut cx, |project, cx| {
                 project.shared(response.project_id, cx)
-            })?;
+            })??;
 
             // If the user's location is in this project, it changes from UnsharedProject to SharedProject.
             this.update(&mut cx, |this, cx| {
@@ -1129,7 +1139,7 @@ impl Room {
                 } else {
                     Task::ready(Ok(()))
                 }
-            })
+            })?
             .await?;
 
             Ok(response.project_id)
@@ -1138,7 +1148,7 @@ impl Room {
 
     pub(crate) fn unshare_project(
         &mut self,
-        project: ModelHandle<Project>,
+        project: Model<Project>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         let project_id = match project.read(cx).remote_id() {
@@ -1152,7 +1162,7 @@ impl Room {
 
     pub(crate) fn set_location(
         &mut self,
-        project: Option<&ModelHandle<Project>>,
+        project: Option<&Model<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         if self.status.is_offline() {
@@ -1178,7 +1188,7 @@ impl Room {
         };
 
         cx.notify();
-        cx.foreground().spawn(async move {
+        cx.background_executor().spawn(async move {
             client
                 .request(proto::UpdateParticipantLocation {
                     room_id,
@@ -1244,22 +1254,21 @@ impl Room {
             return Task::ready(Err(anyhow!("live-kit was not initialized")));
         };
 
-        cx.spawn_weak(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let publish_track = async {
                 let track = LocalAudioTrack::create();
-                this.upgrade(&cx)
+                this.upgrade()
                     .ok_or_else(|| anyhow!("room was dropped"))?
-                    .read_with(&cx, |this, _| {
+                    .update(&mut cx, |this, _| {
                         this.live_kit
                             .as_ref()
                             .map(|live_kit| live_kit.room.publish_audio_track(track))
-                    })
+                    })?
                     .ok_or_else(|| anyhow!("live-kit was not initialized"))?
                     .await
             };
-
             let publication = publish_track.await;
-            this.upgrade(&cx)
+            this.upgrade()
                 .ok_or_else(|| anyhow!("room was dropped"))?
                 .update(&mut cx, |this, cx| {
                     let live_kit = this
@@ -1283,7 +1292,9 @@ impl Room {
                                 live_kit.room.unpublish_track(publication);
                             } else {
                                 if muted {
-                                    cx.background().spawn(publication.set_mute(muted)).detach();
+                                    cx.background_executor()
+                                        .spawn(publication.set_mute(muted))
+                                        .detach();
                                 }
                                 live_kit.microphone_track = LocalTrack::Published {
                                     track_publication: publication,
@@ -1303,7 +1314,7 @@ impl Room {
                             }
                         }
                     }
-                })
+                })?
         })
     }
 
@@ -1326,26 +1337,26 @@ impl Room {
             return Task::ready(Err(anyhow!("live-kit was not initialized")));
         };
 
-        cx.spawn_weak(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let publish_track = async {
                 let displays = displays.await?;
                 let display = displays
                     .first()
                     .ok_or_else(|| anyhow!("no display found"))?;
                 let track = LocalVideoTrack::screen_share_for_display(&display);
-                this.upgrade(&cx)
+                this.upgrade()
                     .ok_or_else(|| anyhow!("room was dropped"))?
-                    .read_with(&cx, |this, _| {
+                    .update(&mut cx, |this, _| {
                         this.live_kit
                             .as_ref()
                             .map(|live_kit| live_kit.room.publish_video_track(track))
-                    })
+                    })?
                     .ok_or_else(|| anyhow!("live-kit was not initialized"))?
                     .await
             };
 
             let publication = publish_track.await;
-            this.upgrade(&cx)
+            this.upgrade()
                 .ok_or_else(|| anyhow!("room was dropped"))?
                 .update(&mut cx, |this, cx| {
                     let live_kit = this
@@ -1369,7 +1380,9 @@ impl Room {
                                 live_kit.room.unpublish_track(publication);
                             } else {
                                 if muted {
-                                    cx.background().spawn(publication.set_mute(muted)).detach();
+                                    cx.background_executor()
+                                        .spawn(publication.set_mute(muted))
+                                        .detach();
                                 }
                                 live_kit.screen_track = LocalTrack::Published {
                                     track_publication: publication,
@@ -1392,7 +1405,7 @@ impl Room {
                             }
                         }
                     }
-                })
+                })?
         })
     }
 
@@ -1435,11 +1448,12 @@ impl Room {
                     .room
                     .remote_audio_track_publications(&participant.user.id.to_string())
                 {
-                    tasks.push(cx.foreground().spawn(track.set_enabled(!live_kit.deafened)));
+                    let deafened = live_kit.deafened;
+                    tasks.push(cx.foreground_executor().spawn(track.set_enabled(!deafened)));
                 }
             }
 
-            Ok(cx.foreground().spawn(async move {
+            Ok(cx.foreground_executor().spawn(async move {
                 if let Some(mute_task) = mute_task {
                     mute_task.await?;
                 }
@@ -1499,7 +1513,6 @@ struct LiveKitRoom {
     deafened: bool,
     speaking: bool,
     next_publish_id: usize,
-    can_publish: bool,
     _maintain_room: Task<()>,
     _maintain_tracks: [Task<()>; 2],
 }
@@ -1531,7 +1544,8 @@ impl LiveKitRoom {
                 *muted = should_mute;
                 cx.notify();
                 Ok((
-                    cx.background().spawn(track_publication.set_mute(*muted)),
+                    cx.background_executor()
+                        .spawn(track_publication.set_mute(*muted)),
                     old_muted,
                 ))
             }

crates/call2/Cargo.toml 🔗

@@ -1,54 +0,0 @@
-[package]
-name = "call2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/call2.rs"
-doctest = false
-
-[features]
-test-support = [
-    "client/test-support",
-    "collections/test-support",
-    "gpui/test-support",
-    "live_kit_client/test-support",
-    "project/test-support",
-    "util/test-support"
-]
-
-[dependencies]
-audio = { package = "audio2", path = "../audio2" }
-client = { package = "client2", path = "../client2" }
-collections = { path = "../collections" }
-gpui = { package = "gpui2", path = "../gpui2" }
-log.workspace = true
-live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
-fs = { package = "fs2", path = "../fs2" }
-language = { path = "../language" }
-media = { path = "../media" }
-project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
-util = { path = "../util" }
-
-anyhow.workspace = true
-async-broadcast = "0.4"
-futures.workspace = true
-image = "0.23"
-postage.workspace = true
-schemars.workspace = true
-serde.workspace = true
-serde_json.workspace = true
-serde_derive.workspace = true
-smallvec.workspace = true
-
-[dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
-fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
-language = { path = "../language", features = ["test-support"] }
-collections = { path = "../collections", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
-project = { path = "../project", features = ["test-support"] }
-util = { path = "../util", features = ["test-support"] }

crates/call2/src/call2.rs 🔗

@@ -1,543 +0,0 @@
-pub mod call_settings;
-pub mod participant;
-pub mod room;
-
-use anyhow::{anyhow, Result};
-use audio::Audio;
-use call_settings::CallSettings;
-use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
-use collections::HashSet;
-use futures::{channel::oneshot, future::Shared, Future, FutureExt};
-use gpui::{
-    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
-    WeakModel,
-};
-use postage::watch;
-use project::Project;
-use room::Event;
-use settings::Settings;
-use std::sync::Arc;
-
-pub use participant::ParticipantLocation;
-pub use room::Room;
-
-pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
-    CallSettings::register(cx);
-
-    let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
-    cx.set_global(active_call);
-}
-
-pub struct OneAtATime {
-    cancel: Option<oneshot::Sender<()>>,
-}
-
-impl OneAtATime {
-    /// spawn a task in the given context.
-    /// if another task is spawned before that resolves, or if the OneAtATime itself is dropped, the first task will be cancelled and return Ok(None)
-    /// otherwise you'll see the result of the task.
-    fn spawn<F, Fut, R>(&mut self, cx: &mut AppContext, f: F) -> Task<Result<Option<R>>>
-    where
-        F: 'static + FnOnce(AsyncAppContext) -> Fut,
-        Fut: Future<Output = Result<R>>,
-        R: 'static,
-    {
-        let (tx, rx) = oneshot::channel();
-        self.cancel.replace(tx);
-        cx.spawn(|cx| async move {
-            futures::select_biased! {
-                _ = rx.fuse() => Ok(None),
-                result = f(cx).fuse() => result.map(Some),
-            }
-        })
-    }
-
-    fn running(&self) -> bool {
-        self.cancel
-            .as_ref()
-            .is_some_and(|cancel| !cancel.is_canceled())
-    }
-}
-
-#[derive(Clone)]
-pub struct IncomingCall {
-    pub room_id: u64,
-    pub calling_user: Arc<User>,
-    pub participants: Vec<Arc<User>>,
-    pub initial_project: Option<proto::ParticipantProject>,
-}
-
-/// Singleton global maintaining the user's participation in a room across workspaces.
-pub struct ActiveCall {
-    room: Option<(Model<Room>, Vec<Subscription>)>,
-    pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
-    location: Option<WeakModel<Project>>,
-    _join_debouncer: OneAtATime,
-    pending_invites: HashSet<u64>,
-    incoming_call: (
-        watch::Sender<Option<IncomingCall>>,
-        watch::Receiver<Option<IncomingCall>>,
-    ),
-    client: Arc<Client>,
-    user_store: Model<UserStore>,
-    _subscriptions: Vec<client::Subscription>,
-}
-
-impl EventEmitter<Event> for ActiveCall {}
-
-impl ActiveCall {
-    fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
-        Self {
-            room: None,
-            pending_room_creation: None,
-            location: None,
-            pending_invites: Default::default(),
-            incoming_call: watch::channel(),
-            _join_debouncer: OneAtATime { cancel: None },
-            _subscriptions: vec![
-                client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
-                client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
-            ],
-            client,
-            user_store,
-        }
-    }
-
-    pub fn channel_id(&self, cx: &AppContext) -> Option<u64> {
-        self.room()?.read(cx).channel_id()
-    }
-
-    async fn handle_incoming_call(
-        this: Model<Self>,
-        envelope: TypedEnvelope<proto::IncomingCall>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<proto::Ack> {
-        let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
-        let call = IncomingCall {
-            room_id: envelope.payload.room_id,
-            participants: user_store
-                .update(&mut cx, |user_store, cx| {
-                    user_store.get_users(envelope.payload.participant_user_ids, cx)
-                })?
-                .await?,
-            calling_user: user_store
-                .update(&mut cx, |user_store, cx| {
-                    user_store.get_user(envelope.payload.calling_user_id, cx)
-                })?
-                .await?,
-            initial_project: envelope.payload.initial_project,
-        };
-        this.update(&mut cx, |this, _| {
-            *this.incoming_call.0.borrow_mut() = Some(call);
-        })?;
-
-        Ok(proto::Ack {})
-    }
-
-    async fn handle_call_canceled(
-        this: Model<Self>,
-        envelope: TypedEnvelope<proto::CallCanceled>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, _| {
-            let mut incoming_call = this.incoming_call.0.borrow_mut();
-            if incoming_call
-                .as_ref()
-                .map_or(false, |call| call.room_id == envelope.payload.room_id)
-            {
-                incoming_call.take();
-            }
-        })?;
-        Ok(())
-    }
-
-    pub fn global(cx: &AppContext) -> Model<Self> {
-        cx.global::<Model<Self>>().clone()
-    }
-
-    pub fn invite(
-        &mut self,
-        called_user_id: u64,
-        initial_project: Option<Model<Project>>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if !self.pending_invites.insert(called_user_id) {
-            return Task::ready(Err(anyhow!("user was already invited")));
-        }
-        cx.notify();
-
-        if self._join_debouncer.running() {
-            return Task::ready(Ok(()));
-        }
-
-        let room = if let Some(room) = self.room().cloned() {
-            Some(Task::ready(Ok(room)).shared())
-        } else {
-            self.pending_room_creation.clone()
-        };
-
-        let invite = if let Some(room) = room {
-            cx.spawn(move |_, mut cx| async move {
-                let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
-
-                let initial_project_id = if let Some(initial_project) = initial_project {
-                    Some(
-                        room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
-                            .await?,
-                    )
-                } else {
-                    None
-                };
-
-                room.update(&mut cx, move |room, cx| {
-                    room.call(called_user_id, initial_project_id, cx)
-                })?
-                .await?;
-
-                anyhow::Ok(())
-            })
-        } else {
-            let client = self.client.clone();
-            let user_store = self.user_store.clone();
-            let room = cx
-                .spawn(move |this, mut cx| async move {
-                    let create_room = async {
-                        let room = cx
-                            .update(|cx| {
-                                Room::create(
-                                    called_user_id,
-                                    initial_project,
-                                    client,
-                                    user_store,
-                                    cx,
-                                )
-                            })?
-                            .await?;
-
-                        this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
-                            .await?;
-
-                        anyhow::Ok(room)
-                    };
-
-                    let room = create_room.await;
-                    this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
-                    room.map_err(Arc::new)
-                })
-                .shared();
-            self.pending_room_creation = Some(room.clone());
-            cx.background_executor().spawn(async move {
-                room.await.map_err(|err| anyhow!("{:?}", err))?;
-                anyhow::Ok(())
-            })
-        };
-
-        cx.spawn(move |this, mut cx| async move {
-            let result = invite.await;
-            if result.is_ok() {
-                this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
-            } else {
-                // TODO: Resport collaboration error
-            }
-
-            this.update(&mut cx, |this, cx| {
-                this.pending_invites.remove(&called_user_id);
-                cx.notify();
-            })?;
-            result
-        })
-    }
-
-    pub fn cancel_invite(
-        &mut self,
-        called_user_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let room_id = if let Some(room) = self.room() {
-            room.read(cx).id()
-        } else {
-            return Task::ready(Err(anyhow!("no active call")));
-        };
-
-        let client = self.client.clone();
-        cx.background_executor().spawn(async move {
-            client
-                .request(proto::CancelCall {
-                    room_id,
-                    called_user_id,
-                })
-                .await?;
-            anyhow::Ok(())
-        })
-    }
-
-    pub fn incoming(&self) -> watch::Receiver<Option<IncomingCall>> {
-        self.incoming_call.1.clone()
-    }
-
-    pub fn accept_incoming(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        if self.room.is_some() {
-            return Task::ready(Err(anyhow!("cannot join while on another call")));
-        }
-
-        let call = if let Some(call) = self.incoming_call.1.borrow().clone() {
-            call
-        } else {
-            return Task::ready(Err(anyhow!("no incoming call")));
-        };
-
-        if self.pending_room_creation.is_some() {
-            return Task::ready(Ok(()));
-        }
-
-        let room_id = call.room_id.clone();
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        let join = self
-            ._join_debouncer
-            .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
-
-        cx.spawn(|this, mut cx| async move {
-            let room = join.await?;
-            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
-                .await?;
-            this.update(&mut cx, |this, cx| {
-                this.report_call_event("accept incoming", cx)
-            })?;
-            Ok(())
-        })
-    }
-
-    pub fn decline_incoming(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
-        let call = self
-            .incoming_call
-            .0
-            .borrow_mut()
-            .take()
-            .ok_or_else(|| anyhow!("no incoming call"))?;
-        report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
-        self.client.send(proto::DeclineCall {
-            room_id: call.room_id,
-        })?;
-        Ok(())
-    }
-
-    pub fn join_channel(
-        &mut self,
-        channel_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Option<Model<Room>>>> {
-        if let Some(room) = self.room().cloned() {
-            if room.read(cx).channel_id() == Some(channel_id) {
-                return Task::ready(Ok(Some(room)));
-            } else {
-                room.update(cx, |room, cx| room.clear_state(cx));
-            }
-        }
-
-        if self.pending_room_creation.is_some() {
-            return Task::ready(Ok(None));
-        }
-
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        let join = self._join_debouncer.spawn(cx, move |cx| async move {
-            Room::join_channel(channel_id, client, user_store, cx).await
-        });
-
-        cx.spawn(|this, mut cx| async move {
-            let room = join.await?;
-            this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
-                .await?;
-            this.update(&mut cx, |this, cx| {
-                this.report_call_event("join channel", cx)
-            })?;
-            Ok(room)
-        })
-    }
-
-    pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        cx.notify();
-        self.report_call_event("hang up", cx);
-
-        Audio::end_call(cx);
-        if let Some((room, _)) = self.room.take() {
-            room.update(cx, |room, cx| room.leave(cx))
-        } else {
-            Task::ready(Ok(()))
-        }
-    }
-
-    pub fn share_project(
-        &mut self,
-        project: Model<Project>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<u64>> {
-        if let Some((room, _)) = self.room.as_ref() {
-            self.report_call_event("share project", cx);
-            room.update(cx, |room, cx| room.share_project(project, cx))
-        } else {
-            Task::ready(Err(anyhow!("no active call")))
-        }
-    }
-
-    pub fn unshare_project(
-        &mut self,
-        project: Model<Project>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        if let Some((room, _)) = self.room.as_ref() {
-            self.report_call_event("unshare project", cx);
-            room.update(cx, |room, cx| room.unshare_project(project, cx))
-        } else {
-            Err(anyhow!("no active call"))
-        }
-    }
-
-    pub fn location(&self) -> Option<&WeakModel<Project>> {
-        self.location.as_ref()
-    }
-
-    pub fn set_location(
-        &mut self,
-        project: Option<&Model<Project>>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if project.is_some() || !*ZED_ALWAYS_ACTIVE {
-            self.location = project.map(|project| project.downgrade());
-            if let Some((room, _)) = self.room.as_ref() {
-                return room.update(cx, |room, cx| room.set_location(project, cx));
-            }
-        }
-        Task::ready(Ok(()))
-    }
-
-    fn set_room(
-        &mut self,
-        room: Option<Model<Room>>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
-            cx.notify();
-            if let Some(room) = room {
-                if room.read(cx).status().is_offline() {
-                    self.room = None;
-                    Task::ready(Ok(()))
-                } else {
-                    let subscriptions = vec![
-                        cx.observe(&room, |this, room, cx| {
-                            if room.read(cx).status().is_offline() {
-                                this.set_room(None, cx).detach_and_log_err(cx);
-                            }
-
-                            cx.notify();
-                        }),
-                        cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
-                    ];
-                    self.room = Some((room.clone(), subscriptions));
-                    let location = self
-                        .location
-                        .as_ref()
-                        .and_then(|location| location.upgrade());
-                    room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
-                }
-            } else {
-                self.room = None;
-                Task::ready(Ok(()))
-            }
-        } else {
-            Task::ready(Ok(()))
-        }
-    }
-
-    pub fn room(&self) -> Option<&Model<Room>> {
-        self.room.as_ref().map(|(room, _)| room)
-    }
-
-    pub fn client(&self) -> Arc<Client> {
-        self.client.clone()
-    }
-
-    pub fn pending_invites(&self) -> &HashSet<u64> {
-        &self.pending_invites
-    }
-
-    pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
-        if let Some(room) = self.room() {
-            let room = room.read(cx);
-            report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
-        }
-    }
-}
-
-pub fn report_call_event_for_room(
-    operation: &'static str,
-    room_id: u64,
-    channel_id: Option<u64>,
-    client: &Arc<Client>,
-    cx: &mut AppContext,
-) {
-    let telemetry = client.telemetry();
-    let telemetry_settings = *TelemetrySettings::get_global(cx);
-
-    telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
-}
-
-pub fn report_call_event_for_channel(
-    operation: &'static str,
-    channel_id: u64,
-    client: &Arc<Client>,
-    cx: &AppContext,
-) {
-    let room = ActiveCall::global(cx).read(cx).room();
-
-    let telemetry = client.telemetry();
-
-    let telemetry_settings = *TelemetrySettings::get_global(cx);
-
-    telemetry.report_call_event(
-        telemetry_settings,
-        operation,
-        room.map(|r| r.read(cx).id()),
-        Some(channel_id),
-    )
-}
-
-#[cfg(test)]
-mod test {
-    use gpui::TestAppContext;
-
-    use crate::OneAtATime;
-
-    #[gpui::test]
-    async fn test_one_at_a_time(cx: &mut TestAppContext) {
-        let mut one_at_a_time = OneAtATime { cancel: None };
-
-        assert_eq!(
-            cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(1) }))
-                .await
-                .unwrap(),
-            Some(1)
-        );
-
-        let (a, b) = cx.update(|cx| {
-            (
-                one_at_a_time.spawn(cx, |_| async {
-                    assert!(false);
-                    Ok(2)
-                }),
-                one_at_a_time.spawn(cx, |_| async { Ok(3) }),
-            )
-        });
-
-        assert_eq!(a.await.unwrap(), None);
-        assert_eq!(b.await.unwrap(), Some(3));
-
-        let promise = cx.update(|cx| one_at_a_time.spawn(cx, |_| async { Ok(4) }));
-        drop(one_at_a_time);
-
-        assert_eq!(promise.await.unwrap(), None);
-    }
-}

crates/call2/src/call_settings.rs 🔗

@@ -1,32 +0,0 @@
-use anyhow::Result;
-use gpui::AppContext;
-use schemars::JsonSchema;
-use serde_derive::{Deserialize, Serialize};
-use settings::Settings;
-
-#[derive(Deserialize, Debug)]
-pub struct CallSettings {
-    pub mute_on_join: bool,
-}
-
-#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
-pub struct CallSettingsContent {
-    pub mute_on_join: Option<bool>,
-}
-
-impl Settings for CallSettings {
-    const KEY: Option<&'static str> = Some("calls");
-
-    type FileContent = CallSettingsContent;
-
-    fn load(
-        default_value: &Self::FileContent,
-        user_values: &[&Self::FileContent],
-        _cx: &mut AppContext,
-    ) -> Result<Self>
-    where
-        Self: Sized,
-    {
-        Self::load_via_json_merge(default_value, user_values)
-    }
-}

crates/call2/src/participant.rs 🔗

@@ -1,52 +0,0 @@
-use anyhow::{anyhow, Result};
-use client::ParticipantIndex;
-use client::{proto, User};
-use collections::HashMap;
-use gpui::WeakModel;
-pub use live_kit_client::Frame;
-pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
-use project::Project;
-use std::sync::Arc;
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum ParticipantLocation {
-    SharedProject { project_id: u64 },
-    UnsharedProject,
-    External,
-}
-
-impl ParticipantLocation {
-    pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
-        match location.and_then(|l| l.variant) {
-            Some(proto::participant_location::Variant::SharedProject(project)) => {
-                Ok(Self::SharedProject {
-                    project_id: project.id,
-                })
-            }
-            Some(proto::participant_location::Variant::UnsharedProject(_)) => {
-                Ok(Self::UnsharedProject)
-            }
-            Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
-            None => Err(anyhow!("participant location was not provided")),
-        }
-    }
-}
-
-#[derive(Clone, Default)]
-pub struct LocalParticipant {
-    pub projects: Vec<proto::ParticipantProject>,
-    pub active_project: Option<WeakModel<Project>>,
-}
-
-#[derive(Clone, Debug)]
-pub struct RemoteParticipant {
-    pub user: Arc<User>,
-    pub peer_id: proto::PeerId,
-    pub projects: Vec<proto::ParticipantProject>,
-    pub location: ParticipantLocation,
-    pub participant_index: ParticipantIndex,
-    pub muted: bool,
-    pub speaking: bool,
-    pub video_tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
-    pub audio_tracks: HashMap<live_kit_client::Sid, Arc<RemoteAudioTrack>>,
-}

crates/call2/src/room.rs 🔗

@@ -1,1599 +0,0 @@
-use crate::{
-    call_settings::CallSettings,
-    participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
-};
-use anyhow::{anyhow, Result};
-use audio::{Audio, Sound};
-use client::{
-    proto::{self, PeerId},
-    Client, ParticipantIndex, TypedEnvelope, User, UserStore,
-};
-use collections::{BTreeMap, HashMap, HashSet};
-use fs::Fs;
-use futures::{FutureExt, StreamExt};
-use gpui::{
-    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
-};
-use language::LanguageRegistry;
-use live_kit_client::{
-    LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RemoteAudioTrackUpdate,
-    RemoteVideoTrackUpdate,
-};
-use postage::{sink::Sink, stream::Stream, watch};
-use project::Project;
-use settings::Settings as _;
-use std::{future::Future, mem, sync::Arc, time::Duration};
-use util::{post_inc, ResultExt, TryFutureExt};
-
-pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum Event {
-    ParticipantLocationChanged {
-        participant_id: proto::PeerId,
-    },
-    RemoteVideoTracksChanged {
-        participant_id: proto::PeerId,
-    },
-    RemoteAudioTracksChanged {
-        participant_id: proto::PeerId,
-    },
-    RemoteProjectShared {
-        owner: Arc<User>,
-        project_id: u64,
-        worktree_root_names: Vec<String>,
-    },
-    RemoteProjectUnshared {
-        project_id: u64,
-    },
-    RemoteProjectJoined {
-        project_id: u64,
-    },
-    RemoteProjectInvitationDiscarded {
-        project_id: u64,
-    },
-    Left,
-}
-
-pub struct Room {
-    id: u64,
-    channel_id: Option<u64>,
-    live_kit: Option<LiveKitRoom>,
-    status: RoomStatus,
-    shared_projects: HashSet<WeakModel<Project>>,
-    joined_projects: HashSet<WeakModel<Project>>,
-    local_participant: LocalParticipant,
-    remote_participants: BTreeMap<u64, RemoteParticipant>,
-    pending_participants: Vec<Arc<User>>,
-    participant_user_ids: HashSet<u64>,
-    pending_call_count: usize,
-    leave_when_empty: bool,
-    client: Arc<Client>,
-    user_store: Model<UserStore>,
-    follows_by_leader_id_project_id: HashMap<(PeerId, u64), Vec<PeerId>>,
-    client_subscriptions: Vec<client::Subscription>,
-    _subscriptions: Vec<gpui::Subscription>,
-    room_update_completed_tx: watch::Sender<Option<()>>,
-    room_update_completed_rx: watch::Receiver<Option<()>>,
-    pending_room_update: Option<Task<()>>,
-    maintain_connection: Option<Task<Option<()>>>,
-}
-
-impl EventEmitter<Event> for Room {}
-
-impl Room {
-    pub fn channel_id(&self) -> Option<u64> {
-        self.channel_id
-    }
-
-    pub fn is_sharing_project(&self) -> bool {
-        !self.shared_projects.is_empty()
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn is_connected(&self) -> bool {
-        if let Some(live_kit) = self.live_kit.as_ref() {
-            matches!(
-                *live_kit.room.status().borrow(),
-                live_kit_client::ConnectionState::Connected { .. }
-            )
-        } else {
-            false
-        }
-    }
-
-    fn new(
-        id: u64,
-        channel_id: Option<u64>,
-        live_kit_connection_info: Option<proto::LiveKitConnectionInfo>,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
-        let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
-            let room = live_kit_client::Room::new();
-            let mut status = room.status();
-            // Consume the initial status of the room.
-            let _ = status.try_recv();
-            let _maintain_room = cx.spawn(|this, mut cx| async move {
-                while let Some(status) = status.next().await {
-                    let this = if let Some(this) = this.upgrade() {
-                        this
-                    } else {
-                        break;
-                    };
-
-                    if status == live_kit_client::ConnectionState::Disconnected {
-                        this.update(&mut cx, |this, cx| this.leave(cx).log_err())
-                            .ok();
-                        break;
-                    }
-                }
-            });
-
-            let _maintain_video_tracks = cx.spawn({
-                let room = room.clone();
-                move |this, mut cx| async move {
-                    let mut track_video_changes = room.remote_video_track_updates();
-                    while let Some(track_change) = track_video_changes.next().await {
-                        let this = if let Some(this) = this.upgrade() {
-                            this
-                        } else {
-                            break;
-                        };
-
-                        this.update(&mut cx, |this, cx| {
-                            this.remote_video_track_updated(track_change, cx).log_err()
-                        })
-                        .ok();
-                    }
-                }
-            });
-
-            let _maintain_audio_tracks = cx.spawn({
-                let room = room.clone();
-                |this, mut cx| async move {
-                    let mut track_audio_changes = room.remote_audio_track_updates();
-                    while let Some(track_change) = track_audio_changes.next().await {
-                        let this = if let Some(this) = this.upgrade() {
-                            this
-                        } else {
-                            break;
-                        };
-
-                        this.update(&mut cx, |this, cx| {
-                            this.remote_audio_track_updated(track_change, cx).log_err()
-                        })
-                        .ok();
-                    }
-                }
-            });
-
-            let connect = room.connect(&connection_info.server_url, &connection_info.token);
-            cx.spawn(|this, mut cx| async move {
-                connect.await?;
-
-                if !cx.update(|cx| Self::mute_on_join(cx))? {
-                    this.update(&mut cx, |this, cx| this.share_microphone(cx))?
-                        .await?;
-                }
-
-                anyhow::Ok(())
-            })
-            .detach_and_log_err(cx);
-
-            Some(LiveKitRoom {
-                room,
-                screen_track: LocalTrack::None,
-                microphone_track: LocalTrack::None,
-                next_publish_id: 0,
-                muted_by_user: false,
-                deafened: false,
-                speaking: false,
-                _maintain_room,
-                _maintain_tracks: [_maintain_video_tracks, _maintain_audio_tracks],
-            })
-        } else {
-            None
-        };
-
-        let maintain_connection = cx.spawn({
-            let client = client.clone();
-            move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
-        });
-
-        Audio::play_sound(Sound::Joined, cx);
-
-        let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
-
-        Self {
-            id,
-            channel_id,
-            live_kit: live_kit_room,
-            status: RoomStatus::Online,
-            shared_projects: Default::default(),
-            joined_projects: Default::default(),
-            participant_user_ids: Default::default(),
-            local_participant: Default::default(),
-            remote_participants: Default::default(),
-            pending_participants: Default::default(),
-            pending_call_count: 0,
-            client_subscriptions: vec![
-                client.add_message_handler(cx.weak_model(), Self::handle_room_updated)
-            ],
-            _subscriptions: vec![
-                cx.on_release(Self::released),
-                cx.on_app_quit(Self::app_will_quit),
-            ],
-            leave_when_empty: false,
-            pending_room_update: None,
-            client,
-            user_store,
-            follows_by_leader_id_project_id: Default::default(),
-            maintain_connection: Some(maintain_connection),
-            room_update_completed_tx,
-            room_update_completed_rx,
-        }
-    }
-
-    pub(crate) fn create(
-        called_user_id: u64,
-        initial_project: Option<Model<Project>>,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        cx: &mut AppContext,
-    ) -> Task<Result<Model<Self>>> {
-        cx.spawn(move |mut cx| async move {
-            let response = client.request(proto::CreateRoom {}).await?;
-            let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
-            let room = cx.new_model(|cx| {
-                Self::new(
-                    room_proto.id,
-                    None,
-                    response.live_kit_connection_info,
-                    client,
-                    user_store,
-                    cx,
-                )
-            })?;
-
-            let initial_project_id = if let Some(initial_project) = initial_project {
-                let initial_project_id = room
-                    .update(&mut cx, |room, cx| {
-                        room.share_project(initial_project.clone(), cx)
-                    })?
-                    .await?;
-                Some(initial_project_id)
-            } else {
-                None
-            };
-
-            match room
-                .update(&mut cx, |room, cx| {
-                    room.leave_when_empty = true;
-                    room.call(called_user_id, initial_project_id, cx)
-                })?
-                .await
-            {
-                Ok(()) => Ok(room),
-                Err(error) => Err(anyhow!("room creation failed: {:?}", error)),
-            }
-        })
-    }
-
-    pub(crate) async fn join_channel(
-        channel_id: u64,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        cx: AsyncAppContext,
-    ) -> Result<Model<Self>> {
-        Self::from_join_response(
-            client.request(proto::JoinChannel { channel_id }).await?,
-            client,
-            user_store,
-            cx,
-        )
-    }
-
-    pub(crate) async fn join(
-        room_id: u64,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        cx: AsyncAppContext,
-    ) -> Result<Model<Self>> {
-        Self::from_join_response(
-            client.request(proto::JoinRoom { id: room_id }).await?,
-            client,
-            user_store,
-            cx,
-        )
-    }
-
-    fn released(&mut self, cx: &mut AppContext) {
-        if self.status.is_online() {
-            self.leave_internal(cx).detach_and_log_err(cx);
-        }
-    }
-
-    fn app_will_quit(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
-        let task = if self.status.is_online() {
-            let leave = self.leave_internal(cx);
-            Some(cx.background_executor().spawn(async move {
-                leave.await.log_err();
-            }))
-        } else {
-            None
-        };
-
-        async move {
-            if let Some(task) = task {
-                task.await;
-            }
-        }
-    }
-
-    pub fn mute_on_join(cx: &AppContext) -> bool {
-        CallSettings::get_global(cx).mute_on_join || client::IMPERSONATE_LOGIN.is_some()
-    }
-
-    fn from_join_response(
-        response: proto::JoinRoomResponse,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        mut cx: AsyncAppContext,
-    ) -> Result<Model<Self>> {
-        let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
-        let room = cx.new_model(|cx| {
-            Self::new(
-                room_proto.id,
-                response.channel_id,
-                response.live_kit_connection_info,
-                client,
-                user_store,
-                cx,
-            )
-        })?;
-        room.update(&mut cx, |room, cx| {
-            room.leave_when_empty = room.channel_id.is_none();
-            room.apply_room_update(room_proto, cx)?;
-            anyhow::Ok(())
-        })??;
-        Ok(room)
-    }
-
-    fn should_leave(&self) -> bool {
-        self.leave_when_empty
-            && self.pending_room_update.is_none()
-            && self.pending_participants.is_empty()
-            && self.remote_participants.is_empty()
-            && self.pending_call_count == 0
-    }
-
-    pub(crate) fn leave(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        cx.notify();
-        cx.emit(Event::Left);
-        self.leave_internal(cx)
-    }
-
-    fn leave_internal(&mut self, cx: &mut AppContext) -> Task<Result<()>> {
-        if self.status.is_offline() {
-            return Task::ready(Err(anyhow!("room is offline")));
-        }
-
-        log::info!("leaving room");
-        Audio::play_sound(Sound::Leave, cx);
-
-        self.clear_state(cx);
-
-        let leave_room = self.client.request(proto::LeaveRoom {});
-        cx.background_executor().spawn(async move {
-            leave_room.await?;
-            anyhow::Ok(())
-        })
-    }
-
-    pub(crate) fn clear_state(&mut self, cx: &mut AppContext) {
-        for project in self.shared_projects.drain() {
-            if let Some(project) = project.upgrade() {
-                project.update(cx, |project, cx| {
-                    project.unshare(cx).log_err();
-                });
-            }
-        }
-        for project in self.joined_projects.drain() {
-            if let Some(project) = project.upgrade() {
-                project.update(cx, |project, cx| {
-                    project.disconnected_from_host(cx);
-                    project.close(cx);
-                });
-            }
-        }
-
-        self.status = RoomStatus::Offline;
-        self.remote_participants.clear();
-        self.pending_participants.clear();
-        self.participant_user_ids.clear();
-        self.client_subscriptions.clear();
-        self.live_kit.take();
-        self.pending_room_update.take();
-        self.maintain_connection.take();
-    }
-
-    async fn maintain_connection(
-        this: WeakModel<Self>,
-        client: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        let mut client_status = client.status();
-        loop {
-            let _ = client_status.try_recv();
-            let is_connected = client_status.borrow().is_connected();
-            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
-            if !is_connected || client_status.next().await.is_some() {
-                log::info!("detected client disconnection");
-
-                this.upgrade()
-                    .ok_or_else(|| anyhow!("room was dropped"))?
-                    .update(&mut cx, |this, cx| {
-                        this.status = RoomStatus::Rejoining;
-                        cx.notify();
-                    })?;
-
-                // Wait for client to re-establish a connection to the server.
-                {
-                    let mut reconnection_timeout =
-                        cx.background_executor().timer(RECONNECT_TIMEOUT).fuse();
-                    let client_reconnection = async {
-                        let mut remaining_attempts = 3;
-                        while remaining_attempts > 0 {
-                            if client_status.borrow().is_connected() {
-                                log::info!("client reconnected, attempting to rejoin room");
-
-                                let Some(this) = this.upgrade() else { break };
-                                match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
-                                    Ok(task) => {
-                                        if task.await.log_err().is_some() {
-                                            return true;
-                                        } else {
-                                            remaining_attempts -= 1;
-                                        }
-                                    }
-                                    Err(_app_dropped) => return false,
-                                }
-                            } else if client_status.borrow().is_signed_out() {
-                                return false;
-                            }
-
-                            log::info!(
-                                "waiting for client status change, remaining attempts {}",
-                                remaining_attempts
-                            );
-                            client_status.next().await;
-                        }
-                        false
-                    }
-                    .fuse();
-                    futures::pin_mut!(client_reconnection);
-
-                    futures::select_biased! {
-                        reconnected = client_reconnection => {
-                            if reconnected {
-                                log::info!("successfully reconnected to room");
-                                // If we successfully joined the room, go back around the loop
-                                // waiting for future connection status changes.
-                                continue;
-                            }
-                        }
-                        _ = reconnection_timeout => {
-                            log::info!("room reconnection timeout expired");
-                        }
-                    }
-                }
-
-                break;
-            }
-        }
-
-        // The client failed to re-establish a connection to the server
-        // or an error occurred while trying to re-join the room. Either way
-        // we leave the room and return an error.
-        if let Some(this) = this.upgrade() {
-            log::info!("reconnection failed, leaving room");
-            let _ = this.update(&mut cx, |this, cx| this.leave(cx))?;
-        }
-        Err(anyhow!(
-            "can't reconnect to room: client failed to re-establish connection"
-        ))
-    }
-
-    fn rejoin(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let mut projects = HashMap::default();
-        let mut reshared_projects = Vec::new();
-        let mut rejoined_projects = Vec::new();
-        self.shared_projects.retain(|project| {
-            if let Some(handle) = project.upgrade() {
-                let project = handle.read(cx);
-                if let Some(project_id) = project.remote_id() {
-                    projects.insert(project_id, handle.clone());
-                    reshared_projects.push(proto::UpdateProject {
-                        project_id,
-                        worktrees: project.worktree_metadata_protos(cx),
-                    });
-                    return true;
-                }
-            }
-            false
-        });
-        self.joined_projects.retain(|project| {
-            if let Some(handle) = project.upgrade() {
-                let project = handle.read(cx);
-                if let Some(project_id) = project.remote_id() {
-                    projects.insert(project_id, handle.clone());
-                    rejoined_projects.push(proto::RejoinProject {
-                        id: project_id,
-                        worktrees: project
-                            .worktrees()
-                            .map(|worktree| {
-                                let worktree = worktree.read(cx);
-                                proto::RejoinWorktree {
-                                    id: worktree.id().to_proto(),
-                                    scan_id: worktree.completed_scan_id() as u64,
-                                }
-                            })
-                            .collect(),
-                    });
-                }
-                return true;
-            }
-            false
-        });
-
-        let response = self.client.request_envelope(proto::RejoinRoom {
-            id: self.id,
-            reshared_projects,
-            rejoined_projects,
-        });
-
-        cx.spawn(|this, mut cx| async move {
-            let response = response.await?;
-            let message_id = response.message_id;
-            let response = response.payload;
-            let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
-            this.update(&mut cx, |this, cx| {
-                this.status = RoomStatus::Online;
-                this.apply_room_update(room_proto, cx)?;
-
-                for reshared_project in response.reshared_projects {
-                    if let Some(project) = projects.get(&reshared_project.id) {
-                        project.update(cx, |project, cx| {
-                            project.reshared(reshared_project, cx).log_err();
-                        });
-                    }
-                }
-
-                for rejoined_project in response.rejoined_projects {
-                    if let Some(project) = projects.get(&rejoined_project.id) {
-                        project.update(cx, |project, cx| {
-                            project.rejoined(rejoined_project, message_id, cx).log_err();
-                        });
-                    }
-                }
-
-                anyhow::Ok(())
-            })?
-        })
-    }
-
-    pub fn id(&self) -> u64 {
-        self.id
-    }
-
-    pub fn status(&self) -> RoomStatus {
-        self.status
-    }
-
-    pub fn local_participant(&self) -> &LocalParticipant {
-        &self.local_participant
-    }
-
-    pub fn remote_participants(&self) -> &BTreeMap<u64, RemoteParticipant> {
-        &self.remote_participants
-    }
-
-    pub fn remote_participant_for_peer_id(&self, peer_id: PeerId) -> Option<&RemoteParticipant> {
-        self.remote_participants
-            .values()
-            .find(|p| p.peer_id == peer_id)
-    }
-
-    pub fn pending_participants(&self) -> &[Arc<User>] {
-        &self.pending_participants
-    }
-
-    pub fn contains_participant(&self, user_id: u64) -> bool {
-        self.participant_user_ids.contains(&user_id)
-    }
-
-    pub fn followers_for(&self, leader_id: PeerId, project_id: u64) -> &[PeerId] {
-        self.follows_by_leader_id_project_id
-            .get(&(leader_id, project_id))
-            .map_or(&[], |v| v.as_slice())
-    }
-
-    /// Returns the most 'active' projects, defined as most people in the project
-    pub fn most_active_project(&self, cx: &AppContext) -> Option<(u64, u64)> {
-        let mut project_hosts_and_guest_counts = HashMap::<u64, (Option<u64>, u32)>::default();
-        for participant in self.remote_participants.values() {
-            match participant.location {
-                ParticipantLocation::SharedProject { project_id } => {
-                    project_hosts_and_guest_counts
-                        .entry(project_id)
-                        .or_default()
-                        .1 += 1;
-                }
-                ParticipantLocation::External | ParticipantLocation::UnsharedProject => {}
-            }
-            for project in &participant.projects {
-                project_hosts_and_guest_counts
-                    .entry(project.id)
-                    .or_default()
-                    .0 = Some(participant.user.id);
-            }
-        }
-
-        if let Some(user) = self.user_store.read(cx).current_user() {
-            for project in &self.local_participant.projects {
-                project_hosts_and_guest_counts
-                    .entry(project.id)
-                    .or_default()
-                    .0 = Some(user.id);
-            }
-        }
-
-        project_hosts_and_guest_counts
-            .into_iter()
-            .filter_map(|(id, (host, guest_count))| Some((id, host?, guest_count)))
-            .max_by_key(|(_, _, guest_count)| *guest_count)
-            .map(|(id, host, _)| (id, host))
-    }
-
-    async fn handle_room_updated(
-        this: Model<Self>,
-        envelope: TypedEnvelope<proto::RoomUpdated>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        let room = envelope
-            .payload
-            .room
-            .ok_or_else(|| anyhow!("invalid room"))?;
-        this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
-    }
-
-    fn apply_room_update(
-        &mut self,
-        mut room: proto::Room,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        // Filter ourselves out from the room's participants.
-        let local_participant_ix = room
-            .participants
-            .iter()
-            .position(|participant| Some(participant.user_id) == self.client.user_id());
-        let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));
-
-        let pending_participant_user_ids = room
-            .pending_participants
-            .iter()
-            .map(|p| p.user_id)
-            .collect::<Vec<_>>();
-
-        let remote_participant_user_ids = room
-            .participants
-            .iter()
-            .map(|p| p.user_id)
-            .collect::<Vec<_>>();
-
-        let (remote_participants, pending_participants) =
-            self.user_store.update(cx, move |user_store, cx| {
-                (
-                    user_store.get_users(remote_participant_user_ids, cx),
-                    user_store.get_users(pending_participant_user_ids, cx),
-                )
-            });
-
-        self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {
-            let (remote_participants, pending_participants) =
-                futures::join!(remote_participants, pending_participants);
-
-            this.update(&mut cx, |this, cx| {
-                this.participant_user_ids.clear();
-
-                if let Some(participant) = local_participant {
-                    this.local_participant.projects = participant.projects;
-                } else {
-                    this.local_participant.projects.clear();
-                }
-
-                if let Some(participants) = remote_participants.log_err() {
-                    for (participant, user) in room.participants.into_iter().zip(participants) {
-                        let Some(peer_id) = participant.peer_id else {
-                            continue;
-                        };
-                        let participant_index = ParticipantIndex(participant.participant_index);
-                        this.participant_user_ids.insert(participant.user_id);
-
-                        let old_projects = this
-                            .remote_participants
-                            .get(&participant.user_id)
-                            .into_iter()
-                            .flat_map(|existing| &existing.projects)
-                            .map(|project| project.id)
-                            .collect::<HashSet<_>>();
-                        let new_projects = participant
-                            .projects
-                            .iter()
-                            .map(|project| project.id)
-                            .collect::<HashSet<_>>();
-
-                        for project in &participant.projects {
-                            if !old_projects.contains(&project.id) {
-                                cx.emit(Event::RemoteProjectShared {
-                                    owner: user.clone(),
-                                    project_id: project.id,
-                                    worktree_root_names: project.worktree_root_names.clone(),
-                                });
-                            }
-                        }
-
-                        for unshared_project_id in old_projects.difference(&new_projects) {
-                            this.joined_projects.retain(|project| {
-                                if let Some(project) = project.upgrade() {
-                                    project.update(cx, |project, cx| {
-                                        if project.remote_id() == Some(*unshared_project_id) {
-                                            project.disconnected_from_host(cx);
-                                            false
-                                        } else {
-                                            true
-                                        }
-                                    })
-                                } else {
-                                    false
-                                }
-                            });
-                            cx.emit(Event::RemoteProjectUnshared {
-                                project_id: *unshared_project_id,
-                            });
-                        }
-
-                        let location = ParticipantLocation::from_proto(participant.location)
-                            .unwrap_or(ParticipantLocation::External);
-                        if let Some(remote_participant) =
-                            this.remote_participants.get_mut(&participant.user_id)
-                        {
-                            remote_participant.peer_id = peer_id;
-                            remote_participant.projects = participant.projects;
-                            remote_participant.participant_index = participant_index;
-                            if location != remote_participant.location {
-                                remote_participant.location = location;
-                                cx.emit(Event::ParticipantLocationChanged {
-                                    participant_id: peer_id,
-                                });
-                            }
-                        } else {
-                            this.remote_participants.insert(
-                                participant.user_id,
-                                RemoteParticipant {
-                                    user: user.clone(),
-                                    participant_index,
-                                    peer_id,
-                                    projects: participant.projects,
-                                    location,
-                                    muted: true,
-                                    speaking: false,
-                                    video_tracks: Default::default(),
-                                    audio_tracks: Default::default(),
-                                },
-                            );
-
-                            Audio::play_sound(Sound::Joined, cx);
-
-                            if let Some(live_kit) = this.live_kit.as_ref() {
-                                let video_tracks =
-                                    live_kit.room.remote_video_tracks(&user.id.to_string());
-                                let audio_tracks =
-                                    live_kit.room.remote_audio_tracks(&user.id.to_string());
-                                let publications = live_kit
-                                    .room
-                                    .remote_audio_track_publications(&user.id.to_string());
-
-                                for track in video_tracks {
-                                    this.remote_video_track_updated(
-                                        RemoteVideoTrackUpdate::Subscribed(track),
-                                        cx,
-                                    )
-                                    .log_err();
-                                }
-
-                                for (track, publication) in
-                                    audio_tracks.iter().zip(publications.iter())
-                                {
-                                    this.remote_audio_track_updated(
-                                        RemoteAudioTrackUpdate::Subscribed(
-                                            track.clone(),
-                                            publication.clone(),
-                                        ),
-                                        cx,
-                                    )
-                                    .log_err();
-                                }
-                            }
-                        }
-                    }
-
-                    this.remote_participants.retain(|user_id, participant| {
-                        if this.participant_user_ids.contains(user_id) {
-                            true
-                        } else {
-                            for project in &participant.projects {
-                                cx.emit(Event::RemoteProjectUnshared {
-                                    project_id: project.id,
-                                });
-                            }
-                            false
-                        }
-                    });
-                }
-
-                if let Some(pending_participants) = pending_participants.log_err() {
-                    this.pending_participants = pending_participants;
-                    for participant in &this.pending_participants {
-                        this.participant_user_ids.insert(participant.id);
-                    }
-                }
-
-                this.follows_by_leader_id_project_id.clear();
-                for follower in room.followers {
-                    let project_id = follower.project_id;
-                    let (leader, follower) = match (follower.leader_id, follower.follower_id) {
-                        (Some(leader), Some(follower)) => (leader, follower),
-
-                        _ => {
-                            log::error!("Follower message {follower:?} missing some state");
-                            continue;
-                        }
-                    };
-
-                    let list = this
-                        .follows_by_leader_id_project_id
-                        .entry((leader, project_id))
-                        .or_insert(Vec::new());
-                    if !list.contains(&follower) {
-                        list.push(follower);
-                    }
-                }
-
-                this.pending_room_update.take();
-                if this.should_leave() {
-                    log::info!("room is empty, leaving");
-                    let _ = this.leave(cx);
-                }
-
-                this.user_store.update(cx, |user_store, cx| {
-                    let participant_indices_by_user_id = this
-                        .remote_participants
-                        .iter()
-                        .map(|(user_id, participant)| (*user_id, participant.participant_index))
-                        .collect();
-                    user_store.set_participant_indices(participant_indices_by_user_id, cx);
-                });
-
-                this.check_invariants();
-                this.room_update_completed_tx.try_send(Some(())).ok();
-                cx.notify();
-            })
-            .ok();
-        }));
-
-        cx.notify();
-        Ok(())
-    }
-
-    pub fn room_update_completed(&mut self) -> impl Future<Output = ()> {
-        let mut done_rx = self.room_update_completed_rx.clone();
-        async move {
-            while let Some(result) = done_rx.next().await {
-                if result.is_some() {
-                    break;
-                }
-            }
-        }
-    }
-
-    fn remote_video_track_updated(
-        &mut self,
-        change: RemoteVideoTrackUpdate,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        match change {
-            RemoteVideoTrackUpdate::Subscribed(track) => {
-                let user_id = track.publisher_id().parse()?;
-                let track_id = track.sid().to_string();
-                let participant = self
-                    .remote_participants
-                    .get_mut(&user_id)
-                    .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
-                participant.video_tracks.insert(track_id.clone(), track);
-                cx.emit(Event::RemoteVideoTracksChanged {
-                    participant_id: participant.peer_id,
-                });
-            }
-            RemoteVideoTrackUpdate::Unsubscribed {
-                publisher_id,
-                track_id,
-            } => {
-                let user_id = publisher_id.parse()?;
-                let participant = self
-                    .remote_participants
-                    .get_mut(&user_id)
-                    .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
-                participant.video_tracks.remove(&track_id);
-                cx.emit(Event::RemoteVideoTracksChanged {
-                    participant_id: participant.peer_id,
-                });
-            }
-        }
-
-        cx.notify();
-        Ok(())
-    }
-
-    fn remote_audio_track_updated(
-        &mut self,
-        change: RemoteAudioTrackUpdate,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        match change {
-            RemoteAudioTrackUpdate::ActiveSpeakersChanged { speakers } => {
-                let mut speaker_ids = speakers
-                    .into_iter()
-                    .filter_map(|speaker_sid| speaker_sid.parse().ok())
-                    .collect::<Vec<u64>>();
-                speaker_ids.sort_unstable();
-                for (sid, participant) in &mut self.remote_participants {
-                    if let Ok(_) = speaker_ids.binary_search(sid) {
-                        participant.speaking = true;
-                    } else {
-                        participant.speaking = false;
-                    }
-                }
-                if let Some(id) = self.client.user_id() {
-                    if let Some(room) = &mut self.live_kit {
-                        if let Ok(_) = speaker_ids.binary_search(&id) {
-                            room.speaking = true;
-                        } else {
-                            room.speaking = false;
-                        }
-                    }
-                }
-                cx.notify();
-            }
-            RemoteAudioTrackUpdate::MuteChanged { track_id, muted } => {
-                let mut found = false;
-                for participant in &mut self.remote_participants.values_mut() {
-                    for track in participant.audio_tracks.values() {
-                        if track.sid() == track_id {
-                            found = true;
-                            break;
-                        }
-                    }
-                    if found {
-                        participant.muted = muted;
-                        break;
-                    }
-                }
-
-                cx.notify();
-            }
-            RemoteAudioTrackUpdate::Subscribed(track, publication) => {
-                let user_id = track.publisher_id().parse()?;
-                let track_id = track.sid().to_string();
-                let participant = self
-                    .remote_participants
-                    .get_mut(&user_id)
-                    .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
-                participant.audio_tracks.insert(track_id.clone(), track);
-                participant.muted = publication.is_muted();
-
-                cx.emit(Event::RemoteAudioTracksChanged {
-                    participant_id: participant.peer_id,
-                });
-            }
-            RemoteAudioTrackUpdate::Unsubscribed {
-                publisher_id,
-                track_id,
-            } => {
-                let user_id = publisher_id.parse()?;
-                let participant = self
-                    .remote_participants
-                    .get_mut(&user_id)
-                    .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
-                participant.audio_tracks.remove(&track_id);
-                cx.emit(Event::RemoteAudioTracksChanged {
-                    participant_id: participant.peer_id,
-                });
-            }
-        }
-
-        cx.notify();
-        Ok(())
-    }
-
-    fn check_invariants(&self) {
-        #[cfg(any(test, feature = "test-support"))]
-        {
-            for participant in self.remote_participants.values() {
-                assert!(self.participant_user_ids.contains(&participant.user.id));
-                assert_ne!(participant.user.id, self.client.user_id().unwrap());
-            }
-
-            for participant in &self.pending_participants {
-                assert!(self.participant_user_ids.contains(&participant.id));
-                assert_ne!(participant.id, self.client.user_id().unwrap());
-            }
-
-            assert_eq!(
-                self.participant_user_ids.len(),
-                self.remote_participants.len() + self.pending_participants.len()
-            );
-        }
-    }
-
-    pub(crate) fn call(
-        &mut self,
-        called_user_id: u64,
-        initial_project_id: Option<u64>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if self.status.is_offline() {
-            return Task::ready(Err(anyhow!("room is offline")));
-        }
-
-        cx.notify();
-        let client = self.client.clone();
-        let room_id = self.id;
-        self.pending_call_count += 1;
-        cx.spawn(move |this, mut cx| async move {
-            let result = client
-                .request(proto::Call {
-                    room_id,
-                    called_user_id,
-                    initial_project_id,
-                })
-                .await;
-            this.update(&mut cx, |this, cx| {
-                this.pending_call_count -= 1;
-                if this.should_leave() {
-                    this.leave(cx).detach_and_log_err(cx);
-                }
-            })?;
-            result?;
-            Ok(())
-        })
-    }
-
-    pub fn join_project(
-        &mut self,
-        id: u64,
-        language_registry: Arc<LanguageRegistry>,
-        fs: Arc<dyn Fs>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Model<Project>>> {
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        cx.emit(Event::RemoteProjectJoined { project_id: id });
-        cx.spawn(move |this, mut cx| async move {
-            let project =
-                Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
-
-            this.update(&mut cx, |this, cx| {
-                this.joined_projects.retain(|project| {
-                    if let Some(project) = project.upgrade() {
-                        !project.read(cx).is_read_only()
-                    } else {
-                        false
-                    }
-                });
-                this.joined_projects.insert(project.downgrade());
-            })?;
-            Ok(project)
-        })
-    }
-
-    pub(crate) fn share_project(
-        &mut self,
-        project: Model<Project>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<u64>> {
-        if let Some(project_id) = project.read(cx).remote_id() {
-            return Task::ready(Ok(project_id));
-        }
-
-        let request = self.client.request(proto::ShareProject {
-            room_id: self.id(),
-            worktrees: project.read(cx).worktree_metadata_protos(cx),
-        });
-        cx.spawn(|this, mut cx| async move {
-            let response = request.await?;
-
-            project.update(&mut cx, |project, cx| {
-                project.shared(response.project_id, cx)
-            })??;
-
-            // If the user's location is in this project, it changes from UnsharedProject to SharedProject.
-            this.update(&mut cx, |this, cx| {
-                this.shared_projects.insert(project.downgrade());
-                let active_project = this.local_participant.active_project.as_ref();
-                if active_project.map_or(false, |location| *location == project) {
-                    this.set_location(Some(&project), cx)
-                } else {
-                    Task::ready(Ok(()))
-                }
-            })?
-            .await?;
-
-            Ok(response.project_id)
-        })
-    }
-
-    pub(crate) fn unshare_project(
-        &mut self,
-        project: Model<Project>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let project_id = match project.read(cx).remote_id() {
-            Some(project_id) => project_id,
-            None => return Ok(()),
-        };
-
-        self.client.send(proto::UnshareProject { project_id })?;
-        project.update(cx, |this, cx| this.unshare(cx))
-    }
-
-    pub(crate) fn set_location(
-        &mut self,
-        project: Option<&Model<Project>>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if self.status.is_offline() {
-            return Task::ready(Err(anyhow!("room is offline")));
-        }
-
-        let client = self.client.clone();
-        let room_id = self.id;
-        let location = if let Some(project) = project {
-            self.local_participant.active_project = Some(project.downgrade());
-            if let Some(project_id) = project.read(cx).remote_id() {
-                proto::participant_location::Variant::SharedProject(
-                    proto::participant_location::SharedProject { id: project_id },
-                )
-            } else {
-                proto::participant_location::Variant::UnsharedProject(
-                    proto::participant_location::UnsharedProject {},
-                )
-            }
-        } else {
-            self.local_participant.active_project = None;
-            proto::participant_location::Variant::External(proto::participant_location::External {})
-        };
-
-        cx.notify();
-        cx.background_executor().spawn(async move {
-            client
-                .request(proto::UpdateParticipantLocation {
-                    room_id,
-                    location: Some(proto::ParticipantLocation {
-                        variant: Some(location),
-                    }),
-                })
-                .await?;
-            Ok(())
-        })
-    }
-
-    pub fn is_screen_sharing(&self) -> bool {
-        self.live_kit.as_ref().map_or(false, |live_kit| {
-            !matches!(live_kit.screen_track, LocalTrack::None)
-        })
-    }
-
-    pub fn is_sharing_mic(&self) -> bool {
-        self.live_kit.as_ref().map_or(false, |live_kit| {
-            !matches!(live_kit.microphone_track, LocalTrack::None)
-        })
-    }
-
-    pub fn is_muted(&self, cx: &AppContext) -> bool {
-        self.live_kit
-            .as_ref()
-            .and_then(|live_kit| match &live_kit.microphone_track {
-                LocalTrack::None => Some(Self::mute_on_join(cx)),
-                LocalTrack::Pending { muted, .. } => Some(*muted),
-                LocalTrack::Published { muted, .. } => Some(*muted),
-            })
-            .unwrap_or(false)
-    }
-
-    pub fn is_speaking(&self) -> bool {
-        self.live_kit
-            .as_ref()
-            .map_or(false, |live_kit| live_kit.speaking)
-    }
-
-    pub fn is_deafened(&self) -> Option<bool> {
-        self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
-    }
-
-    #[track_caller]
-    pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        if self.status.is_offline() {
-            return Task::ready(Err(anyhow!("room is offline")));
-        } else if self.is_sharing_mic() {
-            return Task::ready(Err(anyhow!("microphone was already shared")));
-        }
-
-        let publish_id = if let Some(live_kit) = self.live_kit.as_mut() {
-            let publish_id = post_inc(&mut live_kit.next_publish_id);
-            live_kit.microphone_track = LocalTrack::Pending {
-                publish_id,
-                muted: false,
-            };
-            cx.notify();
-            publish_id
-        } else {
-            return Task::ready(Err(anyhow!("live-kit was not initialized")));
-        };
-
-        cx.spawn(move |this, mut cx| async move {
-            let publish_track = async {
-                let track = LocalAudioTrack::create();
-                this.upgrade()
-                    .ok_or_else(|| anyhow!("room was dropped"))?
-                    .update(&mut cx, |this, _| {
-                        this.live_kit
-                            .as_ref()
-                            .map(|live_kit| live_kit.room.publish_audio_track(track))
-                    })?
-                    .ok_or_else(|| anyhow!("live-kit was not initialized"))?
-                    .await
-            };
-            let publication = publish_track.await;
-            this.upgrade()
-                .ok_or_else(|| anyhow!("room was dropped"))?
-                .update(&mut cx, |this, cx| {
-                    let live_kit = this
-                        .live_kit
-                        .as_mut()
-                        .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
-
-                    let (canceled, muted) = if let LocalTrack::Pending {
-                        publish_id: cur_publish_id,
-                        muted,
-                    } = &live_kit.microphone_track
-                    {
-                        (*cur_publish_id != publish_id, *muted)
-                    } else {
-                        (true, false)
-                    };
-
-                    match publication {
-                        Ok(publication) => {
-                            if canceled {
-                                live_kit.room.unpublish_track(publication);
-                            } else {
-                                if muted {
-                                    cx.background_executor()
-                                        .spawn(publication.set_mute(muted))
-                                        .detach();
-                                }
-                                live_kit.microphone_track = LocalTrack::Published {
-                                    track_publication: publication,
-                                    muted,
-                                };
-                                cx.notify();
-                            }
-                            Ok(())
-                        }
-                        Err(error) => {
-                            if canceled {
-                                Ok(())
-                            } else {
-                                live_kit.microphone_track = LocalTrack::None;
-                                cx.notify();
-                                Err(error)
-                            }
-                        }
-                    }
-                })?
-        })
-    }
-
-    pub fn share_screen(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        if self.status.is_offline() {
-            return Task::ready(Err(anyhow!("room is offline")));
-        } else if self.is_screen_sharing() {
-            return Task::ready(Err(anyhow!("screen was already shared")));
-        }
-
-        let (displays, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
-            let publish_id = post_inc(&mut live_kit.next_publish_id);
-            live_kit.screen_track = LocalTrack::Pending {
-                publish_id,
-                muted: false,
-            };
-            cx.notify();
-            (live_kit.room.display_sources(), publish_id)
-        } else {
-            return Task::ready(Err(anyhow!("live-kit was not initialized")));
-        };
-
-        cx.spawn(move |this, mut cx| async move {
-            let publish_track = async {
-                let displays = displays.await?;
-                let display = displays
-                    .first()
-                    .ok_or_else(|| anyhow!("no display found"))?;
-                let track = LocalVideoTrack::screen_share_for_display(&display);
-                this.upgrade()
-                    .ok_or_else(|| anyhow!("room was dropped"))?
-                    .update(&mut cx, |this, _| {
-                        this.live_kit
-                            .as_ref()
-                            .map(|live_kit| live_kit.room.publish_video_track(track))
-                    })?
-                    .ok_or_else(|| anyhow!("live-kit was not initialized"))?
-                    .await
-            };
-
-            let publication = publish_track.await;
-            this.upgrade()
-                .ok_or_else(|| anyhow!("room was dropped"))?
-                .update(&mut cx, |this, cx| {
-                    let live_kit = this
-                        .live_kit
-                        .as_mut()
-                        .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
-
-                    let (canceled, muted) = if let LocalTrack::Pending {
-                        publish_id: cur_publish_id,
-                        muted,
-                    } = &live_kit.screen_track
-                    {
-                        (*cur_publish_id != publish_id, *muted)
-                    } else {
-                        (true, false)
-                    };
-
-                    match publication {
-                        Ok(publication) => {
-                            if canceled {
-                                live_kit.room.unpublish_track(publication);
-                            } else {
-                                if muted {
-                                    cx.background_executor()
-                                        .spawn(publication.set_mute(muted))
-                                        .detach();
-                                }
-                                live_kit.screen_track = LocalTrack::Published {
-                                    track_publication: publication,
-                                    muted,
-                                };
-                                cx.notify();
-                            }
-
-                            Audio::play_sound(Sound::StartScreenshare, cx);
-
-                            Ok(())
-                        }
-                        Err(error) => {
-                            if canceled {
-                                Ok(())
-                            } else {
-                                live_kit.screen_track = LocalTrack::None;
-                                cx.notify();
-                                Err(error)
-                            }
-                        }
-                    }
-                })?
-        })
-    }
-
-    pub fn toggle_mute(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
-        let should_mute = !self.is_muted(cx);
-        if let Some(live_kit) = self.live_kit.as_mut() {
-            if matches!(live_kit.microphone_track, LocalTrack::None) {
-                return Ok(self.share_microphone(cx));
-            }
-
-            let (ret_task, old_muted) = live_kit.set_mute(should_mute, cx)?;
-            live_kit.muted_by_user = should_mute;
-
-            if old_muted == true && live_kit.deafened == true {
-                if let Some(task) = self.toggle_deafen(cx).ok() {
-                    task.detach();
-                }
-            }
-
-            Ok(ret_task)
-        } else {
-            Err(anyhow!("LiveKit not started"))
-        }
-    }
-
-    pub fn toggle_deafen(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
-        if let Some(live_kit) = self.live_kit.as_mut() {
-            (*live_kit).deafened = !live_kit.deafened;
-
-            let mut tasks = Vec::with_capacity(self.remote_participants.len());
-            // Context notification is sent within set_mute itself.
-            let mut mute_task = None;
-            // When deafening, mute user's mic as well.
-            // When undeafening, unmute user's mic unless it was manually muted prior to deafening.
-            if live_kit.deafened || !live_kit.muted_by_user {
-                mute_task = Some(live_kit.set_mute(live_kit.deafened, cx)?.0);
-            };
-            for participant in self.remote_participants.values() {
-                for track in live_kit
-                    .room
-                    .remote_audio_track_publications(&participant.user.id.to_string())
-                {
-                    let deafened = live_kit.deafened;
-                    tasks.push(cx.foreground_executor().spawn(track.set_enabled(!deafened)));
-                }
-            }
-
-            Ok(cx.foreground_executor().spawn(async move {
-                if let Some(mute_task) = mute_task {
-                    mute_task.await?;
-                }
-                for task in tasks {
-                    task.await?;
-                }
-                Ok(())
-            }))
-        } else {
-            Err(anyhow!("LiveKit not started"))
-        }
-    }
-
-    pub fn unshare_screen(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
-        if self.status.is_offline() {
-            return Err(anyhow!("room is offline"));
-        }
-
-        let live_kit = self
-            .live_kit
-            .as_mut()
-            .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
-        match mem::take(&mut live_kit.screen_track) {
-            LocalTrack::None => Err(anyhow!("screen was not shared")),
-            LocalTrack::Pending { .. } => {
-                cx.notify();
-                Ok(())
-            }
-            LocalTrack::Published {
-                track_publication, ..
-            } => {
-                live_kit.room.unpublish_track(track_publication);
-                cx.notify();
-
-                Audio::play_sound(Sound::StopScreenshare, cx);
-                Ok(())
-            }
-        }
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn set_display_sources(&self, sources: Vec<live_kit_client::MacOSDisplay>) {
-        self.live_kit
-            .as_ref()
-            .unwrap()
-            .room
-            .set_display_sources(sources);
-    }
-}
-
-struct LiveKitRoom {
-    room: Arc<live_kit_client::Room>,
-    screen_track: LocalTrack,
-    microphone_track: LocalTrack,
-    /// Tracks whether we're currently in a muted state due to auto-mute from deafening or manual mute performed by user.
-    muted_by_user: bool,
-    deafened: bool,
-    speaking: bool,
-    next_publish_id: usize,
-    _maintain_room: Task<()>,
-    _maintain_tracks: [Task<()>; 2],
-}
-
-impl LiveKitRoom {
-    fn set_mute(
-        self: &mut LiveKitRoom,
-        should_mute: bool,
-        cx: &mut ModelContext<Room>,
-    ) -> Result<(Task<Result<()>>, bool)> {
-        if !should_mute {
-            // clear user muting state.
-            self.muted_by_user = false;
-        }
-
-        let (result, old_muted) = match &mut self.microphone_track {
-            LocalTrack::None => Err(anyhow!("microphone was not shared")),
-            LocalTrack::Pending { muted, .. } => {
-                let old_muted = *muted;
-                *muted = should_mute;
-                cx.notify();
-                Ok((Task::Ready(Some(Ok(()))), old_muted))
-            }
-            LocalTrack::Published {
-                track_publication,
-                muted,
-            } => {
-                let old_muted = *muted;
-                *muted = should_mute;
-                cx.notify();
-                Ok((
-                    cx.background_executor()
-                        .spawn(track_publication.set_mute(*muted)),
-                    old_muted,
-                ))
-            }
-        }?;
-
-        if old_muted != should_mute {
-            if should_mute {
-                Audio::play_sound(Sound::Mute, cx);
-            } else {
-                Audio::play_sound(Sound::Unmute, cx);
-            }
-        }
-
-        Ok((result, old_muted))
-    }
-}
-
-enum LocalTrack {
-    None,
-    Pending {
-        publish_id: usize,
-        muted: bool,
-    },
-    Published {
-        track_publication: LocalTrackPublication,
-        muted: bool,
-    },
-}
-
-impl Default for LocalTrack {
-    fn default() -> Self {
-        Self::None
-    }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub enum RoomStatus {
-    Online,
-    Rejoining,
-    Offline,
-}
-
-impl RoomStatus {
-    pub fn is_offline(&self) -> bool {
-        matches!(self, RoomStatus::Offline)
-    }
-
-    pub fn is_online(&self) -> bool {
-        matches!(self, RoomStatus::Online)
-    }
-}

crates/channel/Cargo.toml 🔗

@@ -14,13 +14,13 @@ test-support = ["collections/test-support", "gpui/test-support", "rpc/test-suppo
 [dependencies]
 client = { path = "../client" }
 collections = { path = "../collections" }
-db = { path = "../db" }
-gpui = { path = "../gpui" }
+db = { package = "db2", path = "../db2" }
+gpui = { package = "gpui2", path = "../gpui2" }
 util = { path = "../util" }
-rpc = { path = "../rpc" }
-text = { path = "../text" }
+rpc = { package = "rpc2", path = "../rpc2" }
+text = { package = "text2", path = "../text2" }
 language = { path = "../language" }
-settings = { path = "../settings" }
+settings = { package = "settings2", path = "../settings2" }
 feature_flags = { path = "../feature_flags" }
 sum_tree = { path = "../sum_tree" }
 clock = { path = "../clock" }
@@ -47,8 +47,8 @@ tempfile = "3"
 
 [dev-dependencies]
 collections = { path = "../collections", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
-rpc = { path = "../rpc", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 client = { path = "../client", features = ["test-support"] }
-settings = { path = "../settings", features = ["test-support"] }
+settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

crates/channel/src/channel.rs 🔗

@@ -3,7 +3,7 @@ mod channel_chat;
 mod channel_store;
 
 use client::{Client, UserStore};
-use gpui::{AppContext, ModelHandle};
+use gpui::{AppContext, Model};
 use std::sync::Arc;
 
 pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent, ACKNOWLEDGE_DEBOUNCE_INTERVAL};
@@ -16,7 +16,7 @@ pub use channel_store::{Channel, ChannelEvent, ChannelId, ChannelMembership, Cha
 #[cfg(test)]
 mod channel_store_tests;
 
-pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
+pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
     channel_store::init(client, user_store, cx);
     channel_buffer::init(client);
     channel_chat::init(client);

crates/channel/src/channel_buffer.rs 🔗

@@ -2,7 +2,7 @@ use crate::{Channel, ChannelId, ChannelStore};
 use anyhow::Result;
 use client::{Client, Collaborator, UserStore};
 use collections::HashMap;
-use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
 use language::proto::serialize_version;
 use rpc::{
     proto::{self, PeerId},
@@ -22,9 +22,9 @@ pub struct ChannelBuffer {
     pub channel_id: ChannelId,
     connected: bool,
     collaborators: HashMap<PeerId, Collaborator>,
-    user_store: ModelHandle<UserStore>,
-    channel_store: ModelHandle<ChannelStore>,
-    buffer: ModelHandle<language::Buffer>,
+    user_store: Model<UserStore>,
+    channel_store: Model<ChannelStore>,
+    buffer: Model<language::Buffer>,
     buffer_epoch: u64,
     client: Arc<Client>,
     subscription: Option<client::Subscription>,
@@ -38,31 +38,16 @@ pub enum ChannelBufferEvent {
     ChannelChanged,
 }
 
-impl Entity for ChannelBuffer {
-    type Event = ChannelBufferEvent;
-
-    fn release(&mut self, _: &mut AppContext) {
-        if self.connected {
-            if let Some(task) = self.acknowledge_task.take() {
-                task.detach();
-            }
-            self.client
-                .send(proto::LeaveChannelBuffer {
-                    channel_id: self.channel_id,
-                })
-                .log_err();
-        }
-    }
-}
+impl EventEmitter<ChannelBufferEvent> for ChannelBuffer {}
 
 impl ChannelBuffer {
     pub(crate) async fn new(
         channel: Arc<Channel>,
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
-        channel_store: ModelHandle<ChannelStore>,
+        user_store: Model<UserStore>,
+        channel_store: Model<ChannelStore>,
         mut cx: AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
+    ) -> Result<Model<Self>> {
         let response = client
             .request(proto::JoinChannelBuffer {
                 channel_id: channel.id,
@@ -76,16 +61,16 @@ impl ChannelBuffer {
             .map(language::proto::deserialize_operation)
             .collect::<Result<Vec<_>, _>>()?;
 
-        let buffer = cx.add_model(|_| {
+        let buffer = cx.new_model(|_| {
             language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
-        });
-        buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+        })?;
+        buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;
 
         let subscription = client.subscribe_to_entity(channel.id)?;
 
-        anyhow::Ok(cx.add_model(|cx| {
+        anyhow::Ok(cx.new_model(|cx| {
             cx.subscribe(&buffer, Self::on_buffer_update).detach();
-
+            cx.on_release(Self::release).detach();
             let mut this = Self {
                 buffer,
                 buffer_epoch: response.epoch,
@@ -100,14 +85,27 @@ impl ChannelBuffer {
             };
             this.replace_collaborators(response.collaborators, cx);
             this
-        }))
+        })?)
+    }
+
+    fn release(&mut self, _: &mut AppContext) {
+        if self.connected {
+            if let Some(task) = self.acknowledge_task.take() {
+                task.detach();
+            }
+            self.client
+                .send(proto::LeaveChannelBuffer {
+                    channel_id: self.channel_id,
+                })
+                .log_err();
+        }
     }
 
     pub fn remote_id(&self, cx: &AppContext) -> u64 {
         self.buffer.read(cx).remote_id()
     }
 
-    pub fn user_store(&self) -> &ModelHandle<UserStore> {
+    pub fn user_store(&self) -> &Model<UserStore> {
         &self.user_store
     }
 
@@ -136,7 +134,7 @@ impl ChannelBuffer {
     }
 
     async fn handle_update_channel_buffer(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -152,13 +150,13 @@ impl ChannelBuffer {
             cx.notify();
             this.buffer
                 .update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
-        })?;
+        })??;
 
         Ok(())
     }
 
     async fn handle_update_channel_buffer_collaborators(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::UpdateChannelBufferCollaborators>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -167,14 +165,12 @@ impl ChannelBuffer {
             this.replace_collaborators(message.payload.collaborators, cx);
             cx.emit(ChannelBufferEvent::CollaboratorsChanged);
             cx.notify();
-        });
-
-        Ok(())
+        })
     }
 
     fn on_buffer_update(
         &mut self,
-        _: ModelHandle<language::Buffer>,
+        _: Model<language::Buffer>,
         event: &language::Event,
         cx: &mut ModelContext<Self>,
     ) {
@@ -202,8 +198,10 @@ impl ChannelBuffer {
         let client = self.client.clone();
         let epoch = self.epoch();
 
-        self.acknowledge_task = Some(cx.spawn_weak(|_, cx| async move {
-            cx.background().timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL).await;
+        self.acknowledge_task = Some(cx.spawn(move |_, cx| async move {
+            cx.background_executor()
+                .timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL)
+                .await;
             client
                 .send(proto::AckBufferOperation {
                     buffer_id,
@@ -219,7 +217,7 @@ impl ChannelBuffer {
         self.buffer_epoch
     }
 
-    pub fn buffer(&self) -> ModelHandle<language::Buffer> {
+    pub fn buffer(&self) -> Model<language::Buffer> {
         self.buffer.clone()
     }
 

crates/channel/src/channel_chat.rs 🔗

@@ -6,7 +6,7 @@ use client::{
     Client, Subscription, TypedEnvelope, UserId,
 };
 use futures::lock::Mutex;
-use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
 use rand::prelude::*;
 use std::{
     collections::HashSet,
@@ -22,11 +22,11 @@ pub struct ChannelChat {
     pub channel_id: ChannelId,
     messages: SumTree<ChannelMessage>,
     acknowledged_message_ids: HashSet<u64>,
-    channel_store: ModelHandle<ChannelStore>,
+    channel_store: Model<ChannelStore>,
     loaded_all_messages: bool,
     last_acknowledged_id: Option<u64>,
     next_pending_message_id: usize,
-    user_store: ModelHandle<UserStore>,
+    user_store: Model<UserStore>,
     rpc: Arc<Client>,
     outgoing_messages_lock: Arc<Mutex<()>>,
     rng: StdRng,
@@ -76,31 +76,20 @@ pub enum ChannelChatEvent {
     },
 }
 
+impl EventEmitter<ChannelChatEvent> for ChannelChat {}
 pub fn init(client: &Arc<Client>) {
     client.add_model_message_handler(ChannelChat::handle_message_sent);
     client.add_model_message_handler(ChannelChat::handle_message_removed);
 }
 
-impl Entity for ChannelChat {
-    type Event = ChannelChatEvent;
-
-    fn release(&mut self, _: &mut AppContext) {
-        self.rpc
-            .send(proto::LeaveChannelChat {
-                channel_id: self.channel_id,
-            })
-            .log_err();
-    }
-}
-
 impl ChannelChat {
     pub async fn new(
         channel: Arc<Channel>,
-        channel_store: ModelHandle<ChannelStore>,
-        user_store: ModelHandle<UserStore>,
+        channel_store: Model<ChannelStore>,
+        user_store: Model<UserStore>,
         client: Arc<Client>,
         mut cx: AsyncAppContext,
-    ) -> Result<ModelHandle<Self>> {
+    ) -> Result<Model<Self>> {
         let channel_id = channel.id;
         let subscription = client.subscribe_to_entity(channel_id).unwrap();
 
@@ -110,7 +99,8 @@ impl ChannelChat {
         let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
         let loaded_all_messages = response.done;
 
-        Ok(cx.add_model(|cx| {
+        Ok(cx.new_model(|cx| {
+            cx.on_release(Self::release).detach();
             let mut this = Self {
                 channel_id: channel.id,
                 user_store,
@@ -127,7 +117,15 @@ impl ChannelChat {
             };
             this.insert_messages(messages, cx);
             this
-        }))
+        })?)
+    }
+
+    fn release(&mut self, _: &mut AppContext) {
+        self.rpc
+            .send(proto::LeaveChannelChat {
+                channel_id: self.channel_id,
+            })
+            .log_err();
     }
 
     pub fn channel(&self, cx: &AppContext) -> Option<Arc<Channel>> {
@@ -176,7 +174,7 @@ impl ChannelChat {
         let user_store = self.user_store.clone();
         let rpc = self.rpc.clone();
         let outgoing_messages_lock = self.outgoing_messages_lock.clone();
-        Ok(cx.spawn(|this, mut cx| async move {
+        Ok(cx.spawn(move |this, mut cx| async move {
             let outgoing_message_guard = outgoing_messages_lock.lock().await;
             let request = rpc.request(proto::SendChannelMessage {
                 channel_id,
@@ -191,8 +189,8 @@ impl ChannelChat {
             let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
             this.update(&mut cx, |this, cx| {
                 this.insert_messages(SumTree::from_item(message, &()), cx);
-                Ok(id)
-            })
+            })?;
+            Ok(id)
         }))
     }
 
@@ -201,13 +199,12 @@ impl ChannelChat {
             channel_id: self.channel_id,
             message_id: id,
         });
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             response.await?;
-
             this.update(&mut cx, |this, cx| {
                 this.message_removed(id, cx);
-                Ok(())
-            })
+            })?;
+            Ok(())
         })
     }
 
@@ -220,7 +217,7 @@ impl ChannelChat {
         let user_store = self.user_store.clone();
         let channel_id = self.channel_id;
         let before_message_id = self.first_loaded_message_id()?;
-        Some(cx.spawn(|this, mut cx| {
+        Some(cx.spawn(move |this, mut cx| {
             async move {
                 let response = rpc
                     .request(proto::GetChannelMessages {
@@ -233,7 +230,7 @@ impl ChannelChat {
                 this.update(&mut cx, |this, cx| {
                     this.loaded_all_messages = loaded_all_messages;
                     this.insert_messages(messages, cx);
-                });
+                })?;
                 anyhow::Ok(())
             }
             .log_err()
@@ -251,31 +248,33 @@ impl ChannelChat {
     ///
     /// For now, we always maintain a suffix of the channel's messages.
     pub async fn load_history_since_message(
-        chat: ModelHandle<Self>,
+        chat: Model<Self>,
         message_id: u64,
         mut cx: AsyncAppContext,
     ) -> Option<usize> {
         loop {
-            let step = chat.update(&mut cx, |chat, cx| {
-                if let Some(first_id) = chat.first_loaded_message_id() {
-                    if first_id <= message_id {
-                        let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>();
-                        let message_id = ChannelMessageId::Saved(message_id);
-                        cursor.seek(&message_id, Bias::Left, &());
-                        return ControlFlow::Break(
-                            if cursor
-                                .item()
-                                .map_or(false, |message| message.id == message_id)
-                            {
-                                Some(cursor.start().1 .0)
-                            } else {
-                                None
-                            },
-                        );
+            let step = chat
+                .update(&mut cx, |chat, cx| {
+                    if let Some(first_id) = chat.first_loaded_message_id() {
+                        if first_id <= message_id {
+                            let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>();
+                            let message_id = ChannelMessageId::Saved(message_id);
+                            cursor.seek(&message_id, Bias::Left, &());
+                            return ControlFlow::Break(
+                                if cursor
+                                    .item()
+                                    .map_or(false, |message| message.id == message_id)
+                                {
+                                    Some(cursor.start().1 .0)
+                                } else {
+                                    None
+                                },
+                            );
+                        }
                     }
-                }
-                ControlFlow::Continue(chat.load_more_messages(cx))
-            });
+                    ControlFlow::Continue(chat.load_more_messages(cx))
+                })
+                .log_err()?;
             match step {
                 ControlFlow::Break(ix) => return ix,
                 ControlFlow::Continue(task) => task?.await?,
@@ -307,7 +306,7 @@ impl ChannelChat {
         let user_store = self.user_store.clone();
         let rpc = self.rpc.clone();
         let channel_id = self.channel_id;
-        cx.spawn(|this, mut cx| {
+        cx.spawn(move |this, mut cx| {
             async move {
                 let response = rpc.request(proto::JoinChannelChat { channel_id }).await?;
                 let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
@@ -333,7 +332,7 @@ impl ChannelChat {
                     }
 
                     this.pending_messages().cloned().collect::<Vec<_>>()
-                });
+                })?;
 
                 for pending_message in pending_messages {
                     let request = rpc.request(proto::SendChannelMessage {
@@ -351,7 +350,7 @@ impl ChannelChat {
                     .await?;
                     this.update(&mut cx, |this, cx| {
                         this.insert_messages(SumTree::from_item(message, &()), cx);
-                    });
+                    })?;
                 }
 
                 anyhow::Ok(())
@@ -399,12 +398,12 @@ impl ChannelChat {
     }
 
     async fn handle_message_sent(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::ChannelMessageSent>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
+        let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
         let message = message
             .payload
             .message
@@ -418,20 +417,20 @@ impl ChannelChat {
                 channel_id: this.channel_id,
                 message_id,
             })
-        });
+        })?;
 
         Ok(())
     }
 
     async fn handle_message_removed(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::RemoveChannelMessage>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
     ) -> Result<()> {
         this.update(&mut cx, |this, cx| {
             this.message_removed(message.payload.message_id, cx)
-        });
+        })?;
         Ok(())
     }
 
@@ -515,7 +514,7 @@ impl ChannelChat {
 
 async fn messages_from_proto(
     proto_messages: Vec<proto::ChannelMessage>,
-    user_store: &ModelHandle<UserStore>,
+    user_store: &Model<UserStore>,
     cx: &mut AsyncAppContext,
 ) -> Result<SumTree<ChannelMessage>> {
     let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?;
@@ -527,13 +526,13 @@ async fn messages_from_proto(
 impl ChannelMessage {
     pub async fn from_proto(
         message: proto::ChannelMessage,
-        user_store: &ModelHandle<UserStore>,
+        user_store: &Model<UserStore>,
         cx: &mut AsyncAppContext,
     ) -> Result<Self> {
         let sender = user_store
             .update(cx, |user_store, cx| {
                 user_store.get_user(message.sender_id, cx)
-            })
+            })?
             .await?;
         Ok(ChannelMessage {
             id: ChannelMessageId::Saved(message.id),
@@ -561,7 +560,7 @@ impl ChannelMessage {
 
     pub async fn from_proto_vec(
         proto_messages: Vec<proto::ChannelMessage>,
-        user_store: &ModelHandle<UserStore>,
+        user_store: &Model<UserStore>,
         cx: &mut AsyncAppContext,
     ) -> Result<Vec<Self>> {
         let unique_user_ids = proto_messages
@@ -573,7 +572,7 @@ impl ChannelMessage {
         user_store
             .update(cx, |user_store, cx| {
                 user_store.get_users(unique_user_ids, cx)
-            })
+            })?
             .await?;
 
         let mut messages = Vec::with_capacity(proto_messages.len());

crates/channel/src/channel_store.rs 🔗

@@ -7,17 +7,20 @@ use client::{Client, Subscription, User, UserId, UserStore};
 use collections::{hash_map, HashMap, HashSet};
 use db::RELEASE_CHANNEL;
 use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
-use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
+use gpui::{
+    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, SharedString, Task,
+    WeakModel,
+};
 use rpc::{
     proto::{self, ChannelVisibility},
     TypedEnvelope,
 };
 use std::{mem, sync::Arc, time::Duration};
-use util::ResultExt;
+use util::{async_maybe, ResultExt};
 
-pub fn init(client: &Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
+pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
     let channel_store =
-        cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
+        cx.new_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
     cx.set_global(channel_store);
 }
 
@@ -34,7 +37,7 @@ pub struct ChannelStore {
     opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
     opened_chats: HashMap<ChannelId, OpenedModelHandle<ChannelChat>>,
     client: Arc<Client>,
-    user_store: ModelHandle<UserStore>,
+    user_store: Model<UserStore>,
     _rpc_subscription: Subscription,
     _watch_connection_status: Task<Option<()>>,
     disconnect_channel_buffers_task: Option<Task<()>>,
@@ -44,7 +47,7 @@ pub struct ChannelStore {
 #[derive(Clone, Debug, PartialEq)]
 pub struct Channel {
     pub id: ChannelId,
-    pub name: String,
+    pub name: SharedString,
     pub visibility: proto::ChannelVisibility,
     pub role: proto::ChannelRole,
     pub unseen_note_version: Option<(u64, clock::Global)>,
@@ -112,44 +115,45 @@ pub enum ChannelEvent {
     ChannelRenamed(ChannelId),
 }
 
-impl Entity for ChannelStore {
-    type Event = ChannelEvent;
-}
+impl EventEmitter<ChannelEvent> for ChannelStore {}
 
-enum OpenedModelHandle<E: Entity> {
-    Open(WeakModelHandle<E>),
-    Loading(Shared<Task<Result<ModelHandle<E>, Arc<anyhow::Error>>>>),
+enum OpenedModelHandle<E> {
+    Open(WeakModel<E>),
+    Loading(Shared<Task<Result<Model<E>, Arc<anyhow::Error>>>>),
 }
 
 impl ChannelStore {
-    pub fn global(cx: &AppContext) -> ModelHandle<Self> {
-        cx.global::<ModelHandle<Self>>().clone()
+    pub fn global(cx: &AppContext) -> Model<Self> {
+        cx.global::<Model<Self>>().clone()
     }
 
     pub fn new(
         client: Arc<Client>,
-        user_store: ModelHandle<UserStore>,
+        user_store: Model<UserStore>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
         let rpc_subscription =
-            client.add_message_handler(cx.handle(), Self::handle_update_channels);
+            client.add_message_handler(cx.weak_model(), Self::handle_update_channels);
 
         let mut connection_status = client.status();
         let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
-        let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
+        let watch_connection_status = cx.spawn(|this, mut cx| async move {
             while let Some(status) = connection_status.next().await {
-                let this = this.upgrade(&cx)?;
+                let this = this.upgrade()?;
                 match status {
                     client::Status::Connected { .. } => {
                         this.update(&mut cx, |this, cx| this.handle_connect(cx))
+                            .ok()?
                             .await
                             .log_err()?;
                     }
                     client::Status::SignedOut | client::Status::UpgradeRequired => {
-                        this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx));
+                        this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx))
+                            .ok();
                     }
                     _ => {
-                        this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx));
+                        this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx))
+                            .ok();
                     }
                 }
             }
@@ -169,17 +173,22 @@ impl ChannelStore {
             _rpc_subscription: rpc_subscription,
             _watch_connection_status: watch_connection_status,
             disconnect_channel_buffers_task: None,
-            _update_channels: cx.spawn_weak(|this, mut cx| async move {
-                while let Some(update_channels) = update_channels_rx.next().await {
-                    if let Some(this) = this.upgrade(&cx) {
-                        let update_task = this.update(&mut cx, |this, cx| {
-                            this.update_channels(update_channels, cx)
-                        });
-                        if let Some(update_task) = update_task {
-                            update_task.await.log_err();
+            _update_channels: cx.spawn(|this, mut cx| async move {
+                async_maybe!({
+                    while let Some(update_channels) = update_channels_rx.next().await {
+                        if let Some(this) = this.upgrade() {
+                            let update_task = this.update(&mut cx, |this, cx| {
+                                this.update_channels(update_channels, cx)
+                            })?;
+                            if let Some(update_task) = update_task {
+                                update_task.await.log_err();
+                            }
                         }
                     }
-                }
+                    anyhow::Ok(())
+                })
+                .await
+                .log_err();
             }),
         }
     }
@@ -240,10 +249,10 @@ impl ChannelStore {
         self.channel_index.by_id().get(&channel_id)
     }
 
-    pub fn has_open_channel_buffer(&self, channel_id: ChannelId, cx: &AppContext) -> bool {
+    pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
         if let Some(buffer) = self.opened_buffers.get(&channel_id) {
             if let OpenedModelHandle::Open(buffer) = buffer {
-                return buffer.upgrade(cx).is_some();
+                return buffer.upgrade().is_some();
             }
         }
         false
@@ -253,7 +262,7 @@ impl ChannelStore {
         &mut self,
         channel_id: ChannelId,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<ChannelBuffer>>> {
+    ) -> Task<Result<Model<ChannelBuffer>>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
         let channel_store = cx.handle();
@@ -278,13 +287,13 @@ impl ChannelStore {
                     .request(proto::GetChannelMessagesById { message_ids }),
             )
         };
-        cx.spawn_weak(|this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             if let Some(request) = request {
                 let response = request.await?;
                 let this = this
-                    .upgrade(&cx)
+                    .upgrade()
                     .ok_or_else(|| anyhow!("channel store dropped"))?;
-                let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
+                let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
                 ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await
             } else {
                 Ok(Vec::new())
@@ -354,7 +363,7 @@ impl ChannelStore {
         &mut self,
         channel_id: ChannelId,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<ChannelChat>>> {
+    ) -> Task<Result<Model<ChannelChat>>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
         let this = cx.handle();
@@ -371,22 +380,23 @@ impl ChannelStore {
     /// Make sure that the resource is only opened once, even if this method
     /// is called multiple times with the same channel id while the first task
     /// is still running.
-    fn open_channel_resource<T: Entity, F, Fut>(
+    fn open_channel_resource<T, F, Fut>(
         &mut self,
         channel_id: ChannelId,
         get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenedModelHandle<T>>,
         load: F,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<T>>>
+    ) -> Task<Result<Model<T>>>
     where
         F: 'static + FnOnce(Arc<Channel>, AsyncAppContext) -> Fut,
-        Fut: Future<Output = Result<ModelHandle<T>>>,
+        Fut: Future<Output = Result<Model<T>>>,
+        T: 'static,
     {
         let task = loop {
             match get_map(self).entry(channel_id) {
                 hash_map::Entry::Occupied(e) => match e.get() {
                     OpenedModelHandle::Open(model) => {
-                        if let Some(model) = model.upgrade(cx) {
+                        if let Some(model) = model.upgrade() {
                             break Task::ready(Ok(model)).shared();
                         } else {
                             get_map(self).remove(&channel_id);
@@ -399,12 +409,12 @@ impl ChannelStore {
                 },
                 hash_map::Entry::Vacant(e) => {
                     let task = cx
-                        .spawn(|this, cx| async move {
-                            let channel = this.read_with(&cx, |this, _| {
+                        .spawn(move |this, mut cx| async move {
+                            let channel = this.update(&mut cx, |this, _| {
                                 this.channel_for_id(channel_id).cloned().ok_or_else(|| {
                                     Arc::new(anyhow!("no channel for id: {}", channel_id))
                                 })
-                            })?;
+                            })??;
 
                             load(channel, cx).await.map_err(Arc::new)
                         })
@@ -413,7 +423,7 @@ impl ChannelStore {
                     e.insert(OpenedModelHandle::Loading(task.clone()));
                     cx.spawn({
                         let task = task.clone();
-                        |this, mut cx| async move {
+                        move |this, mut cx| async move {
                             let result = task.await;
                             this.update(&mut cx, |this, _| match result {
                                 Ok(model) => {
@@ -425,7 +435,8 @@ impl ChannelStore {
                                 Err(_) => {
                                     get_map(this).remove(&channel_id);
                                 }
-                            });
+                            })
+                            .ok();
                         }
                     })
                     .detach();
@@ -433,7 +444,7 @@ impl ChannelStore {
                 }
             }
         };
-        cx.foreground()
+        cx.background_executor()
             .spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
     }
 
@@ -458,7 +469,7 @@ impl ChannelStore {
     ) -> Task<Result<ChannelId>> {
         let client = self.client.clone();
         let name = name.trim_start_matches("#").to_owned();
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let response = client
                 .request(proto::CreateChannel { name, parent_id })
                 .await?;
@@ -468,15 +479,6 @@ impl ChannelStore {
                 .ok_or_else(|| anyhow!("missing channel in response"))?;
             let channel_id = channel.id;
 
-            // let parent_edge = if let Some(parent_id) = parent_id {
-            //     vec![ChannelEdge {
-            //         channel_id: channel.id,
-            //         parent_id,
-            //     }]
-            // } else {
-            //     vec![]
-            // };
-
             this.update(&mut cx, |this, cx| {
                 let task = this.update_channels(
                     proto::UpdateChannels {
@@ -492,7 +494,7 @@ impl ChannelStore {
                 // will resolve before this flush_effects finishes. Synchronously emitting this event
                 // ensures that the collab panel will observe this creation before the frame completes
                 cx.emit(ChannelEvent::ChannelCreated(channel_id));
-            });
+            })?;
 
             Ok(channel_id)
         })
@@ -505,7 +507,7 @@ impl ChannelStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
-        cx.spawn(|_, _| async move {
+        cx.spawn(move |_, _| async move {
             let _ = client
                 .request(proto::MoveChannel { channel_id, to })
                 .await?;
@@ -521,7 +523,7 @@ impl ChannelStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
-        cx.spawn(|_, _| async move {
+        cx.spawn(move |_, _| async move {
             let _ = client
                 .request(proto::SetChannelVisibility {
                     channel_id,
@@ -546,7 +548,7 @@ impl ChannelStore {
 
         cx.notify();
         let client = self.client.clone();
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let result = client
                 .request(proto::InviteChannelMember {
                     channel_id,
@@ -558,7 +560,7 @@ impl ChannelStore {
             this.update(&mut cx, |this, cx| {
                 this.outgoing_invites.remove(&(channel_id, user_id));
                 cx.notify();
-            });
+            })?;
 
             result?;
 
@@ -578,7 +580,7 @@ impl ChannelStore {
 
         cx.notify();
         let client = self.client.clone();
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let result = client
                 .request(proto::RemoveChannelMember {
                     channel_id,
@@ -589,7 +591,7 @@ impl ChannelStore {
             this.update(&mut cx, |this, cx| {
                 this.outgoing_invites.remove(&(channel_id, user_id));
                 cx.notify();
-            });
+            })?;
             result?;
             Ok(())
         })
@@ -608,7 +610,7 @@ impl ChannelStore {
 
         cx.notify();
         let client = self.client.clone();
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let result = client
                 .request(proto::SetChannelMemberRole {
                     channel_id,
@@ -620,7 +622,7 @@ impl ChannelStore {
             this.update(&mut cx, |this, cx| {
                 this.outgoing_invites.remove(&(channel_id, user_id));
                 cx.notify();
-            });
+            })?;
 
             result?;
             Ok(())
@@ -635,7 +637,7 @@ impl ChannelStore {
     ) -> Task<Result<()>> {
         let client = self.client.clone();
         let name = new_name.to_string();
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let channel = client
                 .request(proto::RenameChannel { channel_id, name })
                 .await?
@@ -656,7 +658,7 @@ impl ChannelStore {
                 // will resolve before this flush_effects finishes. Synchronously emitting this event
                 // ensures that the collab panel will observe this creation before the frame complete
                 cx.emit(ChannelEvent::ChannelRenamed(channel_id))
-            });
+            })?;
             Ok(())
         })
     }
@@ -668,7 +670,7 @@ impl ChannelStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
-        cx.background().spawn(async move {
+        cx.background_executor().spawn(async move {
             client
                 .request(proto::RespondToChannelInvite { channel_id, accept })
                 .await?;
@@ -683,17 +685,17 @@ impl ChannelStore {
     ) -> Task<Result<Vec<ChannelMembership>>> {
         let client = self.client.clone();
         let user_store = self.user_store.downgrade();
-        cx.spawn(|_, mut cx| async move {
+        cx.spawn(move |_, mut cx| async move {
             let response = client
                 .request(proto::GetChannelMembers { channel_id })
                 .await?;
 
             let user_ids = response.members.iter().map(|m| m.user_id).collect();
             let user_store = user_store
-                .upgrade(&cx)
+                .upgrade()
                 .ok_or_else(|| anyhow!("user store dropped"))?;
             let users = user_store
-                .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
+                .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
                 .await?;
 
             Ok(users
@@ -727,7 +729,7 @@ impl ChannelStore {
     }
 
     async fn handle_update_channels(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::UpdateChannels>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -736,7 +738,7 @@ impl ChannelStore {
             this.update_channels_tx
                 .unbounded_send(message.payload)
                 .unwrap();
-        });
+        })?;
         Ok(())
     }
 
@@ -750,7 +752,7 @@ impl ChannelStore {
 
         for chat in self.opened_chats.values() {
             if let OpenedModelHandle::Open(chat) = chat {
-                if let Some(chat) = chat.upgrade(cx) {
+                if let Some(chat) = chat.upgrade() {
                     chat.update(cx, |chat, cx| {
                         chat.rejoin(cx);
                     });
@@ -761,7 +763,7 @@ impl ChannelStore {
         let mut buffer_versions = Vec::new();
         for buffer in self.opened_buffers.values() {
             if let OpenedModelHandle::Open(buffer) = buffer {
-                if let Some(buffer) = buffer.upgrade(cx) {
+                if let Some(buffer) = buffer.upgrade() {
                     let channel_buffer = buffer.read(cx);
                     let buffer = channel_buffer.buffer().read(cx);
                     buffer_versions.push(proto::ChannelBufferVersion {
@@ -787,7 +789,7 @@ impl ChannelStore {
             this.update(&mut cx, |this, cx| {
                 this.opened_buffers.retain(|_, buffer| match buffer {
                     OpenedModelHandle::Open(channel_buffer) => {
-                        let Some(channel_buffer) = channel_buffer.upgrade(cx) else {
+                        let Some(channel_buffer) = channel_buffer.upgrade() else {
                             return false;
                         };
 
@@ -824,7 +826,7 @@ impl ChannelStore {
 
                                 if let Some(operations) = operations {
                                     let client = this.client.clone();
-                                    cx.background()
+                                    cx.background_executor()
                                         .spawn(async move {
                                             let operations = operations.await;
                                             for chunk in
@@ -849,7 +851,8 @@ impl ChannelStore {
                     }
                     OpenedModelHandle::Loading(_) => true,
                 });
-            });
+            })
+            .ok();
             anyhow::Ok(())
         })
     }
@@ -858,21 +861,22 @@ impl ChannelStore {
         cx.notify();
 
         self.disconnect_channel_buffers_task.get_or_insert_with(|| {
-            cx.spawn_weak(|this, mut cx| async move {
+            cx.spawn(move |this, mut cx| async move {
                 if wait_for_reconnect {
-                    cx.background().timer(RECONNECT_TIMEOUT).await;
+                    cx.background_executor().timer(RECONNECT_TIMEOUT).await;
                 }
 
-                if let Some(this) = this.upgrade(&cx) {
+                if let Some(this) = this.upgrade() {
                     this.update(&mut cx, |this, cx| {
                         for (_, buffer) in this.opened_buffers.drain() {
                             if let OpenedModelHandle::Open(buffer) = buffer {
-                                if let Some(buffer) = buffer.upgrade(cx) {
+                                if let Some(buffer) = buffer.upgrade() {
                                     buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
                                 }
                             }
                         }
-                    });
+                    })
+                    .ok();
                 }
             })
         });
@@ -892,14 +896,16 @@ impl ChannelStore {
                 .channel_invitations
                 .binary_search_by_key(&channel.id, |c| c.id)
             {
-                Ok(ix) => Arc::make_mut(&mut self.channel_invitations[ix]).name = channel.name,
+                Ok(ix) => {
+                    Arc::make_mut(&mut self.channel_invitations[ix]).name = channel.name.into()
+                }
                 Err(ix) => self.channel_invitations.insert(
                     ix,
                     Arc::new(Channel {
                         id: channel.id,
                         visibility: channel.visibility(),
                         role: channel.role(),
-                        name: channel.name,
+                        name: channel.name.into(),
                         unseen_note_version: None,
                         unseen_message_id: None,
                         parent_path: channel.parent_path,
@@ -931,7 +937,7 @@ impl ChannelStore {
                     if let Some(OpenedModelHandle::Open(buffer)) =
                         self.opened_buffers.remove(&channel_id)
                     {
-                        if let Some(buffer) = buffer.upgrade(cx) {
+                        if let Some(buffer) = buffer.upgrade() {
                             buffer.update(cx, ChannelBuffer::disconnect);
                         }
                     }
@@ -945,7 +951,7 @@ impl ChannelStore {
 
                 if channel_changed {
                     if let Some(OpenedModelHandle::Open(buffer)) = self.opened_buffers.get(&id) {
-                        if let Some(buffer) = buffer.upgrade(cx) {
+                        if let Some(buffer) = buffer.upgrade() {
                             buffer.update(cx, ChannelBuffer::channel_changed);
                         }
                     }
@@ -1010,8 +1016,7 @@ impl ChannelStore {
                 }
 
                 cx.notify();
-            });
-            anyhow::Ok(())
+            })
         }))
     }
 }

crates/channel/src/channel_store/channel_index.rs 🔗

@@ -104,7 +104,7 @@ impl<'a> ChannelPathsInsertGuard<'a> {
 
             existing_channel.visibility = channel_proto.visibility();
             existing_channel.role = channel_proto.role();
-            existing_channel.name = channel_proto.name;
+            existing_channel.name = channel_proto.name.into();
         } else {
             self.channels_by_id.insert(
                 channel_proto.id,
@@ -112,7 +112,7 @@ impl<'a> ChannelPathsInsertGuard<'a> {
                     id: channel_proto.id,
                     visibility: channel_proto.visibility(),
                     role: channel_proto.role(),
-                    name: channel_proto.name,
+                    name: channel_proto.name.into(),
                     unseen_note_version: None,
                     unseen_message_id: None,
                     parent_path: channel_proto.parent_path,
@@ -146,11 +146,11 @@ fn channel_path_sorting_key<'a>(
     let (parent_path, name) = channels_by_id
         .get(&id)
         .map_or((&[] as &[_], None), |channel| {
-            (channel.parent_path.as_slice(), Some(channel.name.as_str()))
+            (channel.parent_path.as_slice(), Some(channel.name.as_ref()))
         });
     parent_path
         .iter()
-        .filter_map(|id| Some(channels_by_id.get(id)?.name.as_str()))
+        .filter_map(|id| Some(channels_by_id.get(id)?.name.as_ref()))
         .chain(name)
 }
 

crates/channel/src/channel_store_tests.rs 🔗

@@ -2,7 +2,7 @@ use crate::channel_chat::ChannelChatEvent;
 
 use super::*;
 use client::{test::FakeServer, Client, UserStore};
-use gpui::{AppContext, ModelHandle, TestAppContext};
+use gpui::{AppContext, Context, Model, TestAppContext};
 use rpc::proto::{self};
 use settings::SettingsStore;
 use util::http::FakeHttpClient;
@@ -147,7 +147,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
     let user_id = 5;
     let channel_id = 5;
     let channel_store = cx.update(init_test);
-    let client = channel_store.read_with(cx, |s, _| s.client());
+    let client = channel_store.update(cx, |s, _| s.client());
     let server = FakeServer::for_client(user_id, &client, cx).await;
 
     // Get the available channels.
@@ -161,8 +161,8 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
         }],
         ..Default::default()
     });
-    cx.foreground().run_until_parked();
-    cx.read(|cx| {
+    cx.executor().run_until_parked();
+    cx.update(|cx| {
         assert_channels(
             &channel_store,
             &[(0, "the-channel".to_string(), proto::ChannelRole::Member)],
@@ -214,7 +214,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
         },
     );
 
-    cx.foreground().start_waiting();
+    cx.executor().start_waiting();
 
     // Client requests all users for the received messages
     let mut get_users = server.receive::<proto::GetUsers>().await.unwrap();
@@ -232,7 +232,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
     );
 
     let channel = channel.await.unwrap();
-    channel.read_with(cx, |channel, _| {
+    channel.update(cx, |channel, _| {
         assert_eq!(
             channel
                 .messages_in_range(0..2)
@@ -273,13 +273,13 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
     );
 
     assert_eq!(
-        channel.next_event(cx).await,
+        channel.next_event(cx),
         ChannelChatEvent::MessagesUpdated {
             old_range: 2..2,
             new_count: 1,
         }
     );
-    channel.read_with(cx, |channel, _| {
+    channel.update(cx, |channel, _| {
         assert_eq!(
             channel
                 .messages_in_range(2..3)
@@ -322,13 +322,13 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
     );
 
     assert_eq!(
-        channel.next_event(cx).await,
+        channel.next_event(cx),
         ChannelChatEvent::MessagesUpdated {
             old_range: 0..0,
             new_count: 2,
         }
     );
-    channel.read_with(cx, |channel, _| {
+    channel.update(cx, |channel, _| {
         assert_eq!(
             channel
                 .messages_in_range(0..2)
@@ -342,13 +342,13 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
     });
 }
 
-fn init_test(cx: &mut AppContext) -> ModelHandle<ChannelStore> {
+fn init_test(cx: &mut AppContext) -> Model<ChannelStore> {
     let http = FakeHttpClient::with_404_response();
     let client = Client::new(http.clone(), cx);
-    let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
+    let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
 
-    cx.foreground().forbid_parking();
-    cx.set_global(SettingsStore::test(cx));
+    let settings_store = SettingsStore::test(cx);
+    cx.set_global(settings_store);
     client::init(&client, cx);
     crate::init(&client, user_store, cx);
 
@@ -356,7 +356,7 @@ fn init_test(cx: &mut AppContext) -> ModelHandle<ChannelStore> {
 }
 
 fn update_channels(
-    channel_store: &ModelHandle<ChannelStore>,
+    channel_store: &Model<ChannelStore>,
     message: proto::UpdateChannels,
     cx: &mut AppContext,
 ) {
@@ -366,11 +366,11 @@ fn update_channels(
 
 #[track_caller]
 fn assert_channels(
-    channel_store: &ModelHandle<ChannelStore>,
+    channel_store: &Model<ChannelStore>,
     expected_channels: &[(usize, String, proto::ChannelRole)],
-    cx: &AppContext,
+    cx: &mut AppContext,
 ) {
-    let actual = channel_store.read_with(cx, |store, _| {
+    let actual = channel_store.update(cx, |store, _| {
         store
             .ordered_channels()
             .map(|(depth, channel)| (depth, channel.name.to_string(), channel.role))

crates/channel2/Cargo.toml 🔗

@@ -1,54 +0,0 @@
-[package]
-name = "channel2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/channel2.rs"
-doctest = false
-
-[features]
-test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
-
-[dependencies]
-client = { package = "client2", path = "../client2" }
-collections = { path = "../collections" }
-db = { package = "db2", path = "../db2" }
-gpui = { package = "gpui2", path = "../gpui2" }
-util = { path = "../util" }
-rpc = { package = "rpc2", path = "../rpc2" }
-text = { package = "text2", path = "../text2" }
-language = { path = "../language" }
-settings = { package = "settings2", path = "../settings2" }
-feature_flags = { path = "../feature_flags" }
-sum_tree = { path = "../sum_tree" }
-clock = { path = "../clock" }
-
-anyhow.workspace = true
-futures.workspace = true
-image = "0.23"
-lazy_static.workspace = true
-smallvec.workspace = true
-log.workspace = true
-parking_lot.workspace = true
-postage.workspace = true
-rand.workspace = true
-schemars.workspace = true
-smol.workspace = true
-thiserror.workspace = true
-time.workspace = true
-tiny_http = "0.8"
-uuid.workspace = true
-url = "2.2"
-serde.workspace = true
-serde_derive.workspace = true
-tempfile = "3"
-
-[dev-dependencies]
-collections = { path = "../collections", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
-client = { package = "client2", path = "../client2", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
-util = { path = "../util", features = ["test-support"] }

crates/channel2/src/channel2.rs 🔗

@@ -1,23 +0,0 @@
-mod channel_buffer;
-mod channel_chat;
-mod channel_store;
-
-use client::{Client, UserStore};
-use gpui::{AppContext, Model};
-use std::sync::Arc;
-
-pub use channel_buffer::{ChannelBuffer, ChannelBufferEvent, ACKNOWLEDGE_DEBOUNCE_INTERVAL};
-pub use channel_chat::{
-    mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
-    MessageParams,
-};
-pub use channel_store::{Channel, ChannelEvent, ChannelId, ChannelMembership, ChannelStore};
-
-#[cfg(test)]
-mod channel_store_tests;
-
-pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
-    channel_store::init(client, user_store, cx);
-    channel_buffer::init(client);
-    channel_chat::init(client);
-}

crates/channel2/src/channel_buffer.rs 🔗

@@ -1,257 +0,0 @@
-use crate::{Channel, ChannelId, ChannelStore};
-use anyhow::Result;
-use client::{Client, Collaborator, UserStore};
-use collections::HashMap;
-use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
-use language::proto::serialize_version;
-use rpc::{
-    proto::{self, PeerId},
-    TypedEnvelope,
-};
-use std::{sync::Arc, time::Duration};
-use util::ResultExt;
-
-pub const ACKNOWLEDGE_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(250);
-
-pub(crate) fn init(client: &Arc<Client>) {
-    client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
-    client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborators);
-}
-
-pub struct ChannelBuffer {
-    pub channel_id: ChannelId,
-    connected: bool,
-    collaborators: HashMap<PeerId, Collaborator>,
-    user_store: Model<UserStore>,
-    channel_store: Model<ChannelStore>,
-    buffer: Model<language::Buffer>,
-    buffer_epoch: u64,
-    client: Arc<Client>,
-    subscription: Option<client::Subscription>,
-    acknowledge_task: Option<Task<Result<()>>>,
-}
-
-pub enum ChannelBufferEvent {
-    CollaboratorsChanged,
-    Disconnected,
-    BufferEdited,
-    ChannelChanged,
-}
-
-impl EventEmitter<ChannelBufferEvent> for ChannelBuffer {}
-
-impl ChannelBuffer {
-    pub(crate) async fn new(
-        channel: Arc<Channel>,
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        channel_store: Model<ChannelStore>,
-        mut cx: AsyncAppContext,
-    ) -> Result<Model<Self>> {
-        let response = client
-            .request(proto::JoinChannelBuffer {
-                channel_id: channel.id,
-            })
-            .await?;
-
-        let base_text = response.base_text;
-        let operations = response
-            .operations
-            .into_iter()
-            .map(language::proto::deserialize_operation)
-            .collect::<Result<Vec<_>, _>>()?;
-
-        let buffer = cx.new_model(|_| {
-            language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
-        })?;
-        buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;
-
-        let subscription = client.subscribe_to_entity(channel.id)?;
-
-        anyhow::Ok(cx.new_model(|cx| {
-            cx.subscribe(&buffer, Self::on_buffer_update).detach();
-            cx.on_release(Self::release).detach();
-            let mut this = Self {
-                buffer,
-                buffer_epoch: response.epoch,
-                client,
-                connected: true,
-                collaborators: Default::default(),
-                acknowledge_task: None,
-                channel_id: channel.id,
-                subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
-                user_store,
-                channel_store,
-            };
-            this.replace_collaborators(response.collaborators, cx);
-            this
-        })?)
-    }
-
-    fn release(&mut self, _: &mut AppContext) {
-        if self.connected {
-            if let Some(task) = self.acknowledge_task.take() {
-                task.detach();
-            }
-            self.client
-                .send(proto::LeaveChannelBuffer {
-                    channel_id: self.channel_id,
-                })
-                .log_err();
-        }
-    }
-
-    pub fn remote_id(&self, cx: &AppContext) -> u64 {
-        self.buffer.read(cx).remote_id()
-    }
-
-    pub fn user_store(&self) -> &Model<UserStore> {
-        &self.user_store
-    }
-
-    pub(crate) fn replace_collaborators(
-        &mut self,
-        collaborators: Vec<proto::Collaborator>,
-        cx: &mut ModelContext<Self>,
-    ) {
-        let mut new_collaborators = HashMap::default();
-        for collaborator in collaborators {
-            if let Ok(collaborator) = Collaborator::from_proto(collaborator) {
-                new_collaborators.insert(collaborator.peer_id, collaborator);
-            }
-        }
-
-        for (_, old_collaborator) in &self.collaborators {
-            if !new_collaborators.contains_key(&old_collaborator.peer_id) {
-                self.buffer.update(cx, |buffer, cx| {
-                    buffer.remove_peer(old_collaborator.replica_id as u16, cx)
-                });
-            }
-        }
-        self.collaborators = new_collaborators;
-        cx.emit(ChannelBufferEvent::CollaboratorsChanged);
-        cx.notify();
-    }
-
-    async fn handle_update_channel_buffer(
-        this: Model<Self>,
-        update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        let ops = update_channel_buffer
-            .payload
-            .operations
-            .into_iter()
-            .map(language::proto::deserialize_operation)
-            .collect::<Result<Vec<_>, _>>()?;
-
-        this.update(&mut cx, |this, cx| {
-            cx.notify();
-            this.buffer
-                .update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
-        })??;
-
-        Ok(())
-    }
-
-    async fn handle_update_channel_buffer_collaborators(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::UpdateChannelBufferCollaborators>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, cx| {
-            this.replace_collaborators(message.payload.collaborators, cx);
-            cx.emit(ChannelBufferEvent::CollaboratorsChanged);
-            cx.notify();
-        })
-    }
-
-    fn on_buffer_update(
-        &mut self,
-        _: Model<language::Buffer>,
-        event: &language::Event,
-        cx: &mut ModelContext<Self>,
-    ) {
-        match event {
-            language::Event::Operation(operation) => {
-                let operation = language::proto::serialize_operation(operation);
-                self.client
-                    .send(proto::UpdateChannelBuffer {
-                        channel_id: self.channel_id,
-                        operations: vec![operation],
-                    })
-                    .log_err();
-            }
-            language::Event::Edited => {
-                cx.emit(ChannelBufferEvent::BufferEdited);
-            }
-            _ => {}
-        }
-    }
-
-    pub fn acknowledge_buffer_version(&mut self, cx: &mut ModelContext<'_, ChannelBuffer>) {
-        let buffer = self.buffer.read(cx);
-        let version = buffer.version();
-        let buffer_id = buffer.remote_id();
-        let client = self.client.clone();
-        let epoch = self.epoch();
-
-        self.acknowledge_task = Some(cx.spawn(move |_, cx| async move {
-            cx.background_executor()
-                .timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL)
-                .await;
-            client
-                .send(proto::AckBufferOperation {
-                    buffer_id,
-                    epoch,
-                    version: serialize_version(&version),
-                })
-                .ok();
-            Ok(())
-        }));
-    }
-
-    pub fn epoch(&self) -> u64 {
-        self.buffer_epoch
-    }
-
-    pub fn buffer(&self) -> Model<language::Buffer> {
-        self.buffer.clone()
-    }
-
-    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
-        &self.collaborators
-    }
-
-    pub fn channel(&self, cx: &AppContext) -> Option<Arc<Channel>> {
-        self.channel_store
-            .read(cx)
-            .channel_for_id(self.channel_id)
-            .cloned()
-    }
-
-    pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
-        log::info!("channel buffer {} disconnected", self.channel_id);
-        if self.connected {
-            self.connected = false;
-            self.subscription.take();
-            cx.emit(ChannelBufferEvent::Disconnected);
-            cx.notify()
-        }
-    }
-
-    pub(crate) fn channel_changed(&mut self, cx: &mut ModelContext<Self>) {
-        cx.emit(ChannelBufferEvent::ChannelChanged);
-        cx.notify()
-    }
-
-    pub fn is_connected(&self) -> bool {
-        self.connected
-    }
-
-    pub fn replica_id(&self, cx: &AppContext) -> u16 {
-        self.buffer.read(cx).replica_id()
-    }
-}

crates/channel2/src/channel_chat.rs 🔗

@@ -1,645 +0,0 @@
-use crate::{Channel, ChannelId, ChannelStore};
-use anyhow::{anyhow, Result};
-use client::{
-    proto,
-    user::{User, UserStore},
-    Client, Subscription, TypedEnvelope, UserId,
-};
-use futures::lock::Mutex;
-use gpui::{AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task};
-use rand::prelude::*;
-use std::{
-    collections::HashSet,
-    mem,
-    ops::{ControlFlow, Range},
-    sync::Arc,
-};
-use sum_tree::{Bias, SumTree};
-use time::OffsetDateTime;
-use util::{post_inc, ResultExt as _, TryFutureExt};
-
-pub struct ChannelChat {
-    pub channel_id: ChannelId,
-    messages: SumTree<ChannelMessage>,
-    acknowledged_message_ids: HashSet<u64>,
-    channel_store: Model<ChannelStore>,
-    loaded_all_messages: bool,
-    last_acknowledged_id: Option<u64>,
-    next_pending_message_id: usize,
-    user_store: Model<UserStore>,
-    rpc: Arc<Client>,
-    outgoing_messages_lock: Arc<Mutex<()>>,
-    rng: StdRng,
-    _subscription: Subscription,
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct MessageParams {
-    pub text: String,
-    pub mentions: Vec<(Range<usize>, UserId)>,
-}
-
-#[derive(Clone, Debug)]
-pub struct ChannelMessage {
-    pub id: ChannelMessageId,
-    pub body: String,
-    pub timestamp: OffsetDateTime,
-    pub sender: Arc<User>,
-    pub nonce: u128,
-    pub mentions: Vec<(Range<usize>, UserId)>,
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum ChannelMessageId {
-    Saved(u64),
-    Pending(usize),
-}
-
-#[derive(Clone, Debug, Default)]
-pub struct ChannelMessageSummary {
-    max_id: ChannelMessageId,
-    count: usize,
-}
-
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
-struct Count(usize);
-
-#[derive(Clone, Debug, PartialEq)]
-pub enum ChannelChatEvent {
-    MessagesUpdated {
-        old_range: Range<usize>,
-        new_count: usize,
-    },
-    NewMessage {
-        channel_id: ChannelId,
-        message_id: u64,
-    },
-}
-
-impl EventEmitter<ChannelChatEvent> for ChannelChat {}
-pub fn init(client: &Arc<Client>) {
-    client.add_model_message_handler(ChannelChat::handle_message_sent);
-    client.add_model_message_handler(ChannelChat::handle_message_removed);
-}
-
-impl ChannelChat {
-    pub async fn new(
-        channel: Arc<Channel>,
-        channel_store: Model<ChannelStore>,
-        user_store: Model<UserStore>,
-        client: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<Model<Self>> {
-        let channel_id = channel.id;
-        let subscription = client.subscribe_to_entity(channel_id).unwrap();
-
-        let response = client
-            .request(proto::JoinChannelChat { channel_id })
-            .await?;
-        let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
-        let loaded_all_messages = response.done;
-
-        Ok(cx.new_model(|cx| {
-            cx.on_release(Self::release).detach();
-            let mut this = Self {
-                channel_id: channel.id,
-                user_store,
-                channel_store,
-                rpc: client,
-                outgoing_messages_lock: Default::default(),
-                messages: Default::default(),
-                acknowledged_message_ids: Default::default(),
-                loaded_all_messages,
-                next_pending_message_id: 0,
-                last_acknowledged_id: None,
-                rng: StdRng::from_entropy(),
-                _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()),
-            };
-            this.insert_messages(messages, cx);
-            this
-        })?)
-    }
-
-    fn release(&mut self, _: &mut AppContext) {
-        self.rpc
-            .send(proto::LeaveChannelChat {
-                channel_id: self.channel_id,
-            })
-            .log_err();
-    }
-
-    pub fn channel(&self, cx: &AppContext) -> Option<Arc<Channel>> {
-        self.channel_store
-            .read(cx)
-            .channel_for_id(self.channel_id)
-            .cloned()
-    }
-
-    pub fn client(&self) -> &Arc<Client> {
-        &self.rpc
-    }
-
-    pub fn send_message(
-        &mut self,
-        message: MessageParams,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<Task<Result<u64>>> {
-        if message.text.is_empty() {
-            Err(anyhow!("message body can't be empty"))?;
-        }
-
-        let current_user = self
-            .user_store
-            .read(cx)
-            .current_user()
-            .ok_or_else(|| anyhow!("current_user is not present"))?;
-
-        let channel_id = self.channel_id;
-        let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
-        let nonce = self.rng.gen();
-        self.insert_messages(
-            SumTree::from_item(
-                ChannelMessage {
-                    id: pending_id,
-                    body: message.text.clone(),
-                    sender: current_user,
-                    timestamp: OffsetDateTime::now_utc(),
-                    mentions: message.mentions.clone(),
-                    nonce,
-                },
-                &(),
-            ),
-            cx,
-        );
-        let user_store = self.user_store.clone();
-        let rpc = self.rpc.clone();
-        let outgoing_messages_lock = self.outgoing_messages_lock.clone();
-        Ok(cx.spawn(move |this, mut cx| async move {
-            let outgoing_message_guard = outgoing_messages_lock.lock().await;
-            let request = rpc.request(proto::SendChannelMessage {
-                channel_id,
-                body: message.text,
-                nonce: Some(nonce.into()),
-                mentions: mentions_to_proto(&message.mentions),
-            });
-            let response = request.await?;
-            drop(outgoing_message_guard);
-            let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
-            let id = response.id;
-            let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
-            this.update(&mut cx, |this, cx| {
-                this.insert_messages(SumTree::from_item(message, &()), cx);
-            })?;
-            Ok(id)
-        }))
-    }
-
-    pub fn remove_message(&mut self, id: u64, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let response = self.rpc.request(proto::RemoveChannelMessage {
-            channel_id: self.channel_id,
-            message_id: id,
-        });
-        cx.spawn(move |this, mut cx| async move {
-            response.await?;
-            this.update(&mut cx, |this, cx| {
-                this.message_removed(id, cx);
-            })?;
-            Ok(())
-        })
-    }
-
-    pub fn load_more_messages(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<Option<()>>> {
-        if self.loaded_all_messages {
-            return None;
-        }
-
-        let rpc = self.rpc.clone();
-        let user_store = self.user_store.clone();
-        let channel_id = self.channel_id;
-        let before_message_id = self.first_loaded_message_id()?;
-        Some(cx.spawn(move |this, mut cx| {
-            async move {
-                let response = rpc
-                    .request(proto::GetChannelMessages {
-                        channel_id,
-                        before_message_id,
-                    })
-                    .await?;
-                let loaded_all_messages = response.done;
-                let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
-                this.update(&mut cx, |this, cx| {
-                    this.loaded_all_messages = loaded_all_messages;
-                    this.insert_messages(messages, cx);
-                })?;
-                anyhow::Ok(())
-            }
-            .log_err()
-        }))
-    }
-
-    pub fn first_loaded_message_id(&mut self) -> Option<u64> {
-        self.messages.first().and_then(|message| match message.id {
-            ChannelMessageId::Saved(id) => Some(id),
-            ChannelMessageId::Pending(_) => None,
-        })
-    }
-
-    /// Load all of the chat messages since a certain message id.
-    ///
-    /// For now, we always maintain a suffix of the channel's messages.
-    pub async fn load_history_since_message(
-        chat: Model<Self>,
-        message_id: u64,
-        mut cx: AsyncAppContext,
-    ) -> Option<usize> {
-        loop {
-            let step = chat
-                .update(&mut cx, |chat, cx| {
-                    if let Some(first_id) = chat.first_loaded_message_id() {
-                        if first_id <= message_id {
-                            let mut cursor = chat.messages.cursor::<(ChannelMessageId, Count)>();
-                            let message_id = ChannelMessageId::Saved(message_id);
-                            cursor.seek(&message_id, Bias::Left, &());
-                            return ControlFlow::Break(
-                                if cursor
-                                    .item()
-                                    .map_or(false, |message| message.id == message_id)
-                                {
-                                    Some(cursor.start().1 .0)
-                                } else {
-                                    None
-                                },
-                            );
-                        }
-                    }
-                    ControlFlow::Continue(chat.load_more_messages(cx))
-                })
-                .log_err()?;
-            match step {
-                ControlFlow::Break(ix) => return ix,
-                ControlFlow::Continue(task) => task?.await?,
-            }
-        }
-    }
-
-    pub fn acknowledge_last_message(&mut self, cx: &mut ModelContext<Self>) {
-        if let ChannelMessageId::Saved(latest_message_id) = self.messages.summary().max_id {
-            if self
-                .last_acknowledged_id
-                .map_or(true, |acknowledged_id| acknowledged_id < latest_message_id)
-            {
-                self.rpc
-                    .send(proto::AckChannelMessage {
-                        channel_id: self.channel_id,
-                        message_id: latest_message_id,
-                    })
-                    .ok();
-                self.last_acknowledged_id = Some(latest_message_id);
-                self.channel_store.update(cx, |store, cx| {
-                    store.acknowledge_message_id(self.channel_id, latest_message_id, cx);
-                });
-            }
-        }
-    }
-
-    pub fn rejoin(&mut self, cx: &mut ModelContext<Self>) {
-        let user_store = self.user_store.clone();
-        let rpc = self.rpc.clone();
-        let channel_id = self.channel_id;
-        cx.spawn(move |this, mut cx| {
-            async move {
-                let response = rpc.request(proto::JoinChannelChat { channel_id }).await?;
-                let messages = messages_from_proto(response.messages, &user_store, &mut cx).await?;
-                let loaded_all_messages = response.done;
-
-                let pending_messages = this.update(&mut cx, |this, cx| {
-                    if let Some((first_new_message, last_old_message)) =
-                        messages.first().zip(this.messages.last())
-                    {
-                        if first_new_message.id > last_old_message.id {
-                            let old_messages = mem::take(&mut this.messages);
-                            cx.emit(ChannelChatEvent::MessagesUpdated {
-                                old_range: 0..old_messages.summary().count,
-                                new_count: 0,
-                            });
-                            this.loaded_all_messages = loaded_all_messages;
-                        }
-                    }
-
-                    this.insert_messages(messages, cx);
-                    if loaded_all_messages {
-                        this.loaded_all_messages = loaded_all_messages;
-                    }
-
-                    this.pending_messages().cloned().collect::<Vec<_>>()
-                })?;
-
-                for pending_message in pending_messages {
-                    let request = rpc.request(proto::SendChannelMessage {
-                        channel_id,
-                        body: pending_message.body,
-                        mentions: mentions_to_proto(&pending_message.mentions),
-                        nonce: Some(pending_message.nonce.into()),
-                    });
-                    let response = request.await?;
-                    let message = ChannelMessage::from_proto(
-                        response.message.ok_or_else(|| anyhow!("invalid message"))?,
-                        &user_store,
-                        &mut cx,
-                    )
-                    .await?;
-                    this.update(&mut cx, |this, cx| {
-                        this.insert_messages(SumTree::from_item(message, &()), cx);
-                    })?;
-                }
-
-                anyhow::Ok(())
-            }
-            .log_err()
-        })
-        .detach();
-    }
-
-    pub fn message_count(&self) -> usize {
-        self.messages.summary().count
-    }
-
-    pub fn messages(&self) -> &SumTree<ChannelMessage> {
-        &self.messages
-    }
-
-    pub fn message(&self, ix: usize) -> &ChannelMessage {
-        let mut cursor = self.messages.cursor::<Count>();
-        cursor.seek(&Count(ix), Bias::Right, &());
-        cursor.item().unwrap()
-    }
-
-    pub fn acknowledge_message(&mut self, id: u64) {
-        if self.acknowledged_message_ids.insert(id) {
-            self.rpc
-                .send(proto::AckChannelMessage {
-                    channel_id: self.channel_id,
-                    message_id: id,
-                })
-                .ok();
-        }
-    }
-
-    pub fn messages_in_range(&self, range: Range<usize>) -> impl Iterator<Item = &ChannelMessage> {
-        let mut cursor = self.messages.cursor::<Count>();
-        cursor.seek(&Count(range.start), Bias::Right, &());
-        cursor.take(range.len())
-    }
-
-    pub fn pending_messages(&self) -> impl Iterator<Item = &ChannelMessage> {
-        let mut cursor = self.messages.cursor::<ChannelMessageId>();
-        cursor.seek(&ChannelMessageId::Pending(0), Bias::Left, &());
-        cursor
-    }
-
-    async fn handle_message_sent(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::ChannelMessageSent>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
-        let message = message
-            .payload
-            .message
-            .ok_or_else(|| anyhow!("empty message"))?;
-        let message_id = message.id;
-
-        let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
-        this.update(&mut cx, |this, cx| {
-            this.insert_messages(SumTree::from_item(message, &()), cx);
-            cx.emit(ChannelChatEvent::NewMessage {
-                channel_id: this.channel_id,
-                message_id,
-            })
-        })?;
-
-        Ok(())
-    }
-
-    async fn handle_message_removed(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::RemoveChannelMessage>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, cx| {
-            this.message_removed(message.payload.message_id, cx)
-        })?;
-        Ok(())
-    }
-
-    fn insert_messages(&mut self, messages: SumTree<ChannelMessage>, cx: &mut ModelContext<Self>) {
-        if let Some((first_message, last_message)) = messages.first().zip(messages.last()) {
-            let nonces = messages
-                .cursor::<()>()
-                .map(|m| m.nonce)
-                .collect::<HashSet<_>>();
-
-            let mut old_cursor = self.messages.cursor::<(ChannelMessageId, Count)>();
-            let mut new_messages = old_cursor.slice(&first_message.id, Bias::Left, &());
-            let start_ix = old_cursor.start().1 .0;
-            let removed_messages = old_cursor.slice(&last_message.id, Bias::Right, &());
-            let removed_count = removed_messages.summary().count;
-            let new_count = messages.summary().count;
-            let end_ix = start_ix + removed_count;
-
-            new_messages.append(messages, &());
-
-            let mut ranges = Vec::<Range<usize>>::new();
-            if new_messages.last().unwrap().is_pending() {
-                new_messages.append(old_cursor.suffix(&()), &());
-            } else {
-                new_messages.append(
-                    old_cursor.slice(&ChannelMessageId::Pending(0), Bias::Left, &()),
-                    &(),
-                );
-
-                while let Some(message) = old_cursor.item() {
-                    let message_ix = old_cursor.start().1 .0;
-                    if nonces.contains(&message.nonce) {
-                        if ranges.last().map_or(false, |r| r.end == message_ix) {
-                            ranges.last_mut().unwrap().end += 1;
-                        } else {
-                            ranges.push(message_ix..message_ix + 1);
-                        }
-                    } else {
-                        new_messages.push(message.clone(), &());
-                    }
-                    old_cursor.next(&());
-                }
-            }
-
-            drop(old_cursor);
-            self.messages = new_messages;
-
-            for range in ranges.into_iter().rev() {
-                cx.emit(ChannelChatEvent::MessagesUpdated {
-                    old_range: range,
-                    new_count: 0,
-                });
-            }
-            cx.emit(ChannelChatEvent::MessagesUpdated {
-                old_range: start_ix..end_ix,
-                new_count,
-            });
-
-            cx.notify();
-        }
-    }
-
-    fn message_removed(&mut self, id: u64, cx: &mut ModelContext<Self>) {
-        let mut cursor = self.messages.cursor::<ChannelMessageId>();
-        let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
-        if let Some(item) = cursor.item() {
-            if item.id == ChannelMessageId::Saved(id) {
-                let ix = messages.summary().count;
-                cursor.next(&());
-                messages.append(cursor.suffix(&()), &());
-                drop(cursor);
-                self.messages = messages;
-                cx.emit(ChannelChatEvent::MessagesUpdated {
-                    old_range: ix..ix + 1,
-                    new_count: 0,
-                });
-            }
-        }
-    }
-}
-
-async fn messages_from_proto(
-    proto_messages: Vec<proto::ChannelMessage>,
-    user_store: &Model<UserStore>,
-    cx: &mut AsyncAppContext,
-) -> Result<SumTree<ChannelMessage>> {
-    let messages = ChannelMessage::from_proto_vec(proto_messages, user_store, cx).await?;
-    let mut result = SumTree::new();
-    result.extend(messages, &());
-    Ok(result)
-}
-
-impl ChannelMessage {
-    pub async fn from_proto(
-        message: proto::ChannelMessage,
-        user_store: &Model<UserStore>,
-        cx: &mut AsyncAppContext,
-    ) -> Result<Self> {
-        let sender = user_store
-            .update(cx, |user_store, cx| {
-                user_store.get_user(message.sender_id, cx)
-            })?
-            .await?;
-        Ok(ChannelMessage {
-            id: ChannelMessageId::Saved(message.id),
-            body: message.body,
-            mentions: message
-                .mentions
-                .into_iter()
-                .filter_map(|mention| {
-                    let range = mention.range?;
-                    Some((range.start as usize..range.end as usize, mention.user_id))
-                })
-                .collect(),
-            timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
-            sender,
-            nonce: message
-                .nonce
-                .ok_or_else(|| anyhow!("nonce is required"))?
-                .into(),
-        })
-    }
-
-    pub fn is_pending(&self) -> bool {
-        matches!(self.id, ChannelMessageId::Pending(_))
-    }
-
-    pub async fn from_proto_vec(
-        proto_messages: Vec<proto::ChannelMessage>,
-        user_store: &Model<UserStore>,
-        cx: &mut AsyncAppContext,
-    ) -> Result<Vec<Self>> {
-        let unique_user_ids = proto_messages
-            .iter()
-            .map(|m| m.sender_id)
-            .collect::<HashSet<_>>()
-            .into_iter()
-            .collect();
-        user_store
-            .update(cx, |user_store, cx| {
-                user_store.get_users(unique_user_ids, cx)
-            })?
-            .await?;
-
-        let mut messages = Vec::with_capacity(proto_messages.len());
-        for message in proto_messages {
-            messages.push(ChannelMessage::from_proto(message, user_store, cx).await?);
-        }
-        Ok(messages)
-    }
-}
-
-pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::ChatMention> {
-    mentions
-        .iter()
-        .map(|(range, user_id)| proto::ChatMention {
-            range: Some(proto::Range {
-                start: range.start as u64,
-                end: range.end as u64,
-            }),
-            user_id: *user_id as u64,
-        })
-        .collect()
-}
-
-impl sum_tree::Item for ChannelMessage {
-    type Summary = ChannelMessageSummary;
-
-    fn summary(&self) -> Self::Summary {
-        ChannelMessageSummary {
-            max_id: self.id,
-            count: 1,
-        }
-    }
-}
-
-impl Default for ChannelMessageId {
-    fn default() -> Self {
-        Self::Saved(0)
-    }
-}
-
-impl sum_tree::Summary for ChannelMessageSummary {
-    type Context = ();
-
-    fn add_summary(&mut self, summary: &Self, _: &()) {
-        self.max_id = summary.max_id;
-        self.count += summary.count;
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for ChannelMessageId {
-    fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) {
-        debug_assert!(summary.max_id > *self);
-        *self = summary.max_id;
-    }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChannelMessageSummary> for Count {
-    fn add_summary(&mut self, summary: &'a ChannelMessageSummary, _: &()) {
-        self.0 += summary.count;
-    }
-}
-
-impl<'a> From<&'a str> for MessageParams {
-    fn from(value: &'a str) -> Self {
-        Self {
-            text: value.into(),
-            mentions: Vec::new(),
-        }
-    }
-}

crates/channel2/src/channel_store.rs 🔗

@@ -1,1022 +0,0 @@
-mod channel_index;
-
-use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
-use anyhow::{anyhow, Result};
-use channel_index::ChannelIndex;
-use client::{Client, Subscription, User, UserId, UserStore};
-use collections::{hash_map, HashMap, HashSet};
-use db::RELEASE_CHANNEL;
-use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
-use gpui::{
-    AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, SharedString, Task,
-    WeakModel,
-};
-use rpc::{
-    proto::{self, ChannelVisibility},
-    TypedEnvelope,
-};
-use std::{mem, sync::Arc, time::Duration};
-use util::{async_maybe, ResultExt};
-
-pub fn init(client: &Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
-    let channel_store =
-        cx.new_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
-    cx.set_global(channel_store);
-}
-
-pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
-
-pub type ChannelId = u64;
-
-pub struct ChannelStore {
-    pub channel_index: ChannelIndex,
-    channel_invitations: Vec<Arc<Channel>>,
-    channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
-    outgoing_invites: HashSet<(ChannelId, UserId)>,
-    update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
-    opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
-    opened_chats: HashMap<ChannelId, OpenedModelHandle<ChannelChat>>,
-    client: Arc<Client>,
-    user_store: Model<UserStore>,
-    _rpc_subscription: Subscription,
-    _watch_connection_status: Task<Option<()>>,
-    disconnect_channel_buffers_task: Option<Task<()>>,
-    _update_channels: Task<()>,
-}
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct Channel {
-    pub id: ChannelId,
-    pub name: SharedString,
-    pub visibility: proto::ChannelVisibility,
-    pub role: proto::ChannelRole,
-    pub unseen_note_version: Option<(u64, clock::Global)>,
-    pub unseen_message_id: Option<u64>,
-    pub parent_path: Vec<u64>,
-}
-
-impl Channel {
-    pub fn link(&self) -> String {
-        RELEASE_CHANNEL.link_prefix().to_owned()
-            + "channel/"
-            + &self.slug()
-            + "-"
-            + &self.id.to_string()
-    }
-
-    pub fn slug(&self) -> String {
-        let slug: String = self
-            .name
-            .chars()
-            .map(|c| if c.is_alphanumeric() { c } else { '-' })
-            .collect();
-
-        slug.trim_matches(|c| c == '-').to_string()
-    }
-
-    pub fn can_edit_notes(&self) -> bool {
-        self.role == proto::ChannelRole::Member || self.role == proto::ChannelRole::Admin
-    }
-}
-
-pub struct ChannelMembership {
-    pub user: Arc<User>,
-    pub kind: proto::channel_member::Kind,
-    pub role: proto::ChannelRole,
-}
-impl ChannelMembership {
-    pub fn sort_key(&self) -> MembershipSortKey {
-        MembershipSortKey {
-            role_order: match self.role {
-                proto::ChannelRole::Admin => 0,
-                proto::ChannelRole::Member => 1,
-                proto::ChannelRole::Banned => 2,
-                proto::ChannelRole::Guest => 3,
-            },
-            kind_order: match self.kind {
-                proto::channel_member::Kind::Member => 0,
-                proto::channel_member::Kind::AncestorMember => 1,
-                proto::channel_member::Kind::Invitee => 2,
-            },
-            username_order: self.user.github_login.as_str(),
-        }
-    }
-}
-
-#[derive(PartialOrd, Ord, PartialEq, Eq)]
-pub struct MembershipSortKey<'a> {
-    role_order: u8,
-    kind_order: u8,
-    username_order: &'a str,
-}
-
-pub enum ChannelEvent {
-    ChannelCreated(ChannelId),
-    ChannelRenamed(ChannelId),
-}
-
-impl EventEmitter<ChannelEvent> for ChannelStore {}
-
-enum OpenedModelHandle<E> {
-    Open(WeakModel<E>),
-    Loading(Shared<Task<Result<Model<E>, Arc<anyhow::Error>>>>),
-}
-
-impl ChannelStore {
-    pub fn global(cx: &AppContext) -> Model<Self> {
-        cx.global::<Model<Self>>().clone()
-    }
-
-    pub fn new(
-        client: Arc<Client>,
-        user_store: Model<UserStore>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
-        let rpc_subscription =
-            client.add_message_handler(cx.weak_model(), Self::handle_update_channels);
-
-        let mut connection_status = client.status();
-        let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
-        let watch_connection_status = cx.spawn(|this, mut cx| async move {
-            while let Some(status) = connection_status.next().await {
-                let this = this.upgrade()?;
-                match status {
-                    client::Status::Connected { .. } => {
-                        this.update(&mut cx, |this, cx| this.handle_connect(cx))
-                            .ok()?
-                            .await
-                            .log_err()?;
-                    }
-                    client::Status::SignedOut | client::Status::UpgradeRequired => {
-                        this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx))
-                            .ok();
-                    }
-                    _ => {
-                        this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx))
-                            .ok();
-                    }
-                }
-            }
-            Some(())
-        });
-
-        Self {
-            channel_invitations: Vec::default(),
-            channel_index: ChannelIndex::default(),
-            channel_participants: Default::default(),
-            outgoing_invites: Default::default(),
-            opened_buffers: Default::default(),
-            opened_chats: Default::default(),
-            update_channels_tx,
-            client,
-            user_store,
-            _rpc_subscription: rpc_subscription,
-            _watch_connection_status: watch_connection_status,
-            disconnect_channel_buffers_task: None,
-            _update_channels: cx.spawn(|this, mut cx| async move {
-                async_maybe!({
-                    while let Some(update_channels) = update_channels_rx.next().await {
-                        if let Some(this) = this.upgrade() {
-                            let update_task = this.update(&mut cx, |this, cx| {
-                                this.update_channels(update_channels, cx)
-                            })?;
-                            if let Some(update_task) = update_task {
-                                update_task.await.log_err();
-                            }
-                        }
-                    }
-                    anyhow::Ok(())
-                })
-                .await
-                .log_err();
-            }),
-        }
-    }
-
-    pub fn client(&self) -> Arc<Client> {
-        self.client.clone()
-    }
-
-    /// Returns the number of unique channels in the store
-    pub fn channel_count(&self) -> usize {
-        self.channel_index.by_id().len()
-    }
-
-    /// Returns the index of a channel ID in the list of unique channels
-    pub fn index_of_channel(&self, channel_id: ChannelId) -> Option<usize> {
-        self.channel_index
-            .by_id()
-            .keys()
-            .position(|id| *id == channel_id)
-    }
-
-    /// Returns an iterator over all unique channels
-    pub fn channels(&self) -> impl '_ + Iterator<Item = &Arc<Channel>> {
-        self.channel_index.by_id().values()
-    }
-
-    /// Iterate over all entries in the channel DAG
-    pub fn ordered_channels(&self) -> impl '_ + Iterator<Item = (usize, &Arc<Channel>)> {
-        self.channel_index
-            .ordered_channels()
-            .iter()
-            .filter_map(move |id| {
-                let channel = self.channel_index.by_id().get(id)?;
-                Some((channel.parent_path.len(), channel))
-            })
-    }
-
-    pub fn channel_at_index(&self, ix: usize) -> Option<&Arc<Channel>> {
-        let channel_id = self.channel_index.ordered_channels().get(ix)?;
-        self.channel_index.by_id().get(channel_id)
-    }
-
-    pub fn channel_at(&self, ix: usize) -> Option<&Arc<Channel>> {
-        self.channel_index.by_id().values().nth(ix)
-    }
-
-    pub fn has_channel_invitation(&self, channel_id: ChannelId) -> bool {
-        self.channel_invitations
-            .iter()
-            .any(|channel| channel.id == channel_id)
-    }
-
-    pub fn channel_invitations(&self) -> &[Arc<Channel>] {
-        &self.channel_invitations
-    }
-
-    pub fn channel_for_id(&self, channel_id: ChannelId) -> Option<&Arc<Channel>> {
-        self.channel_index.by_id().get(&channel_id)
-    }
-
-    pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
-        if let Some(buffer) = self.opened_buffers.get(&channel_id) {
-            if let OpenedModelHandle::Open(buffer) = buffer {
-                return buffer.upgrade().is_some();
-            }
-        }
-        false
-    }
-
-    pub fn open_channel_buffer(
-        &mut self,
-        channel_id: ChannelId,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Model<ChannelBuffer>>> {
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        let channel_store = cx.handle();
-        self.open_channel_resource(
-            channel_id,
-            |this| &mut this.opened_buffers,
-            |channel, cx| ChannelBuffer::new(channel, client, user_store, channel_store, cx),
-            cx,
-        )
-    }
-
-    pub fn fetch_channel_messages(
-        &self,
-        message_ids: Vec<u64>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<ChannelMessage>>> {
-        let request = if message_ids.is_empty() {
-            None
-        } else {
-            Some(
-                self.client
-                    .request(proto::GetChannelMessagesById { message_ids }),
-            )
-        };
-        cx.spawn(|this, mut cx| async move {
-            if let Some(request) = request {
-                let response = request.await?;
-                let this = this
-                    .upgrade()
-                    .ok_or_else(|| anyhow!("channel store dropped"))?;
-                let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
-                ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await
-            } else {
-                Ok(Vec::new())
-            }
-        })
-    }
-
-    pub fn has_channel_buffer_changed(&self, channel_id: ChannelId) -> Option<bool> {
-        self.channel_index
-            .by_id()
-            .get(&channel_id)
-            .map(|channel| channel.unseen_note_version.is_some())
-    }
-
-    pub fn has_new_messages(&self, channel_id: ChannelId) -> Option<bool> {
-        self.channel_index
-            .by_id()
-            .get(&channel_id)
-            .map(|channel| channel.unseen_message_id.is_some())
-    }
-
-    pub fn notes_changed(
-        &mut self,
-        channel_id: ChannelId,
-        epoch: u64,
-        version: &clock::Global,
-        cx: &mut ModelContext<Self>,
-    ) {
-        self.channel_index.note_changed(channel_id, epoch, version);
-        cx.notify();
-    }
-
-    pub fn new_message(
-        &mut self,
-        channel_id: ChannelId,
-        message_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) {
-        self.channel_index.new_message(channel_id, message_id);
-        cx.notify();
-    }
-
-    pub fn acknowledge_message_id(
-        &mut self,
-        channel_id: ChannelId,
-        message_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) {
-        self.channel_index
-            .acknowledge_message_id(channel_id, message_id);
-        cx.notify();
-    }
-
-    pub fn acknowledge_notes_version(
-        &mut self,
-        channel_id: ChannelId,
-        epoch: u64,
-        version: &clock::Global,
-        cx: &mut ModelContext<Self>,
-    ) {
-        self.channel_index
-            .acknowledge_note_version(channel_id, epoch, version);
-        cx.notify();
-    }
-
-    pub fn open_channel_chat(
-        &mut self,
-        channel_id: ChannelId,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Model<ChannelChat>>> {
-        let client = self.client.clone();
-        let user_store = self.user_store.clone();
-        let this = cx.handle();
-        self.open_channel_resource(
-            channel_id,
-            |this| &mut this.opened_chats,
-            |channel, cx| ChannelChat::new(channel, this, user_store, client, cx),
-            cx,
-        )
-    }
-
-    /// Asynchronously open a given resource associated with a channel.
-    ///
-    /// Make sure that the resource is only opened once, even if this method
-    /// is called multiple times with the same channel id while the first task
-    /// is still running.
-    fn open_channel_resource<T, F, Fut>(
-        &mut self,
-        channel_id: ChannelId,
-        get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenedModelHandle<T>>,
-        load: F,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Model<T>>>
-    where
-        F: 'static + FnOnce(Arc<Channel>, AsyncAppContext) -> Fut,
-        Fut: Future<Output = Result<Model<T>>>,
-        T: 'static,
-    {
-        let task = loop {
-            match get_map(self).entry(channel_id) {
-                hash_map::Entry::Occupied(e) => match e.get() {
-                    OpenedModelHandle::Open(model) => {
-                        if let Some(model) = model.upgrade() {
-                            break Task::ready(Ok(model)).shared();
-                        } else {
-                            get_map(self).remove(&channel_id);
-                            continue;
-                        }
-                    }
-                    OpenedModelHandle::Loading(task) => {
-                        break task.clone();
-                    }
-                },
-                hash_map::Entry::Vacant(e) => {
-                    let task = cx
-                        .spawn(move |this, mut cx| async move {
-                            let channel = this.update(&mut cx, |this, _| {
-                                this.channel_for_id(channel_id).cloned().ok_or_else(|| {
-                                    Arc::new(anyhow!("no channel for id: {}", channel_id))
-                                })
-                            })??;
-
-                            load(channel, cx).await.map_err(Arc::new)
-                        })
-                        .shared();
-
-                    e.insert(OpenedModelHandle::Loading(task.clone()));
-                    cx.spawn({
-                        let task = task.clone();
-                        move |this, mut cx| async move {
-                            let result = task.await;
-                            this.update(&mut cx, |this, _| match result {
-                                Ok(model) => {
-                                    get_map(this).insert(
-                                        channel_id,
-                                        OpenedModelHandle::Open(model.downgrade()),
-                                    );
-                                }
-                                Err(_) => {
-                                    get_map(this).remove(&channel_id);
-                                }
-                            })
-                            .ok();
-                        }
-                    })
-                    .detach();
-                    break task;
-                }
-            }
-        };
-        cx.background_executor()
-            .spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
-    }
-
-    pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool {
-        let Some(channel) = self.channel_for_id(channel_id) else {
-            return false;
-        };
-        channel.role == proto::ChannelRole::Admin
-    }
-
-    pub fn channel_participants(&self, channel_id: ChannelId) -> &[Arc<User>] {
-        self.channel_participants
-            .get(&channel_id)
-            .map_or(&[], |v| v.as_slice())
-    }
-
-    pub fn create_channel(
-        &self,
-        name: &str,
-        parent_id: Option<ChannelId>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ChannelId>> {
-        let client = self.client.clone();
-        let name = name.trim_start_matches("#").to_owned();
-        cx.spawn(move |this, mut cx| async move {
-            let response = client
-                .request(proto::CreateChannel { name, parent_id })
-                .await?;
-
-            let channel = response
-                .channel
-                .ok_or_else(|| anyhow!("missing channel in response"))?;
-            let channel_id = channel.id;
-
-            this.update(&mut cx, |this, cx| {
-                let task = this.update_channels(
-                    proto::UpdateChannels {
-                        channels: vec![channel],
-                        ..Default::default()
-                    },
-                    cx,
-                );
-                assert!(task.is_none());
-
-                // This event is emitted because the collab panel wants to clear the pending edit state
-                // before this frame is rendered. But we can't guarantee that the collab panel's future
-                // will resolve before this flush_effects finishes. Synchronously emitting this event
-                // ensures that the collab panel will observe this creation before the frame completes
-                cx.emit(ChannelEvent::ChannelCreated(channel_id));
-            })?;
-
-            Ok(channel_id)
-        })
-    }
-
-    pub fn move_channel(
-        &mut self,
-        channel_id: ChannelId,
-        to: Option<ChannelId>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.clone();
-        cx.spawn(move |_, _| async move {
-            let _ = client
-                .request(proto::MoveChannel { channel_id, to })
-                .await?;
-
-            Ok(())
-        })
-    }
-
-    pub fn set_channel_visibility(
-        &mut self,
-        channel_id: ChannelId,
-        visibility: ChannelVisibility,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.clone();
-        cx.spawn(move |_, _| async move {
-            let _ = client
-                .request(proto::SetChannelVisibility {
-                    channel_id,
-                    visibility: visibility.into(),
-                })
-                .await?;
-
-            Ok(())
-        })
-    }
-
-    pub fn invite_member(
-        &mut self,
-        channel_id: ChannelId,
-        user_id: UserId,
-        role: proto::ChannelRole,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if !self.outgoing_invites.insert((channel_id, user_id)) {
-            return Task::ready(Err(anyhow!("invite request already in progress")));
-        }
-
-        cx.notify();
-        let client = self.client.clone();
-        cx.spawn(move |this, mut cx| async move {
-            let result = client
-                .request(proto::InviteChannelMember {
-                    channel_id,
-                    user_id,
-                    role: role.into(),
-                })
-                .await;
-
-            this.update(&mut cx, |this, cx| {
-                this.outgoing_invites.remove(&(channel_id, user_id));
-                cx.notify();
-            })?;
-
-            result?;
-
-            Ok(())
-        })
-    }
-
-    pub fn remove_member(
-        &mut self,
-        channel_id: ChannelId,
-        user_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if !self.outgoing_invites.insert((channel_id, user_id)) {
-            return Task::ready(Err(anyhow!("invite request already in progress")));
-        }
-
-        cx.notify();
-        let client = self.client.clone();
-        cx.spawn(move |this, mut cx| async move {
-            let result = client
-                .request(proto::RemoveChannelMember {
-                    channel_id,
-                    user_id,
-                })
-                .await;
-
-            this.update(&mut cx, |this, cx| {
-                this.outgoing_invites.remove(&(channel_id, user_id));
-                cx.notify();
-            })?;
-            result?;
-            Ok(())
-        })
-    }
-
-    pub fn set_member_role(
-        &mut self,
-        channel_id: ChannelId,
-        user_id: UserId,
-        role: proto::ChannelRole,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        if !self.outgoing_invites.insert((channel_id, user_id)) {
-            return Task::ready(Err(anyhow!("member request already in progress")));
-        }
-
-        cx.notify();
-        let client = self.client.clone();
-        cx.spawn(move |this, mut cx| async move {
-            let result = client
-                .request(proto::SetChannelMemberRole {
-                    channel_id,
-                    user_id,
-                    role: role.into(),
-                })
-                .await;
-
-            this.update(&mut cx, |this, cx| {
-                this.outgoing_invites.remove(&(channel_id, user_id));
-                cx.notify();
-            })?;
-
-            result?;
-            Ok(())
-        })
-    }
-
-    pub fn rename(
-        &mut self,
-        channel_id: ChannelId,
-        new_name: &str,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.clone();
-        let name = new_name.to_string();
-        cx.spawn(move |this, mut cx| async move {
-            let channel = client
-                .request(proto::RenameChannel { channel_id, name })
-                .await?
-                .channel
-                .ok_or_else(|| anyhow!("missing channel in response"))?;
-            this.update(&mut cx, |this, cx| {
-                let task = this.update_channels(
-                    proto::UpdateChannels {
-                        channels: vec![channel],
-                        ..Default::default()
-                    },
-                    cx,
-                );
-                assert!(task.is_none());
-
-                // This event is emitted because the collab panel wants to clear the pending edit state
-                // before this frame is rendered. But we can't guarantee that the collab panel's future
-                // will resolve before this flush_effects finishes. Synchronously emitting this event
-                // ensures that the collab panel will observe this creation before the frame complete
-                cx.emit(ChannelEvent::ChannelRenamed(channel_id))
-            })?;
-            Ok(())
-        })
-    }
-
-    pub fn respond_to_channel_invite(
-        &mut self,
-        channel_id: ChannelId,
-        accept: bool,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.clone();
-        cx.background_executor().spawn(async move {
-            client
-                .request(proto::RespondToChannelInvite { channel_id, accept })
-                .await?;
-            Ok(())
-        })
-    }
-
-    pub fn get_channel_member_details(
-        &self,
-        channel_id: ChannelId,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<ChannelMembership>>> {
-        let client = self.client.clone();
-        let user_store = self.user_store.downgrade();
-        cx.spawn(move |_, mut cx| async move {
-            let response = client
-                .request(proto::GetChannelMembers { channel_id })
-                .await?;
-
-            let user_ids = response.members.iter().map(|m| m.user_id).collect();
-            let user_store = user_store
-                .upgrade()
-                .ok_or_else(|| anyhow!("user store dropped"))?;
-            let users = user_store
-                .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
-                .await?;
-
-            Ok(users
-                .into_iter()
-                .zip(response.members)
-                .filter_map(|(user, member)| {
-                    Some(ChannelMembership {
-                        user,
-                        role: member.role(),
-                        kind: member.kind(),
-                    })
-                })
-                .collect())
-        })
-    }
-
-    pub fn remove_channel(&self, channel_id: ChannelId) -> impl Future<Output = Result<()>> {
-        let client = self.client.clone();
-        async move {
-            client.request(proto::DeleteChannel { channel_id }).await?;
-            Ok(())
-        }
-    }
-
-    pub fn has_pending_channel_invite_response(&self, _: &Arc<Channel>) -> bool {
-        false
-    }
-
-    pub fn has_pending_channel_invite(&self, channel_id: ChannelId, user_id: UserId) -> bool {
-        self.outgoing_invites.contains(&(channel_id, user_id))
-    }
-
-    async fn handle_update_channels(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::UpdateChannels>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, _| {
-            this.update_channels_tx
-                .unbounded_send(message.payload)
-                .unwrap();
-        })?;
-        Ok(())
-    }
-
-    fn handle_connect(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        self.channel_index.clear();
-        self.channel_invitations.clear();
-        self.channel_participants.clear();
-        self.channel_index.clear();
-        self.outgoing_invites.clear();
-        self.disconnect_channel_buffers_task.take();
-
-        for chat in self.opened_chats.values() {
-            if let OpenedModelHandle::Open(chat) = chat {
-                if let Some(chat) = chat.upgrade() {
-                    chat.update(cx, |chat, cx| {
-                        chat.rejoin(cx);
-                    });
-                }
-            }
-        }
-
-        let mut buffer_versions = Vec::new();
-        for buffer in self.opened_buffers.values() {
-            if let OpenedModelHandle::Open(buffer) = buffer {
-                if let Some(buffer) = buffer.upgrade() {
-                    let channel_buffer = buffer.read(cx);
-                    let buffer = channel_buffer.buffer().read(cx);
-                    buffer_versions.push(proto::ChannelBufferVersion {
-                        channel_id: channel_buffer.channel_id,
-                        epoch: channel_buffer.epoch(),
-                        version: language::proto::serialize_version(&buffer.version()),
-                    });
-                }
-            }
-        }
-
-        if buffer_versions.is_empty() {
-            return Task::ready(Ok(()));
-        }
-
-        let response = self.client.request(proto::RejoinChannelBuffers {
-            buffers: buffer_versions,
-        });
-
-        cx.spawn(|this, mut cx| async move {
-            let mut response = response.await?;
-
-            this.update(&mut cx, |this, cx| {
-                this.opened_buffers.retain(|_, buffer| match buffer {
-                    OpenedModelHandle::Open(channel_buffer) => {
-                        let Some(channel_buffer) = channel_buffer.upgrade() else {
-                            return false;
-                        };
-
-                        channel_buffer.update(cx, |channel_buffer, cx| {
-                            let channel_id = channel_buffer.channel_id;
-                            if let Some(remote_buffer) = response
-                                .buffers
-                                .iter_mut()
-                                .find(|buffer| buffer.channel_id == channel_id)
-                            {
-                                let channel_id = channel_buffer.channel_id;
-                                let remote_version =
-                                    language::proto::deserialize_version(&remote_buffer.version);
-
-                                channel_buffer.replace_collaborators(
-                                    mem::take(&mut remote_buffer.collaborators),
-                                    cx,
-                                );
-
-                                let operations = channel_buffer
-                                    .buffer()
-                                    .update(cx, |buffer, cx| {
-                                        let outgoing_operations =
-                                            buffer.serialize_ops(Some(remote_version), cx);
-                                        let incoming_operations =
-                                            mem::take(&mut remote_buffer.operations)
-                                                .into_iter()
-                                                .map(language::proto::deserialize_operation)
-                                                .collect::<Result<Vec<_>>>()?;
-                                        buffer.apply_ops(incoming_operations, cx)?;
-                                        anyhow::Ok(outgoing_operations)
-                                    })
-                                    .log_err();
-
-                                if let Some(operations) = operations {
-                                    let client = this.client.clone();
-                                    cx.background_executor()
-                                        .spawn(async move {
-                                            let operations = operations.await;
-                                            for chunk in
-                                                language::proto::split_operations(operations)
-                                            {
-                                                client
-                                                    .send(proto::UpdateChannelBuffer {
-                                                        channel_id,
-                                                        operations: chunk,
-                                                    })
-                                                    .ok();
-                                            }
-                                        })
-                                        .detach();
-                                    return true;
-                                }
-                            }
-
-                            channel_buffer.disconnect(cx);
-                            false
-                        })
-                    }
-                    OpenedModelHandle::Loading(_) => true,
-                });
-            })
-            .ok();
-            anyhow::Ok(())
-        })
-    }
-
-    fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut ModelContext<Self>) {
-        cx.notify();
-
-        self.disconnect_channel_buffers_task.get_or_insert_with(|| {
-            cx.spawn(move |this, mut cx| async move {
-                if wait_for_reconnect {
-                    cx.background_executor().timer(RECONNECT_TIMEOUT).await;
-                }
-
-                if let Some(this) = this.upgrade() {
-                    this.update(&mut cx, |this, cx| {
-                        for (_, buffer) in this.opened_buffers.drain() {
-                            if let OpenedModelHandle::Open(buffer) = buffer {
-                                if let Some(buffer) = buffer.upgrade() {
-                                    buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
-                                }
-                            }
-                        }
-                    })
-                    .ok();
-                }
-            })
-        });
-    }
-
-    pub(crate) fn update_channels(
-        &mut self,
-        payload: proto::UpdateChannels,
-        cx: &mut ModelContext<ChannelStore>,
-    ) -> Option<Task<Result<()>>> {
-        if !payload.remove_channel_invitations.is_empty() {
-            self.channel_invitations
-                .retain(|channel| !payload.remove_channel_invitations.contains(&channel.id));
-        }
-        for channel in payload.channel_invitations {
-            match self
-                .channel_invitations
-                .binary_search_by_key(&channel.id, |c| c.id)
-            {
-                Ok(ix) => {
-                    Arc::make_mut(&mut self.channel_invitations[ix]).name = channel.name.into()
-                }
-                Err(ix) => self.channel_invitations.insert(
-                    ix,
-                    Arc::new(Channel {
-                        id: channel.id,
-                        visibility: channel.visibility(),
-                        role: channel.role(),
-                        name: channel.name.into(),
-                        unseen_note_version: None,
-                        unseen_message_id: None,
-                        parent_path: channel.parent_path,
-                    }),
-                ),
-            }
-        }
-
-        let channels_changed = !payload.channels.is_empty()
-            || !payload.delete_channels.is_empty()
-            || !payload.unseen_channel_messages.is_empty()
-            || !payload.unseen_channel_buffer_changes.is_empty();
-
-        if channels_changed {
-            if !payload.delete_channels.is_empty() {
-                self.channel_index.delete_channels(&payload.delete_channels);
-                self.channel_participants
-                    .retain(|channel_id, _| !&payload.delete_channels.contains(channel_id));
-
-                for channel_id in &payload.delete_channels {
-                    let channel_id = *channel_id;
-                    if payload
-                        .channels
-                        .iter()
-                        .any(|channel| channel.id == channel_id)
-                    {
-                        continue;
-                    }
-                    if let Some(OpenedModelHandle::Open(buffer)) =
-                        self.opened_buffers.remove(&channel_id)
-                    {
-                        if let Some(buffer) = buffer.upgrade() {
-                            buffer.update(cx, ChannelBuffer::disconnect);
-                        }
-                    }
-                }
-            }
-
-            let mut index = self.channel_index.bulk_insert();
-            for channel in payload.channels {
-                let id = channel.id;
-                let channel_changed = index.insert(channel);
-
-                if channel_changed {
-                    if let Some(OpenedModelHandle::Open(buffer)) = self.opened_buffers.get(&id) {
-                        if let Some(buffer) = buffer.upgrade() {
-                            buffer.update(cx, ChannelBuffer::channel_changed);
-                        }
-                    }
-                }
-            }
-
-            for unseen_buffer_change in payload.unseen_channel_buffer_changes {
-                let version = language::proto::deserialize_version(&unseen_buffer_change.version);
-                index.note_changed(
-                    unseen_buffer_change.channel_id,
-                    unseen_buffer_change.epoch,
-                    &version,
-                );
-            }
-
-            for unseen_channel_message in payload.unseen_channel_messages {
-                index.new_messages(
-                    unseen_channel_message.channel_id,
-                    unseen_channel_message.message_id,
-                );
-            }
-        }
-
-        cx.notify();
-        if payload.channel_participants.is_empty() {
-            return None;
-        }
-
-        let mut all_user_ids = Vec::new();
-        let channel_participants = payload.channel_participants;
-        for entry in &channel_participants {
-            for user_id in entry.participant_user_ids.iter() {
-                if let Err(ix) = all_user_ids.binary_search(user_id) {
-                    all_user_ids.insert(ix, *user_id);
-                }
-            }
-        }
-
-        let users = self
-            .user_store
-            .update(cx, |user_store, cx| user_store.get_users(all_user_ids, cx));
-        Some(cx.spawn(|this, mut cx| async move {
-            let users = users.await?;
-
-            this.update(&mut cx, |this, cx| {
-                for entry in &channel_participants {
-                    let mut participants: Vec<_> = entry
-                        .participant_user_ids
-                        .iter()
-                        .filter_map(|user_id| {
-                            users
-                                .binary_search_by_key(&user_id, |user| &user.id)
-                                .ok()
-                                .map(|ix| users[ix].clone())
-                        })
-                        .collect();
-
-                    participants.sort_by_key(|u| u.id);
-
-                    this.channel_participants
-                        .insert(entry.channel_id, participants);
-                }
-
-                cx.notify();
-            })
-        }))
-    }
-}

crates/channel2/src/channel_store/channel_index.rs 🔗

@@ -1,184 +0,0 @@
-use crate::{Channel, ChannelId};
-use collections::BTreeMap;
-use rpc::proto;
-use std::sync::Arc;
-
-#[derive(Default, Debug)]
-pub struct ChannelIndex {
-    channels_ordered: Vec<ChannelId>,
-    channels_by_id: BTreeMap<ChannelId, Arc<Channel>>,
-}
-
-impl ChannelIndex {
-    pub fn by_id(&self) -> &BTreeMap<ChannelId, Arc<Channel>> {
-        &self.channels_by_id
-    }
-
-    pub fn ordered_channels(&self) -> &[ChannelId] {
-        &self.channels_ordered
-    }
-
-    pub fn clear(&mut self) {
-        self.channels_ordered.clear();
-        self.channels_by_id.clear();
-    }
-
-    /// Delete the given channels from this index.
-    pub fn delete_channels(&mut self, channels: &[ChannelId]) {
-        self.channels_by_id
-            .retain(|channel_id, _| !channels.contains(channel_id));
-        self.channels_ordered
-            .retain(|channel_id| !channels.contains(channel_id));
-    }
-
-    pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard {
-        ChannelPathsInsertGuard {
-            channels_ordered: &mut self.channels_ordered,
-            channels_by_id: &mut self.channels_by_id,
-        }
-    }
-
-    pub fn acknowledge_note_version(
-        &mut self,
-        channel_id: ChannelId,
-        epoch: u64,
-        version: &clock::Global,
-    ) {
-        if let Some(channel) = self.channels_by_id.get_mut(&channel_id) {
-            let channel = Arc::make_mut(channel);
-            if let Some((unseen_epoch, unseen_version)) = &channel.unseen_note_version {
-                if epoch > *unseen_epoch
-                    || epoch == *unseen_epoch && version.observed_all(unseen_version)
-                {
-                    channel.unseen_note_version = None;
-                }
-            }
-        }
-    }
-
-    pub fn acknowledge_message_id(&mut self, channel_id: ChannelId, message_id: u64) {
-        if let Some(channel) = self.channels_by_id.get_mut(&channel_id) {
-            let channel = Arc::make_mut(channel);
-            if let Some(unseen_message_id) = channel.unseen_message_id {
-                if message_id >= unseen_message_id {
-                    channel.unseen_message_id = None;
-                }
-            }
-        }
-    }
-
-    pub fn note_changed(&mut self, channel_id: ChannelId, epoch: u64, version: &clock::Global) {
-        insert_note_changed(&mut self.channels_by_id, channel_id, epoch, version);
-    }
-
-    pub fn new_message(&mut self, channel_id: ChannelId, message_id: u64) {
-        insert_new_message(&mut self.channels_by_id, channel_id, message_id)
-    }
-}
-
-/// A guard for ensuring that the paths index maintains its sort and uniqueness
-/// invariants after a series of insertions
-#[derive(Debug)]
-pub struct ChannelPathsInsertGuard<'a> {
-    channels_ordered: &'a mut Vec<ChannelId>,
-    channels_by_id: &'a mut BTreeMap<ChannelId, Arc<Channel>>,
-}
-
-impl<'a> ChannelPathsInsertGuard<'a> {
-    pub fn note_changed(&mut self, channel_id: ChannelId, epoch: u64, version: &clock::Global) {
-        insert_note_changed(&mut self.channels_by_id, channel_id, epoch, &version);
-    }
-
-    pub fn new_messages(&mut self, channel_id: ChannelId, message_id: u64) {
-        insert_new_message(&mut self.channels_by_id, channel_id, message_id)
-    }
-
-    pub fn insert(&mut self, channel_proto: proto::Channel) -> bool {
-        let mut ret = false;
-        if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
-            let existing_channel = Arc::make_mut(existing_channel);
-
-            ret = existing_channel.visibility != channel_proto.visibility()
-                || existing_channel.role != channel_proto.role()
-                || existing_channel.name != channel_proto.name;
-
-            existing_channel.visibility = channel_proto.visibility();
-            existing_channel.role = channel_proto.role();
-            existing_channel.name = channel_proto.name.into();
-        } else {
-            self.channels_by_id.insert(
-                channel_proto.id,
-                Arc::new(Channel {
-                    id: channel_proto.id,
-                    visibility: channel_proto.visibility(),
-                    role: channel_proto.role(),
-                    name: channel_proto.name.into(),
-                    unseen_note_version: None,
-                    unseen_message_id: None,
-                    parent_path: channel_proto.parent_path,
-                }),
-            );
-            self.insert_root(channel_proto.id);
-        }
-        ret
-    }
-
-    fn insert_root(&mut self, channel_id: ChannelId) {
-        self.channels_ordered.push(channel_id);
-    }
-}
-
-impl<'a> Drop for ChannelPathsInsertGuard<'a> {
-    fn drop(&mut self) {
-        self.channels_ordered.sort_by(|a, b| {
-            let a = channel_path_sorting_key(*a, &self.channels_by_id);
-            let b = channel_path_sorting_key(*b, &self.channels_by_id);
-            a.cmp(b)
-        });
-        self.channels_ordered.dedup();
-    }
-}
-
-fn channel_path_sorting_key<'a>(
-    id: ChannelId,
-    channels_by_id: &'a BTreeMap<ChannelId, Arc<Channel>>,
-) -> impl Iterator<Item = &str> {
-    let (parent_path, name) = channels_by_id
-        .get(&id)
-        .map_or((&[] as &[_], None), |channel| {
-            (channel.parent_path.as_slice(), Some(channel.name.as_ref()))
-        });
-    parent_path
-        .iter()
-        .filter_map(|id| Some(channels_by_id.get(id)?.name.as_ref()))
-        .chain(name)
-}
-
-fn insert_note_changed(
-    channels_by_id: &mut BTreeMap<ChannelId, Arc<Channel>>,
-    channel_id: u64,
-    epoch: u64,
-    version: &clock::Global,
-) {
-    if let Some(channel) = channels_by_id.get_mut(&channel_id) {
-        let unseen_version = Arc::make_mut(channel)
-            .unseen_note_version
-            .get_or_insert((0, clock::Global::new()));
-        if epoch > unseen_version.0 {
-            *unseen_version = (epoch, version.clone());
-        } else {
-            unseen_version.1.join(&version);
-        }
-    }
-}
-
-fn insert_new_message(
-    channels_by_id: &mut BTreeMap<ChannelId, Arc<Channel>>,
-    channel_id: u64,
-    message_id: u64,
-) {
-    if let Some(channel) = channels_by_id.get_mut(&channel_id) {
-        let unseen_message_id = Arc::make_mut(channel).unseen_message_id.get_or_insert(0);
-        *unseen_message_id = message_id.max(*unseen_message_id);
-    }
-}

crates/channel2/src/channel_store_tests.rs 🔗

@@ -1,380 +0,0 @@
-use crate::channel_chat::ChannelChatEvent;
-
-use super::*;
-use client::{test::FakeServer, Client, UserStore};
-use gpui::{AppContext, Context, Model, TestAppContext};
-use rpc::proto::{self};
-use settings::SettingsStore;
-use util::http::FakeHttpClient;
-
-#[gpui::test]
-fn test_update_channels(cx: &mut AppContext) {
-    let channel_store = init_test(cx);
-
-    update_channels(
-        &channel_store,
-        proto::UpdateChannels {
-            channels: vec![
-                proto::Channel {
-                    id: 1,
-                    name: "b".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Admin.into(),
-                    parent_path: Vec::new(),
-                },
-                proto::Channel {
-                    id: 2,
-                    name: "a".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Member.into(),
-                    parent_path: Vec::new(),
-                },
-            ],
-            ..Default::default()
-        },
-        cx,
-    );
-    assert_channels(
-        &channel_store,
-        &[
-            //
-            (0, "a".to_string(), proto::ChannelRole::Member),
-            (0, "b".to_string(), proto::ChannelRole::Admin),
-        ],
-        cx,
-    );
-
-    update_channels(
-        &channel_store,
-        proto::UpdateChannels {
-            channels: vec![
-                proto::Channel {
-                    id: 3,
-                    name: "x".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Admin.into(),
-                    parent_path: vec![1],
-                },
-                proto::Channel {
-                    id: 4,
-                    name: "y".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Member.into(),
-                    parent_path: vec![2],
-                },
-            ],
-            ..Default::default()
-        },
-        cx,
-    );
-    assert_channels(
-        &channel_store,
-        &[
-            (0, "a".to_string(), proto::ChannelRole::Member),
-            (1, "y".to_string(), proto::ChannelRole::Member),
-            (0, "b".to_string(), proto::ChannelRole::Admin),
-            (1, "x".to_string(), proto::ChannelRole::Admin),
-        ],
-        cx,
-    );
-}
-
-#[gpui::test]
-fn test_dangling_channel_paths(cx: &mut AppContext) {
-    let channel_store = init_test(cx);
-
-    update_channels(
-        &channel_store,
-        proto::UpdateChannels {
-            channels: vec![
-                proto::Channel {
-                    id: 0,
-                    name: "a".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Admin.into(),
-                    parent_path: vec![],
-                },
-                proto::Channel {
-                    id: 1,
-                    name: "b".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Admin.into(),
-                    parent_path: vec![0],
-                },
-                proto::Channel {
-                    id: 2,
-                    name: "c".to_string(),
-                    visibility: proto::ChannelVisibility::Members as i32,
-                    role: proto::ChannelRole::Admin.into(),
-                    parent_path: vec![0, 1],
-                },
-            ],
-            ..Default::default()
-        },
-        cx,
-    );
-    // Sanity check
-    assert_channels(
-        &channel_store,
-        &[
-            //
-            (0, "a".to_string(), proto::ChannelRole::Admin),
-            (1, "b".to_string(), proto::ChannelRole::Admin),
-            (2, "c".to_string(), proto::ChannelRole::Admin),
-        ],
-        cx,
-    );
-
-    update_channels(
-        &channel_store,
-        proto::UpdateChannels {
-            delete_channels: vec![1, 2],
-            ..Default::default()
-        },
-        cx,
-    );
-
-    // Make sure that the 1/2/3 path is gone
-    assert_channels(
-        &channel_store,
-        &[(0, "a".to_string(), proto::ChannelRole::Admin)],
-        cx,
-    );
-}
-
-#[gpui::test]
-async fn test_channel_messages(cx: &mut TestAppContext) {
-    let user_id = 5;
-    let channel_id = 5;
-    let channel_store = cx.update(init_test);
-    let client = channel_store.update(cx, |s, _| s.client());
-    let server = FakeServer::for_client(user_id, &client, cx).await;
-
-    // Get the available channels.
-    server.send(proto::UpdateChannels {
-        channels: vec![proto::Channel {
-            id: channel_id,
-            name: "the-channel".to_string(),
-            visibility: proto::ChannelVisibility::Members as i32,
-            role: proto::ChannelRole::Member.into(),
-            parent_path: vec![],
-        }],
-        ..Default::default()
-    });
-    cx.executor().run_until_parked();
-    cx.update(|cx| {
-        assert_channels(
-            &channel_store,
-            &[(0, "the-channel".to_string(), proto::ChannelRole::Member)],
-            cx,
-        );
-    });
-
-    let get_users = server.receive::<proto::GetUsers>().await.unwrap();
-    assert_eq!(get_users.payload.user_ids, vec![5]);
-    server.respond(
-        get_users.receipt(),
-        proto::UsersResponse {
-            users: vec![proto::User {
-                id: 5,
-                github_login: "nathansobo".into(),
-                avatar_url: "http://avatar.com/nathansobo".into(),
-            }],
-        },
-    );
-
-    // Join a channel and populate its existing messages.
-    let channel = channel_store.update(cx, |store, cx| {
-        let channel_id = store.ordered_channels().next().unwrap().1.id;
-        store.open_channel_chat(channel_id, cx)
-    });
-    let join_channel = server.receive::<proto::JoinChannelChat>().await.unwrap();
-    server.respond(
-        join_channel.receipt(),
-        proto::JoinChannelChatResponse {
-            messages: vec![
-                proto::ChannelMessage {
-                    id: 10,
-                    body: "a".into(),
-                    timestamp: 1000,
-                    sender_id: 5,
-                    mentions: vec![],
-                    nonce: Some(1.into()),
-                },
-                proto::ChannelMessage {
-                    id: 11,
-                    body: "b".into(),
-                    timestamp: 1001,
-                    sender_id: 6,
-                    mentions: vec![],
-                    nonce: Some(2.into()),
-                },
-            ],
-            done: false,
-        },
-    );
-
-    cx.executor().start_waiting();
-
-    // Client requests all users for the received messages
-    let mut get_users = server.receive::<proto::GetUsers>().await.unwrap();
-    get_users.payload.user_ids.sort();
-    assert_eq!(get_users.payload.user_ids, vec![6]);
-    server.respond(
-        get_users.receipt(),
-        proto::UsersResponse {
-            users: vec![proto::User {
-                id: 6,
-                github_login: "maxbrunsfeld".into(),
-                avatar_url: "http://avatar.com/maxbrunsfeld".into(),
-            }],
-        },
-    );
-
-    let channel = channel.await.unwrap();
-    channel.update(cx, |channel, _| {
-        assert_eq!(
-            channel
-                .messages_in_range(0..2)
-                .map(|message| (message.sender.github_login.clone(), message.body.clone()))
-                .collect::<Vec<_>>(),
-            &[
-                ("nathansobo".into(), "a".into()),
-                ("maxbrunsfeld".into(), "b".into())
-            ]
-        );
-    });
-
-    // Receive a new message.
-    server.send(proto::ChannelMessageSent {
-        channel_id,
-        message: Some(proto::ChannelMessage {
-            id: 12,
-            body: "c".into(),
-            timestamp: 1002,
-            sender_id: 7,
-            mentions: vec![],
-            nonce: Some(3.into()),
-        }),
-    });
-
-    // Client requests user for message since they haven't seen them yet
-    let get_users = server.receive::<proto::GetUsers>().await.unwrap();
-    assert_eq!(get_users.payload.user_ids, vec![7]);
-    server.respond(
-        get_users.receipt(),
-        proto::UsersResponse {
-            users: vec![proto::User {
-                id: 7,
-                github_login: "as-cii".into(),
-                avatar_url: "http://avatar.com/as-cii".into(),
-            }],
-        },
-    );
-
-    assert_eq!(
-        channel.next_event(cx),
-        ChannelChatEvent::MessagesUpdated {
-            old_range: 2..2,
-            new_count: 1,
-        }
-    );
-    channel.update(cx, |channel, _| {
-        assert_eq!(
-            channel
-                .messages_in_range(2..3)
-                .map(|message| (message.sender.github_login.clone(), message.body.clone()))
-                .collect::<Vec<_>>(),
-            &[("as-cii".into(), "c".into())]
-        )
-    });
-
-    // Scroll up to view older messages.
-    channel.update(cx, |channel, cx| {
-        channel.load_more_messages(cx).unwrap().detach();
-    });
-    let get_messages = server.receive::<proto::GetChannelMessages>().await.unwrap();
-    assert_eq!(get_messages.payload.channel_id, 5);
-    assert_eq!(get_messages.payload.before_message_id, 10);
-    server.respond(
-        get_messages.receipt(),
-        proto::GetChannelMessagesResponse {
-            done: true,
-            messages: vec![
-                proto::ChannelMessage {
-                    id: 8,
-                    body: "y".into(),
-                    timestamp: 998,
-                    sender_id: 5,
-                    nonce: Some(4.into()),
-                    mentions: vec![],
-                },
-                proto::ChannelMessage {
-                    id: 9,
-                    body: "z".into(),
-                    timestamp: 999,
-                    sender_id: 6,
-                    nonce: Some(5.into()),
-                    mentions: vec![],
-                },
-            ],
-        },
-    );
-
-    assert_eq!(
-        channel.next_event(cx),
-        ChannelChatEvent::MessagesUpdated {
-            old_range: 0..0,
-            new_count: 2,
-        }
-    );
-    channel.update(cx, |channel, _| {
-        assert_eq!(
-            channel
-                .messages_in_range(0..2)
-                .map(|message| (message.sender.github_login.clone(), message.body.clone()))
-                .collect::<Vec<_>>(),
-            &[
-                ("nathansobo".into(), "y".into()),
-                ("maxbrunsfeld".into(), "z".into())
-            ]
-        );
-    });
-}
-
-fn init_test(cx: &mut AppContext) -> Model<ChannelStore> {
-    let http = FakeHttpClient::with_404_response();
-    let client = Client::new(http.clone(), cx);
-    let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
-
-    let settings_store = SettingsStore::test(cx);
-    cx.set_global(settings_store);
-    client::init(&client, cx);
-    crate::init(&client, user_store, cx);
-
-    ChannelStore::global(cx)
-}
-
-fn update_channels(
-    channel_store: &Model<ChannelStore>,
-    message: proto::UpdateChannels,
-    cx: &mut AppContext,
-) {
-    let task = channel_store.update(cx, |store, cx| store.update_channels(message, cx));
-    assert!(task.is_none());
-}
-
-#[track_caller]
-fn assert_channels(
-    channel_store: &Model<ChannelStore>,
-    expected_channels: &[(usize, String, proto::ChannelRole)],
-    cx: &mut AppContext,
-) {
-    let actual = channel_store.update(cx, |store, _| {
-        store
-            .ordered_channels()
-            .map(|(depth, channel)| (depth, channel.name.to_string(), channel.role))
-            .collect::<Vec<_>>()
-    });
-    assert_eq!(actual, expected_channels);
-}

crates/client/Cargo.toml 🔗

@@ -14,12 +14,12 @@ test-support = ["collections/test-support", "gpui/test-support", "rpc/test-suppo
 [dependencies]
 chrono = { version = "0.4", features = ["serde"] }
 collections = { path = "../collections" }
-db = { path = "../db" }
-gpui = { path = "../gpui" }
+db = { package = "db2", path = "../db2" }
+gpui = { package = "gpui2", path = "../gpui2" }
 util = { path = "../util" }
-rpc = { path = "../rpc" }
-text = { path = "../text" }
-settings = { path = "../settings" }
+rpc = { package = "rpc2", path = "../rpc2" }
+text = { package = "text2",  path = "../text2" }
+settings = { package = "settings2", path = "../settings2" }
 feature_flags = { path = "../feature_flags" }
 sum_tree = { path = "../sum_tree" }
 
@@ -47,7 +47,7 @@ url = "2.2"
 
 [dev-dependencies]
 collections = { path = "../collections", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
-rpc = { path = "../rpc", features = ["test-support"] }
-settings = { path = "../settings", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
+settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

crates/client/src/client.rs 🔗

@@ -4,20 +4,19 @@ pub mod test;
 pub mod telemetry;
 pub mod user;
 
-use anyhow::{anyhow, Context, Result};
+use anyhow::{anyhow, Context as _, Result};
 use async_recursion::async_recursion;
 use async_tungstenite::tungstenite::{
     error::Error as WebsocketError,
     http::{Request, StatusCode},
 };
 use futures::{
-    future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryFutureExt as _,
-    TryStreamExt,
+    channel::oneshot, future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt,
+    TryFutureExt as _, TryStreamExt,
 };
 use gpui::{
-    actions, platform::AppVersion, serde_json, AnyModelHandle, AnyWeakModelHandle,
-    AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelHandle, Task, View, ViewContext,
-    WeakViewHandle,
+    actions, serde_json, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Model,
+    SemanticVersion, Task, WeakModel,
 };
 use lazy_static::lazy_static;
 use parking_lot::RwLock;
@@ -26,6 +25,7 @@ use rand::prelude::*;
 use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
+use settings::Settings;
 use std::{
     any::TypeId,
     collections::HashMap,
@@ -57,7 +57,7 @@ lazy_static! {
     pub static ref ADMIN_API_TOKEN: Option<String> = std::env::var("ZED_ADMIN_API_TOKEN")
         .ok()
         .and_then(|s| if s.is_empty() { None } else { Some(s) });
-    pub static ref ZED_APP_VERSION: Option<AppVersion> = std::env::var("ZED_APP_VERSION")
+    pub static ref ZED_APP_VERSION: Option<SemanticVersion> = std::env::var("ZED_APP_VERSION")
         .ok()
         .and_then(|v| v.parse().ok());
     pub static ref ZED_APP_PATH: Option<PathBuf> =
@@ -73,14 +73,14 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(5);
 actions!(client, [SignIn, SignOut, Reconnect]);
 
 pub fn init_settings(cx: &mut AppContext) {
-    settings::register::<TelemetrySettings>(cx);
+    TelemetrySettings::register(cx);
 }
 
 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
     init_settings(cx);
 
     let client = Arc::downgrade(client);
-    cx.add_global_action({
+    cx.on_action({
         let client = client.clone();
         move |_: &SignIn, cx| {
             if let Some(client) = client.upgrade() {
@@ -91,7 +91,8 @@ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
             }
         }
     });
-    cx.add_global_action({
+
+    cx.on_action({
         let client = client.clone();
         move |_: &SignOut, cx| {
             if let Some(client) = client.upgrade() {
@@ -102,7 +103,8 @@ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
             }
         }
     });
-    cx.add_global_action({
+
+    cx.on_action({
         let client = client.clone();
         move |_: &Reconnect, cx| {
             if let Some(client) = client.upgrade() {
@@ -216,7 +218,7 @@ struct ClientState {
     _reconnect_task: Option<Task<()>>,
     reconnect_interval: Duration,
     entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
-    models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
+    models_by_message_type: HashMap<TypeId, AnyWeakModel>,
     entity_types_by_message_type: HashMap<TypeId, TypeId>,
     #[allow(clippy::type_complexity)]
     message_handlers: HashMap<
@@ -225,7 +227,7 @@ struct ClientState {
             dyn Send
                 + Sync
                 + Fn(
-                    Subscriber,
+                    AnyModel,
                     Box<dyn AnyTypedEnvelope>,
                     &Arc<Client>,
                     AsyncAppContext,
@@ -235,16 +237,10 @@ struct ClientState {
 }
 
 enum WeakSubscriber {
-    Model(AnyWeakModelHandle),
-    View(AnyWeakViewHandle),
+    Entity { handle: AnyWeakModel },
     Pending(Vec<Box<dyn AnyTypedEnvelope>>),
 }
 
-enum Subscriber {
-    Model(AnyModelHandle),
-    View(AnyWeakViewHandle),
-}
-
 #[derive(Clone, Debug)]
 pub struct Credentials {
     pub user_id: u64,
@@ -298,15 +294,15 @@ impl Drop for Subscription {
     }
 }
 
-pub struct PendingEntitySubscription<T: Entity> {
+pub struct PendingEntitySubscription<T: 'static> {
     client: Arc<Client>,
     remote_id: u64,
     _entity_type: PhantomData<T>,
     consumed: bool,
 }
 
-impl<T: Entity> PendingEntitySubscription<T> {
-    pub fn set_model(mut self, model: &ModelHandle<T>, cx: &mut AsyncAppContext) -> Subscription {
+impl<T: 'static> PendingEntitySubscription<T> {
+    pub fn set_model(mut self, model: &Model<T>, cx: &mut AsyncAppContext) -> Subscription {
         self.consumed = true;
         let mut state = self.client.state.write();
         let id = (TypeId::of::<T>(), self.remote_id);
@@ -316,9 +312,12 @@ impl<T: Entity> PendingEntitySubscription<T> {
             unreachable!()
         };
 
-        state
-            .entities_by_type_and_remote_id
-            .insert(id, WeakSubscriber::Model(model.downgrade().into_any()));
+        state.entities_by_type_and_remote_id.insert(
+            id,
+            WeakSubscriber::Entity {
+                handle: model.downgrade().into(),
+            },
+        );
         drop(state);
         for message in messages {
             self.client.handle_message(message, cx);
@@ -330,7 +329,7 @@ impl<T: Entity> PendingEntitySubscription<T> {
     }
 }
 
-impl<T: Entity> Drop for PendingEntitySubscription<T> {
+impl<T: 'static> Drop for PendingEntitySubscription<T> {
     fn drop(&mut self) {
         if !self.consumed {
             let mut state = self.client.state.write();
@@ -346,7 +345,7 @@ impl<T: Entity> Drop for PendingEntitySubscription<T> {
     }
 }
 
-#[derive(Debug, Copy, Clone)]
+#[derive(Copy, Clone)]
 pub struct TelemetrySettings {
     pub diagnostics: bool,
     pub metrics: bool,
@@ -358,7 +357,7 @@ pub struct TelemetrySettingsContent {
     pub metrics: Option<bool>,
 }
 
-impl settings::Setting for TelemetrySettings {
+impl settings::Settings for TelemetrySettings {
     const KEY: Option<&'static str> = Some("telemetry");
 
     type FileContent = TelemetrySettingsContent;
@@ -366,7 +365,7 @@ impl settings::Setting for TelemetrySettings {
     fn load(
         default_value: &Self::FileContent,
         user_values: &[&Self::FileContent],
-        _: &AppContext,
+        _: &mut AppContext,
     ) -> Result<Self> {
         Ok(Self {
             diagnostics: user_values.first().and_then(|v| v.diagnostics).unwrap_or(
@@ -383,7 +382,7 @@ impl settings::Setting for TelemetrySettings {
 }
 
 impl Client {
-    pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
+    pub fn new(http: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
         Arc::new(Self {
             id: AtomicU64::new(0),
             peer: Peer::new(0),
@@ -475,7 +474,7 @@ impl Client {
             Status::ConnectionLost => {
                 let this = self.clone();
                 let reconnect_interval = state.reconnect_interval;
-                state._reconnect_task = Some(cx.spawn(|cx| async move {
+                state._reconnect_task = Some(cx.spawn(move |cx| async move {
                     #[cfg(any(test, feature = "test-support"))]
                     let mut rng = StdRng::seed_from_u64(0);
                     #[cfg(not(any(test, feature = "test-support")))]
@@ -491,7 +490,7 @@ impl Client {
                                 },
                                 &cx,
                             );
-                            cx.background().timer(delay).await;
+                            cx.background_executor().timer(delay).await;
                             delay = delay
                                 .mul_f32(rng.gen_range(1.0..=2.0))
                                 .min(reconnect_interval);
@@ -502,33 +501,21 @@ impl Client {
                 }));
             }
             Status::SignedOut | Status::UpgradeRequired => {
-                cx.read(|cx| self.telemetry.set_authenticated_user_info(None, false, cx));
+                cx.update(|cx| self.telemetry.set_authenticated_user_info(None, false, cx))
+                    .log_err();
                 state._reconnect_task.take();
             }
             _ => {}
         }
     }
 
-    pub fn add_view_for_remote_entity<T: View>(
+    pub fn subscribe_to_entity<T>(
         self: &Arc<Self>,
         remote_id: u64,
-        cx: &mut ViewContext<T>,
-    ) -> Subscription {
-        let id = (TypeId::of::<T>(), remote_id);
-        self.state
-            .write()
-            .entities_by_type_and_remote_id
-            .insert(id, WeakSubscriber::View(cx.weak_handle().into_any()));
-        Subscription::Entity {
-            client: Arc::downgrade(self),
-            id,
-        }
-    }
-
-    pub fn subscribe_to_entity<T: Entity>(
-        self: &Arc<Self>,
-        remote_id: u64,
-    ) -> Result<PendingEntitySubscription<T>> {
+    ) -> Result<PendingEntitySubscription<T>>
+    where
+        T: 'static,
+    {
         let id = (TypeId::of::<T>(), remote_id);
 
         let mut state = self.state.write();
@@ -550,36 +537,31 @@ impl Client {
     #[track_caller]
     pub fn add_message_handler<M, E, H, F>(
         self: &Arc<Self>,
-        model: ModelHandle<E>,
+        entity: WeakModel<E>,
         handler: H,
     ) -> Subscription
     where
         M: EnvelopedMessage,
-        E: Entity,
+        E: 'static,
         H: 'static
-            + Send
             + Sync
-            + Fn(ModelHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
+            + Send
+            + Sync,
         F: 'static + Future<Output = Result<()>>,
     {
         let message_type_id = TypeId::of::<M>();
-
         let mut state = self.state.write();
         state
             .models_by_message_type
-            .insert(message_type_id, model.downgrade().into_any());
+            .insert(message_type_id, entity.into());
 
         let prev_handler = state.message_handlers.insert(
             message_type_id,
-            Arc::new(move |handle, envelope, client, cx| {
-                let handle = if let Subscriber::Model(handle) = handle {
-                    handle
-                } else {
-                    unreachable!();
-                };
-                let model = handle.downcast::<E>().unwrap();
+            Arc::new(move |subscriber, envelope, client, cx| {
+                let subscriber = subscriber.downcast::<E>().unwrap();
                 let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
-                handler(model, *envelope, client.clone(), cx).boxed_local()
+                handler(subscriber, *envelope, client.clone(), cx).boxed_local()
             }),
         );
         if prev_handler.is_some() {
@@ -600,16 +582,17 @@ impl Client {
 
     pub fn add_request_handler<M, E, H, F>(
         self: &Arc<Self>,
-        model: ModelHandle<E>,
+        model: WeakModel<E>,
         handler: H,
     ) -> Subscription
     where
         M: RequestMessage,
-        E: Entity,
+        E: 'static,
         H: 'static
-            + Send
             + Sync
-            + Fn(ModelHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
+            + Send
+            + Sync,
         F: 'static + Future<Output = Result<M::Response>>,
     {
         self.add_message_handler(model, move |handle, envelope, this, cx| {
@@ -621,52 +604,23 @@ impl Client {
         })
     }
 
-    pub fn add_view_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
-    where
-        M: EntityMessage,
-        E: View,
-        H: 'static
-            + Send
-            + Sync
-            + Fn(WeakViewHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<()>>,
-    {
-        self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let Subscriber::View(handle) = handle {
-                handler(handle.downcast::<E>().unwrap(), message, client, cx)
-            } else {
-                unreachable!();
-            }
-        })
-    }
-
     pub fn add_model_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage,
-        E: Entity,
-        H: 'static
-            + Send
-            + Sync
-            + Fn(ModelHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+        E: 'static,
+        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
         F: 'static + Future<Output = Result<()>>,
     {
-        self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let Subscriber::Model(handle) = handle {
-                handler(handle.downcast::<E>().unwrap(), message, client, cx)
-            } else {
-                unreachable!();
-            }
+        self.add_entity_message_handler::<M, E, _, _>(move |subscriber, message, client, cx| {
+            handler(subscriber.downcast::<E>().unwrap(), message, client, cx)
         })
     }
 
     fn add_entity_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage,
-        E: Entity,
-        H: 'static
-            + Send
-            + Sync
-            + Fn(Subscriber, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+        E: 'static,
+        H: 'static + Fn(AnyModel, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
         F: 'static + Future<Output = Result<()>>,
     {
         let model_type_id = TypeId::of::<E>();
@@ -704,11 +658,8 @@ impl Client {
     pub fn add_model_request_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage + RequestMessage,
-        E: Entity,
-        H: 'static
-            + Send
-            + Sync
-            + Fn(ModelHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+        E: 'static,
+        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
         F: 'static + Future<Output = Result<M::Response>>,
     {
         self.add_model_message_handler(move |entity, envelope, client, cx| {
@@ -720,25 +671,6 @@ impl Client {
         })
     }
 
-    pub fn add_view_request_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
-    where
-        M: EntityMessage + RequestMessage,
-        E: View,
-        H: 'static
-            + Send
-            + Sync
-            + Fn(WeakViewHandle<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<M::Response>>,
-    {
-        self.add_view_message_handler(move |entity, envelope, client, cx| {
-            Self::respond_to_request::<M, _>(
-                envelope.receipt(),
-                handler(entity, envelope, client.clone(), cx),
-                client,
-            )
-        })
-    }
-
     async fn respond_to_request<T: RequestMessage, F: Future<Output = Result<T::Response>>>(
         receipt: Receipt<T>,
         response: F,
@@ -823,14 +755,15 @@ impl Client {
             self.set_status(Status::Reconnecting, cx);
         }
 
-        let mut timeout = cx.background().timer(CONNECTION_TIMEOUT).fuse();
+        let mut timeout =
+            futures::FutureExt::fuse(cx.background_executor().timer(CONNECTION_TIMEOUT));
         futures::select_biased! {
             connection = self.establish_connection(&credentials, cx).fuse() => {
                 match connection {
                     Ok(conn) => {
                         self.state.write().credentials = Some(credentials.clone());
                         if !read_from_keychain && IMPERSONATE_LOGIN.is_none() {
-                            write_credentials_to_keychain(&credentials, cx).log_err();
+                            write_credentials_to_keychain(credentials, cx).log_err();
                         }
 
                         futures::select_biased! {
@@ -844,7 +777,7 @@ impl Client {
                     Err(EstablishConnectionError::Unauthorized) => {
                         self.state.write().credentials.take();
                         if read_from_keychain {
-                            cx.platform().delete_credentials(&ZED_SERVER_URL).log_err();
+                            delete_credentials_from_keychain(cx).log_err();
                             self.set_status(Status::SignedOut, cx);
                             self.authenticate_and_connect(false, cx).await
                         } else {
@@ -874,12 +807,13 @@ impl Client {
         conn: Connection,
         cx: &AsyncAppContext,
     ) -> Result<()> {
-        let executor = cx.background();
+        let executor = cx.background_executor();
         log::info!("add connection to peer");
-        let (connection_id, handle_io, mut incoming) = self
-            .peer
-            .add_connection(conn, move |duration| executor.timer(duration));
-        let handle_io = cx.background().spawn(handle_io);
+        let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn, {
+            let executor = executor.clone();
+            move |duration| executor.timer(duration)
+        });
+        let handle_io = executor.spawn(handle_io);
 
         let peer_id = async {
             log::info!("waiting for server hello");
@@ -925,10 +859,10 @@ impl Client {
             },
             cx,
         );
-        cx.foreground()
-            .spawn({
-                let cx = cx.clone();
-                let this = self.clone();
+
+        cx.spawn({
+            let this = self.clone();
+            |cx| {
                 async move {
                     while let Some(message) = incoming.next().await {
                         this.handle_message(message, &cx);
@@ -936,13 +870,13 @@ impl Client {
                         smol::future::yield_now().await;
                     }
                 }
-            })
-            .detach();
+            }
+        })
+        .detach();
 
-        let this = self.clone();
-        let cx = cx.clone();
-        cx.foreground()
-            .spawn(async move {
+        cx.spawn({
+            let this = self.clone();
+            move |cx| async move {
                 match handle_io.await {
                     Ok(()) => {
                         if this.status().borrow().clone()
@@ -959,8 +893,9 @@ impl Client {
                         this.set_status(Status::ConnectionLost, &cx);
                     }
                 }
-            })
-            .detach();
+            }
+        })
+        .detach();
 
         Ok(())
     }
@@ -1032,13 +967,7 @@ impl Client {
         credentials: &Credentials,
         cx: &AsyncAppContext,
     ) -> Task<Result<Connection, EstablishConnectionError>> {
-        let release_channel = cx.read(|cx| {
-            if cx.has_global::<ReleaseChannel>() {
-                Some(*cx.global::<ReleaseChannel>())
-            } else {
-                None
-            }
-        });
+        let release_channel = cx.try_read_global(|channel: &ReleaseChannel, _| *channel);
 
         let request = Request::builder()
             .header(
@@ -1048,7 +977,7 @@ impl Client {
             .header("x-zed-protocol-version", rpc::PROTOCOL_VERSION);
 
         let http = self.http.clone();
-        cx.background().spawn(async move {
+        cx.background_executor().spawn(async move {
             let mut rpc_url = Self::get_rpc_url(http, release_channel).await?;
             let rpc_host = rpc_url
                 .host_str()
@@ -1089,96 +1018,118 @@ impl Client {
         self: &Arc<Self>,
         cx: &AsyncAppContext,
     ) -> Task<Result<Credentials>> {
-        let platform = cx.platform();
-        let executor = cx.background();
         let http = self.http.clone();
+        cx.spawn(|cx| async move {
+            let background = cx.background_executor().clone();
+
+            let (open_url_tx, open_url_rx) = oneshot::channel::<String>();
+            cx.update(|cx| {
+                cx.spawn(move |cx| async move {
+                    let url = open_url_rx.await?;
+                    cx.update(|cx| cx.open_url(&url))
+                })
+                .detach_and_log_err(cx);
+            })
+            .log_err();
 
-        executor.clone().spawn(async move {
-            // Generate a pair of asymmetric encryption keys. The public key will be used by the
-            // zed server to encrypt the user's access token, so that it can'be intercepted by
-            // any other app running on the user's device.
-            let (public_key, private_key) =
-                rpc::auth::keypair().expect("failed to generate keypair for auth");
-            let public_key_string =
-                String::try_from(public_key).expect("failed to serialize public key for auth");
-
-            if let Some((login, token)) = IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref()) {
-                return Self::authenticate_as_admin(http, login.clone(), token.clone()).await;
-            }
-
-            // Start an HTTP server to receive the redirect from Zed's sign-in page.
-            let server = tiny_http::Server::http("127.0.0.1:0").expect("failed to find open port");
-            let port = server.server_addr().port();
-
-            // Open the Zed sign-in page in the user's browser, with query parameters that indicate
-            // that the user is signing in from a Zed app running on the same device.
-            let mut url = format!(
-                "{}/native_app_signin?native_app_port={}&native_app_public_key={}",
-                *ZED_SERVER_URL, port, public_key_string
-            );
-
-            if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() {
-                log::info!("impersonating user @{}", impersonate_login);
-                write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
-            }
+            let credentials = background
+                .clone()
+                .spawn(async move {
+                    // Generate a pair of asymmetric encryption keys. The public key will be used by the
+                    // zed server to encrypt the user's access token, so that it can't be intercepted by
+                    // any other app running on the user's device.
+                    let (public_key, private_key) =
+                        rpc::auth::keypair().expect("failed to generate keypair for auth");
+                    let public_key_string = String::try_from(public_key)
+                        .expect("failed to serialize public key for auth");
+
+                    if let Some((login, token)) =
+                        IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref())
+                    {
+                        return Self::authenticate_as_admin(http, login.clone(), token.clone())
+                            .await;
+                    }
 
-            platform.open_url(&url);
+                    // Start an HTTP server to receive the redirect from Zed's sign-in page.
+                    let server =
+                        tiny_http::Server::http("127.0.0.1:0").expect("failed to find open port");
+                    let port = server.server_addr().port();
+
+                    // Open the Zed sign-in page in the user's browser, with query parameters that indicate
+                    // that the user is signing in from a Zed app running on the same device.
+                    let mut url = format!(
+                        "{}/native_app_signin?native_app_port={}&native_app_public_key={}",
+                        *ZED_SERVER_URL, port, public_key_string
+                    );
+
+                    if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() {
+                        log::info!("impersonating user @{}", impersonate_login);
+                        write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
+                    }
 
-            // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
-            // access token from the query params.
-            //
-            // TODO - Avoid ever starting more than one HTTP server. Maybe switch to using a
-            // custom URL scheme instead of this local HTTP server.
-            let (user_id, access_token) = executor
-                .spawn(async move {
-                    for _ in 0..100 {
-                        if let Some(req) = server.recv_timeout(Duration::from_secs(1))? {
-                            let path = req.url();
-                            let mut user_id = None;
-                            let mut access_token = None;
-                            let url = Url::parse(&format!("http://example.com{}", path))
-                                .context("failed to parse login notification url")?;
-                            for (key, value) in url.query_pairs() {
-                                if key == "access_token" {
-                                    access_token = Some(value.to_string());
-                                } else if key == "user_id" {
-                                    user_id = Some(value.to_string());
+                    open_url_tx.send(url).log_err();
+
+                    // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
+                    // access token from the query params.
+                    //
+                    // TODO - Avoid ever starting more than one HTTP server. Maybe switch to using a
+                    // custom URL scheme instead of this local HTTP server.
+                    let (user_id, access_token) = background
+                        .spawn(async move {
+                            for _ in 0..100 {
+                                if let Some(req) = server.recv_timeout(Duration::from_secs(1))? {
+                                    let path = req.url();
+                                    let mut user_id = None;
+                                    let mut access_token = None;
+                                    let url = Url::parse(&format!("http://example.com{}", path))
+                                        .context("failed to parse login notification url")?;
+                                    for (key, value) in url.query_pairs() {
+                                        if key == "access_token" {
+                                            access_token = Some(value.to_string());
+                                        } else if key == "user_id" {
+                                            user_id = Some(value.to_string());
+                                        }
+                                    }
+
+                                    let post_auth_url =
+                                        format!("{}/native_app_signin_succeeded", *ZED_SERVER_URL);
+                                    req.respond(
+                                        tiny_http::Response::empty(302).with_header(
+                                            tiny_http::Header::from_bytes(
+                                                &b"Location"[..],
+                                                post_auth_url.as_bytes(),
+                                            )
+                                            .unwrap(),
+                                        ),
+                                    )
+                                    .context("failed to respond to login http request")?;
+                                    return Ok((
+                                        user_id
+                                            .ok_or_else(|| anyhow!("missing user_id parameter"))?,
+                                        access_token.ok_or_else(|| {
+                                            anyhow!("missing access_token parameter")
+                                        })?,
+                                    ));
                                 }
                             }
 
-                            let post_auth_url =
-                                format!("{}/native_app_signin_succeeded", *ZED_SERVER_URL);
-                            req.respond(
-                                tiny_http::Response::empty(302).with_header(
-                                    tiny_http::Header::from_bytes(
-                                        &b"Location"[..],
-                                        post_auth_url.as_bytes(),
-                                    )
-                                    .unwrap(),
-                                ),
-                            )
-                            .context("failed to respond to login http request")?;
-                            return Ok((
-                                user_id.ok_or_else(|| anyhow!("missing user_id parameter"))?,
-                                access_token
-                                    .ok_or_else(|| anyhow!("missing access_token parameter"))?,
-                            ));
-                        }
-                    }
+                            Err(anyhow!("didn't receive login redirect"))
+                        })
+                        .await?;
+
+                    let access_token = private_key
+                        .decrypt_string(&access_token)
+                        .context("failed to decrypt access token")?;
 
-                    Err(anyhow!("didn't receive login redirect"))
+                    Ok(Credentials {
+                        user_id: user_id.parse()?,
+                        access_token,
+                    })
                 })
                 .await?;
 
-            let access_token = private_key
-                .decrypt_string(&access_token)
-                .context("failed to decrypt access token")?;
-            platform.activate(true);
-
-            Ok(Credentials {
-                user_id: user_id.parse()?,
-                access_token,
-            })
+            cx.update(|cx| cx.activate(true))?;
+            Ok(credentials)
         })
     }
 
@@ -1307,12 +1258,12 @@ impl Client {
 
         let mut subscriber = None;
 
-        if let Some(message_model) = state
+        if let Some(handle) = state
             .models_by_message_type
             .get(&payload_type_id)
-            .and_then(|model| model.upgrade(cx))
+            .and_then(|handle| handle.upgrade())
         {
-            subscriber = Some(Subscriber::Model(message_model));
+            subscriber = Some(handle);
         } else if let Some((extract_entity_id, entity_type_id)) =
             state.entity_id_extractors.get(&payload_type_id).zip(
                 state
@@ -1332,12 +1283,10 @@ impl Client {
                     return;
                 }
                 Some(weak_subscriber @ _) => match weak_subscriber {
-                    WeakSubscriber::Model(handle) => {
-                        subscriber = handle.upgrade(cx).map(Subscriber::Model);
-                    }
-                    WeakSubscriber::View(handle) => {
-                        subscriber = Some(Subscriber::View(handle.clone()));
+                    WeakSubscriber::Entity { handle } => {
+                        subscriber = handle.upgrade();
                     }
+
                     WeakSubscriber::Pending(_) => {}
                 },
                 _ => {}
@@ -1367,8 +1316,7 @@ impl Client {
                 sender_id,
                 type_name
             );
-            cx.foreground()
-                .spawn(async move {
+            cx.spawn(move |_| async move {
                     match future.await {
                         Ok(()) => {
                             log::debug!(
@@ -1407,22 +1355,30 @@ fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
     }
 
     let (user_id, access_token) = cx
-        .platform()
-        .read_credentials(&ZED_SERVER_URL)
-        .log_err()
-        .flatten()?;
+        .update(|cx| cx.read_credentials(&ZED_SERVER_URL).log_err().flatten())
+        .ok()??;
+
     Some(Credentials {
         user_id: user_id.parse().ok()?,
         access_token: String::from_utf8(access_token).ok()?,
     })
 }
 
-fn write_credentials_to_keychain(credentials: &Credentials, cx: &AsyncAppContext) -> Result<()> {
-    cx.platform().write_credentials(
-        &ZED_SERVER_URL,
-        &credentials.user_id.to_string(),
-        credentials.access_token.as_bytes(),
-    )
+async fn write_credentials_to_keychain(
+    credentials: Credentials,
+    cx: &AsyncAppContext,
+) -> Result<()> {
+    cx.update(move |cx| {
+        cx.write_credentials(
+            &ZED_SERVER_URL,
+            &credentials.user_id.to_string(),
+            credentials.access_token.as_bytes(),
+        )
+    })?
+}
+
+async fn delete_credentials_from_keychain(cx: &AsyncAppContext) -> Result<()> {
+    cx.update(move |cx| cx.delete_credentials(&ZED_SERVER_URL))?
 }
 
 const WORKTREE_URL_PREFIX: &str = "zed://worktrees/";
@@ -1446,15 +1402,14 @@ pub fn decode_worktree_url(url: &str) -> Option<(u64, String)> {
 mod tests {
     use super::*;
     use crate::test::FakeServer;
-    use gpui::{executor::Deterministic, TestAppContext};
+
+    use gpui::{BackgroundExecutor, Context, TestAppContext};
     use parking_lot::Mutex;
     use std::future;
     use util::http::FakeHttpClient;
 
     #[gpui::test(iterations = 10)]
     async fn test_reconnection(cx: &mut TestAppContext) {
-        cx.foreground().forbid_parking();
-
         let user_id = 5;
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
         let server = FakeServer::for_client(user_id, &client, cx).await;
@@ -1470,7 +1425,7 @@ mod tests {
         while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
 
         server.allow_connections();
-        cx.foreground().advance_clock(Duration::from_secs(10));
+        cx.executor().advance_clock(Duration::from_secs(10));
         while !matches!(status.next().await, Some(Status::Connected { .. })) {}
         assert_eq!(server.auth_count(), 1); // Client reused the cached credentials when reconnecting
 
@@ -1481,22 +1436,21 @@ mod tests {
         // Clear cached credentials after authentication fails
         server.roll_access_token();
         server.allow_connections();
-        cx.foreground().advance_clock(Duration::from_secs(10));
+        cx.executor().run_until_parked();
+        cx.executor().advance_clock(Duration::from_secs(10));
         while !matches!(status.next().await, Some(Status::Connected { .. })) {}
         assert_eq!(server.auth_count(), 2); // Client re-authenticated due to an invalid token
     }
 
     #[gpui::test(iterations = 10)]
-    async fn test_connection_timeout(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-        deterministic.forbid_parking();
-
+    async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
         let user_id = 5;
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
         let mut status = client.status();
 
         // Time out when client tries to connect.
         client.override_authenticate(move |cx| {
-            cx.foreground().spawn(async move {
+            cx.background_executor().spawn(async move {
                 Ok(Credentials {
                     user_id,
                     access_token: "token".into(),
@@ -1504,7 +1458,7 @@ mod tests {
             })
         });
         client.override_establish_connection(|_, cx| {
-            cx.foreground().spawn(async move {
+            cx.background_executor().spawn(async move {
                 future::pending::<()>().await;
                 unreachable!()
             })
@@ -1513,10 +1467,10 @@ mod tests {
             let client = client.clone();
             |cx| async move { client.authenticate_and_connect(false, &cx).await }
         });
-        deterministic.run_until_parked();
+        executor.run_until_parked();
         assert!(matches!(status.next().await, Some(Status::Connecting)));
 
-        deterministic.advance_clock(CONNECTION_TIMEOUT);
+        executor.advance_clock(CONNECTION_TIMEOUT);
         assert!(matches!(
             status.next().await,
             Some(Status::ConnectionError { .. })
@@ -1538,18 +1492,18 @@ mod tests {
         // Time out when re-establishing the connection.
         server.allow_connections();
         client.override_establish_connection(|_, cx| {
-            cx.foreground().spawn(async move {
+            cx.background_executor().spawn(async move {
                 future::pending::<()>().await;
                 unreachable!()
             })
         });
-        deterministic.advance_clock(2 * INITIAL_RECONNECTION_DELAY);
+        executor.advance_clock(2 * INITIAL_RECONNECTION_DELAY);
         assert!(matches!(
             status.next().await,
             Some(Status::Reconnecting { .. })
         ));
 
-        deterministic.advance_clock(CONNECTION_TIMEOUT);
+        executor.advance_clock(CONNECTION_TIMEOUT);
         assert!(matches!(
             status.next().await,
             Some(Status::ReconnectionError { .. })
@@ -1559,10 +1513,8 @@ mod tests {
     #[gpui::test(iterations = 10)]
     async fn test_authenticating_more_than_once(
         cx: &mut TestAppContext,
-        deterministic: Arc<Deterministic>,
+        executor: BackgroundExecutor,
     ) {
-        cx.foreground().forbid_parking();
-
         let auth_count = Arc::new(Mutex::new(0));
         let dropped_auth_count = Arc::new(Mutex::new(0));
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
@@ -1572,7 +1524,7 @@ mod tests {
             move |cx| {
                 let auth_count = auth_count.clone();
                 let dropped_auth_count = dropped_auth_count.clone();
-                cx.foreground().spawn(async move {
+                cx.background_executor().spawn(async move {
                     *auth_count.lock() += 1;
                     let _drop = util::defer(move || *dropped_auth_count.lock() += 1);
                     future::pending::<()>().await;
@@ -1581,19 +1533,19 @@ mod tests {
             }
         });
 
-        let _authenticate = cx.spawn(|cx| {
+        let _authenticate = cx.spawn({
             let client = client.clone();
-            async move { client.authenticate_and_connect(false, &cx).await }
+            move |cx| async move { client.authenticate_and_connect(false, &cx).await }
         });
-        deterministic.run_until_parked();
+        executor.run_until_parked();
         assert_eq!(*auth_count.lock(), 1);
         assert_eq!(*dropped_auth_count.lock(), 0);
 
-        let _authenticate = cx.spawn(|cx| {
+        let _authenticate = cx.spawn({
             let client = client.clone();
-            async move { client.authenticate_and_connect(false, &cx).await }
+            |cx| async move { client.authenticate_and_connect(false, &cx).await }
         });
-        deterministic.run_until_parked();
+        executor.run_until_parked();
         assert_eq!(*auth_count.lock(), 2);
         assert_eq!(*dropped_auth_count.lock(), 1);
     }
@@ -1611,8 +1563,6 @@ mod tests {
 
     #[gpui::test]
     async fn test_subscribing_to_entity(cx: &mut TestAppContext) {
-        cx.foreground().forbid_parking();
-
         let user_id = 5;
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
         let server = FakeServer::for_client(user_id, &client, cx).await;
@@ -1620,8 +1570,8 @@ mod tests {
         let (done_tx1, mut done_rx1) = smol::channel::unbounded();
         let (done_tx2, mut done_rx2) = smol::channel::unbounded();
         client.add_model_message_handler(
-            move |model: ModelHandle<Model>, _: TypedEnvelope<proto::JoinProject>, _, cx| {
-                match model.read_with(&cx, |model, _| model.id) {
+            move |model: Model<TestModel>, _: TypedEnvelope<proto::JoinProject>, _, mut cx| {
+                match model.update(&mut cx, |model, _| model.id).unwrap() {
                     1 => done_tx1.try_send(()).unwrap(),
                     2 => done_tx2.try_send(()).unwrap(),
                     _ => unreachable!(),
@@ -1629,15 +1579,15 @@ mod tests {
                 async { Ok(()) }
             },
         );
-        let model1 = cx.add_model(|_| Model {
+        let model1 = cx.new_model(|_| TestModel {
             id: 1,
             subscription: None,
         });
-        let model2 = cx.add_model(|_| Model {
+        let model2 = cx.new_model(|_| TestModel {
             id: 2,
             subscription: None,
         });
-        let model3 = cx.add_model(|_| Model {
+        let model3 = cx.new_model(|_| TestModel {
             id: 3,
             subscription: None,
         });

crates/client/src/telemetry.rs 🔗

@@ -1,9 +1,11 @@
 use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
 use chrono::{DateTime, Utc};
-use gpui::{executor::Background, serde_json, AppContext, Task};
+use futures::Future;
+use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use serde::Serialize;
+use settings::Settings;
 use std::{env, io::Write, mem, path::PathBuf, sync::Arc, time::Duration};
 use sysinfo::{
     CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
@@ -14,19 +16,16 @@ use util::{channel::ReleaseChannel, TryFutureExt};
 
 pub struct Telemetry {
     http_client: Arc<dyn HttpClient>,
-    executor: Arc<Background>,
+    executor: BackgroundExecutor,
     state: Mutex<TelemetryState>,
 }
 
-#[derive(Default)]
 struct TelemetryState {
     metrics_id: Option<Arc<str>>,      // Per logged-in user
     installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
     session_id: Option<Arc<str>>,      // Per app launch
-    app_version: Option<Arc<str>>,
     release_channel: Option<&'static str>,
-    os_name: &'static str,
-    os_version: Option<Arc<str>>,
+    app_metadata: AppMetadata,
     architecture: &'static str,
     clickhouse_events_queue: Vec<ClickhouseEventWrapper>,
     flush_clickhouse_events_task: Option<Task<()>>,
@@ -48,9 +47,9 @@ struct ClickhouseEventRequestBody {
     installation_id: Option<Arc<str>>,
     session_id: Option<Arc<str>>,
     is_staff: Option<bool>,
-    app_version: Option<Arc<str>>,
+    app_version: Option<String>,
     os_name: &'static str,
-    os_version: Option<Arc<str>>,
+    os_version: Option<String>,
     architecture: &'static str,
     release_channel: Option<&'static str>,
     events: Vec<ClickhouseEventWrapper>,
@@ -130,25 +129,23 @@ const MAX_QUEUE_LEN: usize = 50;
 const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
 
 #[cfg(not(debug_assertions))]
-const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(120);
+const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(60 * 5);
 
 impl Telemetry {
-    pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
-        let platform = cx.platform();
+    pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
         let release_channel = if cx.has_global::<ReleaseChannel>() {
             Some(cx.global::<ReleaseChannel>().display_name())
         } else {
             None
         };
+
         // TODO: Replace all hardware stuff with nested SystemSpecs json
         let this = Arc::new(Self {
             http_client: client,
-            executor: cx.background().clone(),
+            executor: cx.background_executor().clone(),
             state: Mutex::new(TelemetryState {
-                os_name: platform.os_name().into(),
-                os_version: platform.os_version().ok().map(|v| v.to_string().into()),
+                app_metadata: cx.app_metadata(),
                 architecture: env::consts::ARCH,
-                app_version: platform.app_version().ok().map(|v| v.to_string().into()),
                 release_channel,
                 installation_id: None,
                 metrics_id: None,
@@ -161,9 +158,30 @@ impl Telemetry {
             }),
         });
 
+        // We should only ever have one instance of Telemetry, leak the subscription to keep it alive
+        // rather than store in TelemetryState, complicating spawn as subscriptions are not Send
+        std::mem::forget(cx.on_app_quit({
+            let this = this.clone();
+            move |cx| this.shutdown_telemetry(cx)
+        }));
+
         this
     }
 
+    #[cfg(any(test, feature = "test-support"))]
+    fn shutdown_telemetry(self: &Arc<Self>, _: &mut AppContext) -> impl Future<Output = ()> {
+        Task::ready(())
+    }
+
+    // Skip calling this function in tests.
+    // TestAppContext ends up calling this function on shutdown and it panics when trying to find the TelemetrySettings
+    #[cfg(not(any(test, feature = "test-support")))]
+    fn shutdown_telemetry(self: &Arc<Self>, cx: &mut AppContext) -> impl Future<Output = ()> {
+        let telemetry_settings = TelemetrySettings::get_global(cx).clone();
+        self.report_app_event(telemetry_settings, "close", true);
+        Task::ready(())
+    }
+
     pub fn log_file_path(&self) -> Option<PathBuf> {
         Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
     }
@@ -180,7 +198,7 @@ impl Telemetry {
         drop(state);
 
         let this = self.clone();
-        cx.spawn(|mut cx| async move {
+        cx.spawn(|cx| async move {
             // Avoiding calling `System::new_all()`, as there have been crashes related to it
             let refresh_kind = RefreshKind::new()
                 .with_memory() // For memory usage
@@ -209,7 +227,13 @@ impl Telemetry {
                     return;
                 };
 
-                let telemetry_settings = cx.update(|cx| *settings::get::<TelemetrySettings>(cx));
+                let telemetry_settings = if let Ok(telemetry_settings) =
+                    cx.update(|cx| *TelemetrySettings::get_global(cx))
+                {
+                    telemetry_settings
+                } else {
+                    break;
+                };
 
                 this.report_memory_event(
                     telemetry_settings,
@@ -232,7 +256,7 @@ impl Telemetry {
         is_staff: bool,
         cx: &AppContext,
     ) {
-        if !settings::get::<TelemetrySettings>(cx).metrics {
+        if !TelemetrySettings::get_global(cx).metrics {
             return;
         }
 
@@ -461,9 +485,15 @@ impl Telemetry {
                             installation_id: state.installation_id.clone(),
                             session_id: state.session_id.clone(),
                             is_staff: state.is_staff.clone(),
-                            app_version: state.app_version.clone(),
-                            os_name: state.os_name,
-                            os_version: state.os_version.clone(),
+                            app_version: state
+                                .app_metadata
+                                .app_version
+                                .map(|version| version.to_string()),
+                            os_name: state.app_metadata.os_name,
+                            os_version: state
+                                .app_metadata
+                                .os_version
+                                .map(|version| version.to_string()),
                             architecture: state.architecture,
 
                             release_channel: state.release_channel,

crates/client/src/test.rs 🔗

@@ -1,20 +1,19 @@
 use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
 use anyhow::{anyhow, Result};
 use futures::{stream::BoxStream, StreamExt};
-use gpui::{executor, ModelHandle, TestAppContext};
+use gpui::{BackgroundExecutor, Context, Model, TestAppContext};
 use parking_lot::Mutex;
 use rpc::{
     proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
     ConnectionId, Peer, Receipt, TypedEnvelope,
 };
-use std::{rc::Rc, sync::Arc};
-use util::http::FakeHttpClient;
+use std::sync::Arc;
 
 pub struct FakeServer {
     peer: Arc<Peer>,
     state: Arc<Mutex<FakeServerState>>,
     user_id: u64,
-    executor: Rc<executor::Foreground>,
+    executor: BackgroundExecutor,
 }
 
 #[derive(Default)]
@@ -36,7 +35,7 @@ impl FakeServer {
             peer: Peer::new(0),
             state: Default::default(),
             user_id: client_user_id,
-            executor: cx.foreground(),
+            executor: cx.executor(),
         };
 
         client
@@ -78,10 +77,11 @@ impl FakeServer {
                             Err(EstablishConnectionError::Unauthorized)?
                         }
 
-                        let (client_conn, server_conn, _) = Connection::in_memory(cx.background());
+                        let (client_conn, server_conn, _) =
+                            Connection::in_memory(cx.background_executor().clone());
                         let (connection_id, io, incoming) =
-                            peer.add_test_connection(server_conn, cx.background());
-                        cx.background().spawn(io).detach();
+                            peer.add_test_connection(server_conn, cx.background_executor().clone());
+                        cx.background_executor().spawn(io).detach();
                         {
                             let mut state = state.lock();
                             state.connection_id = Some(connection_id);
@@ -193,9 +193,8 @@ impl FakeServer {
         &self,
         client: Arc<Client>,
         cx: &mut TestAppContext,
-    ) -> ModelHandle<UserStore> {
-        let http_client = FakeHttpClient::with_404_response();
-        let user_store = cx.add_model(|cx| UserStore::new(client, http_client, cx));
+    ) -> Model<UserStore> {
+        let user_store = cx.new_model(|cx| UserStore::new(client, cx));
         assert_eq!(
             self.receive::<proto::GetUsers>()
                 .await

crates/client/src/user.rs 🔗

@@ -2,13 +2,12 @@ use super::{proto, Client, Status, TypedEnvelope};
 use anyhow::{anyhow, Context, Result};
 use collections::{hash_map::Entry, HashMap, HashSet};
 use feature_flags::FeatureFlagAppExt;
-use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
-use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
+use futures::{channel::mpsc, Future, StreamExt};
+use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, SharedString, Task};
 use postage::{sink::Sink, watch};
 use rpc::proto::{RequestMessage, UsersResponse};
 use std::sync::{Arc, Weak};
 use text::ReplicaId;
-use util::http::HttpClient;
 use util::TryFutureExt as _;
 
 pub type UserId = u64;
@@ -20,7 +19,7 @@ pub struct ParticipantIndex(pub u32);
 pub struct User {
     pub id: UserId,
     pub github_login: String,
-    pub avatar: Option<Arc<ImageData>>,
+    pub avatar_uri: SharedString,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -76,9 +75,8 @@ pub struct UserStore {
     pending_contact_requests: HashMap<u64, usize>,
     invite_info: Option<InviteInfo>,
     client: Weak<Client>,
-    http: Arc<dyn HttpClient>,
     _maintain_contacts: Task<()>,
-    _maintain_current_user: Task<()>,
+    _maintain_current_user: Task<Result<()>>,
 }
 
 #[derive(Clone)]
@@ -103,9 +101,7 @@ pub enum ContactEventKind {
     Cancelled,
 }
 
-impl Entity for UserStore {
-    type Event = Event;
-}
+impl EventEmitter<Event> for UserStore {}
 
 enum UpdateContacts {
     Update(proto::UpdateContacts),
@@ -114,17 +110,13 @@ enum UpdateContacts {
 }
 
 impl UserStore {
-    pub fn new(
-        client: Arc<Client>,
-        http: Arc<dyn HttpClient>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
+    pub fn new(client: Arc<Client>, cx: &mut ModelContext<Self>) -> Self {
         let (mut current_user_tx, current_user_rx) = watch::channel();
         let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded();
         let rpc_subscriptions = vec![
-            client.add_message_handler(cx.handle(), Self::handle_update_contacts),
-            client.add_message_handler(cx.handle(), Self::handle_update_invite_info),
-            client.add_message_handler(cx.handle(), Self::handle_show_contacts),
+            client.add_message_handler(cx.weak_model(), Self::handle_update_contacts),
+            client.add_message_handler(cx.weak_model(), Self::handle_update_invite_info),
+            client.add_message_handler(cx.weak_model(), Self::handle_show_contacts),
         ];
         Self {
             users: Default::default(),
@@ -136,76 +128,71 @@ impl UserStore {
             invite_info: None,
             client: Arc::downgrade(&client),
             update_contacts_tx,
-            http,
-            _maintain_contacts: cx.spawn_weak(|this, mut cx| async move {
+            _maintain_contacts: cx.spawn(|this, mut cx| async move {
                 let _subscriptions = rpc_subscriptions;
                 while let Some(message) = update_contacts_rx.next().await {
-                    if let Some(this) = this.upgrade(&cx) {
+                    if let Ok(task) =
                         this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
-                            .log_err()
-                            .await;
+                    {
+                        task.log_err().await;
+                    } else {
+                        break;
                     }
                 }
             }),
-            _maintain_current_user: cx.spawn_weak(|this, mut cx| async move {
+            _maintain_current_user: cx.spawn(|this, mut cx| async move {
                 let mut status = client.status();
                 while let Some(status) = status.next().await {
                     match status {
                         Status::Connected { .. } => {
-                            if let Some((this, user_id)) = this.upgrade(&cx).zip(client.user_id()) {
-                                let fetch_user = this
-                                    .update(&mut cx, |this, cx| this.get_user(user_id, cx))
-                                    .log_err();
+                            if let Some(user_id) = client.user_id() {
+                                let fetch_user = if let Ok(fetch_user) = this
+                                    .update(&mut cx, |this, cx| {
+                                        this.get_user(user_id, cx).log_err()
+                                    }) {
+                                    fetch_user
+                                } else {
+                                    break;
+                                };
                                 let fetch_metrics_id =
                                     client.request(proto::GetPrivateUserInfo {}).log_err();
                                 let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
 
-                                if let Some(info) = info {
-                                    cx.update(|cx| {
+                                cx.update(|cx| {
+                                    if let Some(info) = info {
                                         cx.update_flags(info.staff, info.flags);
                                         client.telemetry.set_authenticated_user_info(
                                             Some(info.metrics_id.clone()),
                                             info.staff,
                                             cx,
                                         )
-                                    });
-                                } else {
-                                    cx.read(|cx| {
-                                        client
-                                            .telemetry
-                                            .set_authenticated_user_info(None, false, cx)
-                                    });
-                                }
+                                    }
+                                })?;
 
                                 current_user_tx.send(user).await.ok();
 
-                                this.update(&mut cx, |_, cx| {
-                                    cx.notify();
-                                });
+                                this.update(&mut cx, |_, cx| cx.notify())?;
                             }
                         }
                         Status::SignedOut => {
                             current_user_tx.send(None).await.ok();
-                            if let Some(this) = this.upgrade(&cx) {
-                                this.update(&mut cx, |this, cx| {
-                                    cx.notify();
-                                    this.clear_contacts()
-                                })
-                                .await;
-                            }
+                            this.update(&mut cx, |this, cx| {
+                                cx.notify();
+                                this.clear_contacts()
+                            })?
+                            .await;
                         }
                         Status::ConnectionLost => {
-                            if let Some(this) = this.upgrade(&cx) {
-                                this.update(&mut cx, |this, cx| {
-                                    cx.notify();
-                                    this.clear_contacts()
-                                })
-                                .await;
-                            }
+                            this.update(&mut cx, |this, cx| {
+                                cx.notify();
+                                this.clear_contacts()
+                            })?
+                            .await;
                         }
                         _ => {}
                     }
                 }
+                Ok(())
             }),
             pending_contact_requests: Default::default(),
         }
@@ -217,7 +204,7 @@ impl UserStore {
     }
 
     async fn handle_update_invite_info(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::UpdateInviteInfo>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -228,17 +215,17 @@ impl UserStore {
                 count: message.payload.count,
             });
             cx.notify();
-        });
+        })?;
         Ok(())
     }
 
     async fn handle_show_contacts(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         _: TypedEnvelope<proto::ShowContacts>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
     ) -> Result<()> {
-        this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts));
+        this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?;
         Ok(())
     }
 
@@ -247,7 +234,7 @@ impl UserStore {
     }
 
     async fn handle_update_contacts(
-        this: ModelHandle<Self>,
+        this: Model<Self>,
         message: TypedEnvelope<proto::UpdateContacts>,
         _: Arc<Client>,
         mut cx: AsyncAppContext,
@@ -256,7 +243,7 @@ impl UserStore {
             this.update_contacts_tx
                 .unbounded_send(UpdateContacts::Update(message.payload))
                 .unwrap();
-        });
+        })?;
         Ok(())
     }
 
@@ -292,6 +279,9 @@ impl UserStore {
                     // Users are fetched in parallel above and cached in call to get_users
                     // No need to paralellize here
                     let mut updated_contacts = Vec::new();
+                    let this = this
+                        .upgrade()
+                        .ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
                     for contact in message.contacts {
                         updated_contacts.push(Arc::new(
                             Contact::from_proto(contact, &this, &mut cx).await?,
@@ -300,18 +290,18 @@ impl UserStore {
 
                     let mut incoming_requests = Vec::new();
                     for request in message.incoming_requests {
-                        incoming_requests.push(
+                        incoming_requests.push({
                             this.update(&mut cx, |this, cx| {
                                 this.get_user(request.requester_id, cx)
-                            })
-                            .await?,
-                        );
+                            })?
+                            .await?
+                        });
                     }
 
                     let mut outgoing_requests = Vec::new();
                     for requested_user_id in message.outgoing_requests {
                         outgoing_requests.push(
-                            this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))
+                            this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))?
                                 .await?,
                         );
                     }
@@ -378,7 +368,7 @@ impl UserStore {
                         }
 
                         cx.notify();
-                    });
+                    })?;
 
                     Ok(())
                 })
@@ -400,12 +390,6 @@ impl UserStore {
         &self.incoming_contact_requests
     }
 
-    pub fn has_incoming_contact_request(&self, user_id: u64) -> bool {
-        self.incoming_contact_requests
-            .iter()
-            .any(|user| user.id == user_id)
-    }
-
     pub fn outgoing_contact_requests(&self) -> &[Arc<User>] {
         &self.outgoing_contact_requests
     }
@@ -454,6 +438,12 @@ impl UserStore {
         self.perform_contact_request(user_id, proto::RemoveContact { user_id }, cx)
     }
 
+    pub fn has_incoming_contact_request(&self, user_id: u64) -> bool {
+        self.incoming_contact_requests
+            .iter()
+            .any(|user| user.id == user_id)
+    }
+
     pub fn respond_to_contact_request(
         &mut self,
         requester_id: u64,
@@ -480,7 +470,7 @@ impl UserStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.upgrade();
-        cx.spawn_weak(|_, _| async move {
+        cx.spawn(move |_, _| async move {
             client
                 .ok_or_else(|| anyhow!("can't upgrade client reference"))?
                 .request(proto::RespondToContactRequest {
@@ -502,7 +492,7 @@ impl UserStore {
         *self.pending_contact_requests.entry(user_id).or_insert(0) += 1;
         cx.notify();
 
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let response = client
                 .ok_or_else(|| anyhow!("can't upgrade client reference"))?
                 .request(request)
@@ -517,7 +507,7 @@ impl UserStore {
                     }
                 }
                 cx.notify();
-            });
+            })?;
             response?;
             Ok(())
         })
@@ -560,11 +550,11 @@ impl UserStore {
                         },
                         cx,
                     )
-                })
+                })?
                 .await?;
             }
 
-            this.read_with(&cx, |this, _| {
+            this.update(&mut cx, |this, _| {
                 user_ids
                     .iter()
                     .map(|user_id| {
@@ -574,7 +564,7 @@ impl UserStore {
                             .ok_or_else(|| anyhow!("user {} not found", user_id))
                     })
                     .collect()
-            })
+            })?
         })
     }
 
@@ -596,18 +586,18 @@ impl UserStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<Arc<User>>> {
         if let Some(user) = self.users.get(&user_id).cloned() {
-            return cx.foreground().spawn(async move { Ok(user) });
+            return Task::ready(Ok(user));
         }
 
         let load_users = self.get_users(vec![user_id], cx);
-        cx.spawn(|this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             load_users.await?;
             this.update(&mut cx, |this, _| {
                 this.users
                     .get(&user_id)
                     .cloned()
                     .ok_or_else(|| anyhow!("server responded with no users"))
-            })
+            })?
         })
     }
 
@@ -625,25 +615,22 @@ impl UserStore {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<Vec<Arc<User>>>> {
         let client = self.client.clone();
-        let http = self.http.clone();
-        cx.spawn_weak(|this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             if let Some(rpc) = client.upgrade() {
                 let response = rpc.request(request).await.context("error loading users")?;
-                let users = future::join_all(
-                    response
-                        .users
-                        .into_iter()
-                        .map(|user| User::new(user, http.as_ref())),
-                )
-                .await;
+                let users = response
+                    .users
+                    .into_iter()
+                    .map(|user| User::new(user))
+                    .collect::<Vec<_>>();
+
+                this.update(&mut cx, |this, _| {
+                    for user in &users {
+                        this.users.insert(user.id, user.clone());
+                    }
+                })
+                .ok();
 
-                if let Some(this) = this.upgrade(&cx) {
-                    this.update(&mut cx, |this, _| {
-                        for user in &users {
-                            this.users.insert(user.id, user.clone());
-                        }
-                    });
-                }
                 Ok(users)
             } else {
                 Ok(Vec::new())
@@ -668,11 +655,11 @@ impl UserStore {
 }
 
 impl User {
-    async fn new(message: proto::User, http: &dyn HttpClient) -> Arc<Self> {
+    fn new(message: proto::User) -> Arc<Self> {
         Arc::new(User {
             id: message.id,
             github_login: message.github_login,
-            avatar: fetch_avatar(http, &message.avatar_url).warn_on_err().await,
+            avatar_uri: message.avatar_url.into(),
         })
     }
 }
@@ -680,13 +667,13 @@ impl User {
 impl Contact {
     async fn from_proto(
         contact: proto::Contact,
-        user_store: &ModelHandle<UserStore>,
+        user_store: &Model<UserStore>,
         cx: &mut AsyncAppContext,
     ) -> Result<Self> {
         let user = user_store
             .update(cx, |user_store, cx| {
                 user_store.get_user(contact.user_id, cx)
-            })
+            })?
             .await?;
         Ok(Self {
             user,
@@ -705,24 +692,3 @@ impl Collaborator {
         })
     }
 }
-
-async fn fetch_avatar(http: &dyn HttpClient, url: &str) -> Result<Arc<ImageData>> {
-    let mut response = http
-        .get(url, Default::default(), true)
-        .await
-        .map_err(|e| anyhow!("failed to send user avatar request: {}", e))?;
-
-    if !response.status().is_success() {
-        return Err(anyhow!("avatar request failed {:?}", response.status()));
-    }
-
-    let mut body = Vec::new();
-    response
-        .body_mut()
-        .read_to_end(&mut body)
-        .await
-        .map_err(|e| anyhow!("failed to read user avatar response body: {}", e))?;
-    let format = image::guess_format(&body)?;
-    let image = image::load_from_memory_with_format(&body, format)?.into_bgra8();
-    Ok(ImageData::new(image))
-}

crates/client2/Cargo.toml 🔗

@@ -1,53 +0,0 @@
-[package]
-name = "client2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/client2.rs"
-doctest = false
-
-[features]
-test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
-
-[dependencies]
-chrono = { version = "0.4", features = ["serde"] }
-collections = { path = "../collections" }
-db = { package = "db2", path = "../db2" }
-gpui = { package = "gpui2", path = "../gpui2" }
-util = { path = "../util" }
-rpc = { package = "rpc2", path = "../rpc2" }
-text = { package = "text2",  path = "../text2" }
-settings = { package = "settings2", path = "../settings2" }
-feature_flags = { path = "../feature_flags" }
-sum_tree = { path = "../sum_tree" }
-
-anyhow.workspace = true
-async-recursion = "0.3"
-async-tungstenite = { version = "0.16", features = ["async-tls"] }
-futures.workspace = true
-image = "0.23"
-lazy_static.workspace = true
-log.workspace = true
-parking_lot.workspace = true
-postage.workspace = true
-rand.workspace = true
-schemars.workspace = true
-serde.workspace = true
-serde_derive.workspace = true
-smol.workspace = true
-sysinfo.workspace = true
-tempfile = "3"
-thiserror.workspace = true
-time.workspace = true
-tiny_http = "0.8"
-uuid.workspace = true
-url = "2.2"
-
-[dev-dependencies]
-collections = { path = "../collections", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
-util = { path = "../util", features = ["test-support"] }

crates/client2/src/client2.rs 🔗

@@ -1,1675 +0,0 @@
-#[cfg(any(test, feature = "test-support"))]
-pub mod test;
-
-pub mod telemetry;
-pub mod user;
-
-use anyhow::{anyhow, Context as _, Result};
-use async_recursion::async_recursion;
-use async_tungstenite::tungstenite::{
-    error::Error as WebsocketError,
-    http::{Request, StatusCode},
-};
-use futures::{
-    channel::oneshot, future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt,
-    TryFutureExt as _, TryStreamExt,
-};
-use gpui::{
-    actions, serde_json, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Model,
-    SemanticVersion, Task, WeakModel,
-};
-use lazy_static::lazy_static;
-use parking_lot::RwLock;
-use postage::watch;
-use rand::prelude::*;
-use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use settings::Settings;
-use std::{
-    any::TypeId,
-    collections::HashMap,
-    convert::TryFrom,
-    fmt::Write as _,
-    future::Future,
-    marker::PhantomData,
-    path::PathBuf,
-    sync::{atomic::AtomicU64, Arc, Weak},
-    time::{Duration, Instant},
-};
-use telemetry::Telemetry;
-use thiserror::Error;
-use url::Url;
-use util::channel::ReleaseChannel;
-use util::http::HttpClient;
-use util::{ResultExt, TryFutureExt};
-
-pub use rpc::*;
-pub use telemetry::ClickhouseEvent;
-pub use user::*;
-
-lazy_static! {
-    pub static ref ZED_SERVER_URL: String =
-        std::env::var("ZED_SERVER_URL").unwrap_or_else(|_| "https://zed.dev".to_string());
-    pub static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
-        .ok()
-        .and_then(|s| if s.is_empty() { None } else { Some(s) });
-    pub static ref ADMIN_API_TOKEN: Option<String> = std::env::var("ZED_ADMIN_API_TOKEN")
-        .ok()
-        .and_then(|s| if s.is_empty() { None } else { Some(s) });
-    pub static ref ZED_APP_VERSION: Option<SemanticVersion> = std::env::var("ZED_APP_VERSION")
-        .ok()
-        .and_then(|v| v.parse().ok());
-    pub static ref ZED_APP_PATH: Option<PathBuf> =
-        std::env::var("ZED_APP_PATH").ok().map(PathBuf::from);
-    pub static ref ZED_ALWAYS_ACTIVE: bool =
-        std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| e.len() > 0);
-}
-
-pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";
-pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(100);
-pub const CONNECTION_TIMEOUT: Duration = Duration::from_secs(5);
-
-actions!(client, [SignIn, SignOut, Reconnect]);
-
-pub fn init_settings(cx: &mut AppContext) {
-    TelemetrySettings::register(cx);
-}
-
-pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
-    init_settings(cx);
-
-    let client = Arc::downgrade(client);
-    cx.on_action({
-        let client = client.clone();
-        move |_: &SignIn, cx| {
-            if let Some(client) = client.upgrade() {
-                cx.spawn(
-                    |cx| async move { client.authenticate_and_connect(true, &cx).log_err().await },
-                )
-                .detach();
-            }
-        }
-    });
-
-    cx.on_action({
-        let client = client.clone();
-        move |_: &SignOut, cx| {
-            if let Some(client) = client.upgrade() {
-                cx.spawn(|cx| async move {
-                    client.disconnect(&cx);
-                })
-                .detach();
-            }
-        }
-    });
-
-    cx.on_action({
-        let client = client.clone();
-        move |_: &Reconnect, cx| {
-            if let Some(client) = client.upgrade() {
-                cx.spawn(|cx| async move {
-                    client.reconnect(&cx);
-                })
-                .detach();
-            }
-        }
-    });
-}
-
-pub struct Client {
-    id: AtomicU64,
-    peer: Arc<Peer>,
-    http: Arc<dyn HttpClient>,
-    telemetry: Arc<Telemetry>,
-    state: RwLock<ClientState>,
-
-    #[allow(clippy::type_complexity)]
-    #[cfg(any(test, feature = "test-support"))]
-    authenticate: RwLock<
-        Option<Box<dyn 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>>>,
-    >,
-
-    #[allow(clippy::type_complexity)]
-    #[cfg(any(test, feature = "test-support"))]
-    establish_connection: RwLock<
-        Option<
-            Box<
-                dyn 'static
-                    + Send
-                    + Sync
-                    + Fn(
-                        &Credentials,
-                        &AsyncAppContext,
-                    ) -> Task<Result<Connection, EstablishConnectionError>>,
-            >,
-        >,
-    >,
-}
-
-#[derive(Error, Debug)]
-pub enum EstablishConnectionError {
-    #[error("upgrade required")]
-    UpgradeRequired,
-    #[error("unauthorized")]
-    Unauthorized,
-    #[error("{0}")]
-    Other(#[from] anyhow::Error),
-    #[error("{0}")]
-    Http(#[from] util::http::Error),
-    #[error("{0}")]
-    Io(#[from] std::io::Error),
-    #[error("{0}")]
-    Websocket(#[from] async_tungstenite::tungstenite::http::Error),
-}
-
-impl From<WebsocketError> for EstablishConnectionError {
-    fn from(error: WebsocketError) -> Self {
-        if let WebsocketError::Http(response) = &error {
-            match response.status() {
-                StatusCode::UNAUTHORIZED => return EstablishConnectionError::Unauthorized,
-                StatusCode::UPGRADE_REQUIRED => return EstablishConnectionError::UpgradeRequired,
-                _ => {}
-            }
-        }
-        EstablishConnectionError::Other(error.into())
-    }
-}
-
-impl EstablishConnectionError {
-    pub fn other(error: impl Into<anyhow::Error> + Send + Sync) -> Self {
-        Self::Other(error.into())
-    }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub enum Status {
-    SignedOut,
-    UpgradeRequired,
-    Authenticating,
-    Connecting,
-    ConnectionError,
-    Connected {
-        peer_id: PeerId,
-        connection_id: ConnectionId,
-    },
-    ConnectionLost,
-    Reauthenticating,
-    Reconnecting,
-    ReconnectionError {
-        next_reconnection: Instant,
-    },
-}
-
-impl Status {
-    pub fn is_connected(&self) -> bool {
-        matches!(self, Self::Connected { .. })
-    }
-
-    pub fn is_signed_out(&self) -> bool {
-        matches!(self, Self::SignedOut | Self::UpgradeRequired)
-    }
-}
-
-struct ClientState {
-    credentials: Option<Credentials>,
-    status: (watch::Sender<Status>, watch::Receiver<Status>),
-    entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
-    _reconnect_task: Option<Task<()>>,
-    reconnect_interval: Duration,
-    entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
-    models_by_message_type: HashMap<TypeId, AnyWeakModel>,
-    entity_types_by_message_type: HashMap<TypeId, TypeId>,
-    #[allow(clippy::type_complexity)]
-    message_handlers: HashMap<
-        TypeId,
-        Arc<
-            dyn Send
-                + Sync
-                + Fn(
-                    AnyModel,
-                    Box<dyn AnyTypedEnvelope>,
-                    &Arc<Client>,
-                    AsyncAppContext,
-                ) -> LocalBoxFuture<'static, Result<()>>,
-        >,
-    >,
-}
-
-enum WeakSubscriber {
-    Entity { handle: AnyWeakModel },
-    Pending(Vec<Box<dyn AnyTypedEnvelope>>),
-}
-
-#[derive(Clone, Debug)]
-pub struct Credentials {
-    pub user_id: u64,
-    pub access_token: String,
-}
-
-impl Default for ClientState {
-    fn default() -> Self {
-        Self {
-            credentials: None,
-            status: watch::channel_with(Status::SignedOut),
-            entity_id_extractors: Default::default(),
-            _reconnect_task: None,
-            reconnect_interval: Duration::from_secs(5),
-            models_by_message_type: Default::default(),
-            entities_by_type_and_remote_id: Default::default(),
-            entity_types_by_message_type: Default::default(),
-            message_handlers: Default::default(),
-        }
-    }
-}
-
-pub enum Subscription {
-    Entity {
-        client: Weak<Client>,
-        id: (TypeId, u64),
-    },
-    Message {
-        client: Weak<Client>,
-        id: TypeId,
-    },
-}
-
-impl Drop for Subscription {
-    fn drop(&mut self) {
-        match self {
-            Subscription::Entity { client, id } => {
-                if let Some(client) = client.upgrade() {
-                    let mut state = client.state.write();
-                    let _ = state.entities_by_type_and_remote_id.remove(id);
-                }
-            }
-            Subscription::Message { client, id } => {
-                if let Some(client) = client.upgrade() {
-                    let mut state = client.state.write();
-                    let _ = state.entity_types_by_message_type.remove(id);
-                    let _ = state.message_handlers.remove(id);
-                }
-            }
-        }
-    }
-}
-
-pub struct PendingEntitySubscription<T: 'static> {
-    client: Arc<Client>,
-    remote_id: u64,
-    _entity_type: PhantomData<T>,
-    consumed: bool,
-}
-
-impl<T: 'static> PendingEntitySubscription<T> {
-    pub fn set_model(mut self, model: &Model<T>, cx: &mut AsyncAppContext) -> Subscription {
-        self.consumed = true;
-        let mut state = self.client.state.write();
-        let id = (TypeId::of::<T>(), self.remote_id);
-        let Some(WeakSubscriber::Pending(messages)) =
-            state.entities_by_type_and_remote_id.remove(&id)
-        else {
-            unreachable!()
-        };
-
-        state.entities_by_type_and_remote_id.insert(
-            id,
-            WeakSubscriber::Entity {
-                handle: model.downgrade().into(),
-            },
-        );
-        drop(state);
-        for message in messages {
-            self.client.handle_message(message, cx);
-        }
-        Subscription::Entity {
-            client: Arc::downgrade(&self.client),
-            id,
-        }
-    }
-}
-
-impl<T: 'static> Drop for PendingEntitySubscription<T> {
-    fn drop(&mut self) {
-        if !self.consumed {
-            let mut state = self.client.state.write();
-            if let Some(WeakSubscriber::Pending(messages)) = state
-                .entities_by_type_and_remote_id
-                .remove(&(TypeId::of::<T>(), self.remote_id))
-            {
-                for message in messages {
-                    log::info!("unhandled message {}", message.payload_type_name());
-                }
-            }
-        }
-    }
-}
-
-#[derive(Copy, Clone)]
-pub struct TelemetrySettings {
-    pub diagnostics: bool,
-    pub metrics: bool,
-}
-
-#[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
-pub struct TelemetrySettingsContent {
-    pub diagnostics: Option<bool>,
-    pub metrics: Option<bool>,
-}
-
-impl settings::Settings for TelemetrySettings {
-    const KEY: Option<&'static str> = Some("telemetry");
-
-    type FileContent = TelemetrySettingsContent;
-
-    fn load(
-        default_value: &Self::FileContent,
-        user_values: &[&Self::FileContent],
-        _: &mut AppContext,
-    ) -> Result<Self> {
-        Ok(Self {
-            diagnostics: user_values.first().and_then(|v| v.diagnostics).unwrap_or(
-                default_value
-                    .diagnostics
-                    .ok_or_else(Self::missing_default)?,
-            ),
-            metrics: user_values
-                .first()
-                .and_then(|v| v.metrics)
-                .unwrap_or(default_value.metrics.ok_or_else(Self::missing_default)?),
-        })
-    }
-}
-
-impl Client {
-    pub fn new(http: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
-        Arc::new(Self {
-            id: AtomicU64::new(0),
-            peer: Peer::new(0),
-            telemetry: Telemetry::new(http.clone(), cx),
-            http,
-            state: Default::default(),
-
-            #[cfg(any(test, feature = "test-support"))]
-            authenticate: Default::default(),
-            #[cfg(any(test, feature = "test-support"))]
-            establish_connection: Default::default(),
-        })
-    }
-
-    pub fn id(&self) -> u64 {
-        self.id.load(std::sync::atomic::Ordering::SeqCst)
-    }
-
-    pub fn http_client(&self) -> Arc<dyn HttpClient> {
-        self.http.clone()
-    }
-
-    pub fn set_id(&self, id: u64) -> &Self {
-        self.id.store(id, std::sync::atomic::Ordering::SeqCst);
-        self
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn teardown(&self) {
-        let mut state = self.state.write();
-        state._reconnect_task.take();
-        state.message_handlers.clear();
-        state.models_by_message_type.clear();
-        state.entities_by_type_and_remote_id.clear();
-        state.entity_id_extractors.clear();
-        self.peer.teardown();
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn override_authenticate<F>(&self, authenticate: F) -> &Self
-    where
-        F: 'static + Send + Sync + Fn(&AsyncAppContext) -> Task<Result<Credentials>>,
-    {
-        *self.authenticate.write() = Some(Box::new(authenticate));
-        self
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    pub fn override_establish_connection<F>(&self, connect: F) -> &Self
-    where
-        F: 'static
-            + Send
-            + Sync
-            + Fn(&Credentials, &AsyncAppContext) -> Task<Result<Connection, EstablishConnectionError>>,
-    {
-        *self.establish_connection.write() = Some(Box::new(connect));
-        self
-    }
-
-    pub fn user_id(&self) -> Option<u64> {
-        self.state
-            .read()
-            .credentials
-            .as_ref()
-            .map(|credentials| credentials.user_id)
-    }
-
-    pub fn peer_id(&self) -> Option<PeerId> {
-        if let Status::Connected { peer_id, .. } = &*self.status().borrow() {
-            Some(*peer_id)
-        } else {
-            None
-        }
-    }
-
-    pub fn status(&self) -> watch::Receiver<Status> {
-        self.state.read().status.1.clone()
-    }
-
-    fn set_status(self: &Arc<Self>, status: Status, cx: &AsyncAppContext) {
-        log::info!("set status on client {}: {:?}", self.id(), status);
-        let mut state = self.state.write();
-        *state.status.0.borrow_mut() = status;
-
-        match status {
-            Status::Connected { .. } => {
-                state._reconnect_task = None;
-            }
-            Status::ConnectionLost => {
-                let this = self.clone();
-                let reconnect_interval = state.reconnect_interval;
-                state._reconnect_task = Some(cx.spawn(move |cx| async move {
-                    #[cfg(any(test, feature = "test-support"))]
-                    let mut rng = StdRng::seed_from_u64(0);
-                    #[cfg(not(any(test, feature = "test-support")))]
-                    let mut rng = StdRng::from_entropy();
-
-                    let mut delay = INITIAL_RECONNECTION_DELAY;
-                    while let Err(error) = this.authenticate_and_connect(true, &cx).await {
-                        log::error!("failed to connect {}", error);
-                        if matches!(*this.status().borrow(), Status::ConnectionError) {
-                            this.set_status(
-                                Status::ReconnectionError {
-                                    next_reconnection: Instant::now() + delay,
-                                },
-                                &cx,
-                            );
-                            cx.background_executor().timer(delay).await;
-                            delay = delay
-                                .mul_f32(rng.gen_range(1.0..=2.0))
-                                .min(reconnect_interval);
-                        } else {
-                            break;
-                        }
-                    }
-                }));
-            }
-            Status::SignedOut | Status::UpgradeRequired => {
-                cx.update(|cx| self.telemetry.set_authenticated_user_info(None, false, cx))
-                    .log_err();
-                state._reconnect_task.take();
-            }
-            _ => {}
-        }
-    }
-
-    pub fn subscribe_to_entity<T>(
-        self: &Arc<Self>,
-        remote_id: u64,
-    ) -> Result<PendingEntitySubscription<T>>
-    where
-        T: 'static,
-    {
-        let id = (TypeId::of::<T>(), remote_id);
-
-        let mut state = self.state.write();
-        if state.entities_by_type_and_remote_id.contains_key(&id) {
-            return Err(anyhow!("already subscribed to entity"));
-        } else {
-            state
-                .entities_by_type_and_remote_id
-                .insert(id, WeakSubscriber::Pending(Default::default()));
-            Ok(PendingEntitySubscription {
-                client: self.clone(),
-                remote_id,
-                consumed: false,
-                _entity_type: PhantomData,
-            })
-        }
-    }
-
-    #[track_caller]
-    pub fn add_message_handler<M, E, H, F>(
-        self: &Arc<Self>,
-        entity: WeakModel<E>,
-        handler: H,
-    ) -> Subscription
-    where
-        M: EnvelopedMessage,
-        E: 'static,
-        H: 'static
-            + Sync
-            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
-            + Send
-            + Sync,
-        F: 'static + Future<Output = Result<()>>,
-    {
-        let message_type_id = TypeId::of::<M>();
-        let mut state = self.state.write();
-        state
-            .models_by_message_type
-            .insert(message_type_id, entity.into());
-
-        let prev_handler = state.message_handlers.insert(
-            message_type_id,
-            Arc::new(move |subscriber, envelope, client, cx| {
-                let subscriber = subscriber.downcast::<E>().unwrap();
-                let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
-                handler(subscriber, *envelope, client.clone(), cx).boxed_local()
-            }),
-        );
-        if prev_handler.is_some() {
-            let location = std::panic::Location::caller();
-            panic!(
-                "{}:{} registered handler for the same message {} twice",
-                location.file(),
-                location.line(),
-                std::any::type_name::<M>()
-            );
-        }
-
-        Subscription::Message {
-            client: Arc::downgrade(self),
-            id: message_type_id,
-        }
-    }
-
-    pub fn add_request_handler<M, E, H, F>(
-        self: &Arc<Self>,
-        model: WeakModel<E>,
-        handler: H,
-    ) -> Subscription
-    where
-        M: RequestMessage,
-        E: 'static,
-        H: 'static
-            + Sync
-            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
-            + Send
-            + Sync,
-        F: 'static + Future<Output = Result<M::Response>>,
-    {
-        self.add_message_handler(model, move |handle, envelope, this, cx| {
-            Self::respond_to_request(
-                envelope.receipt(),
-                handler(handle, envelope, this.clone(), cx),
-                this,
-            )
-        })
-    }
-
-    pub fn add_model_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
-    where
-        M: EntityMessage,
-        E: 'static,
-        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
-        F: 'static + Future<Output = Result<()>>,
-    {
-        self.add_entity_message_handler::<M, E, _, _>(move |subscriber, message, client, cx| {
-            handler(subscriber.downcast::<E>().unwrap(), message, client, cx)
-        })
-    }
-
-    fn add_entity_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
-    where
-        M: EntityMessage,
-        E: 'static,
-        H: 'static + Fn(AnyModel, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
-        F: 'static + Future<Output = Result<()>>,
-    {
-        let model_type_id = TypeId::of::<E>();
-        let message_type_id = TypeId::of::<M>();
-
-        let mut state = self.state.write();
-        state
-            .entity_types_by_message_type
-            .insert(message_type_id, model_type_id);
-        state
-            .entity_id_extractors
-            .entry(message_type_id)
-            .or_insert_with(|| {
-                |envelope| {
-                    envelope
-                        .as_any()
-                        .downcast_ref::<TypedEnvelope<M>>()
-                        .unwrap()
-                        .payload
-                        .remote_entity_id()
-                }
-            });
-        let prev_handler = state.message_handlers.insert(
-            message_type_id,
-            Arc::new(move |handle, envelope, client, cx| {
-                let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
-                handler(handle, *envelope, client.clone(), cx).boxed_local()
-            }),
-        );
-        if prev_handler.is_some() {
-            panic!("registered handler for the same message twice");
-        }
-    }
-
-    pub fn add_model_request_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
-    where
-        M: EntityMessage + RequestMessage,
-        E: 'static,
-        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
-        F: 'static + Future<Output = Result<M::Response>>,
-    {
-        self.add_model_message_handler(move |entity, envelope, client, cx| {
-            Self::respond_to_request::<M, _>(
-                envelope.receipt(),
-                handler(entity, envelope, client.clone(), cx),
-                client,
-            )
-        })
-    }
-
-    async fn respond_to_request<T: RequestMessage, F: Future<Output = Result<T::Response>>>(
-        receipt: Receipt<T>,
-        response: F,
-        client: Arc<Self>,
-    ) -> Result<()> {
-        match response.await {
-            Ok(response) => {
-                client.respond(receipt, response)?;
-                Ok(())
-            }
-            Err(error) => {
-                client.respond_with_error(
-                    receipt,
-                    proto::Error {
-                        message: format!("{:?}", error),
-                    },
-                )?;
-                Err(error)
-            }
-        }
-    }
-
-    pub fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool {
-        read_credentials_from_keychain(cx).is_some()
-    }
-
-    #[async_recursion(?Send)]
-    pub async fn authenticate_and_connect(
-        self: &Arc<Self>,
-        try_keychain: bool,
-        cx: &AsyncAppContext,
-    ) -> anyhow::Result<()> {
-        let was_disconnected = match *self.status().borrow() {
-            Status::SignedOut => true,
-            Status::ConnectionError
-            | Status::ConnectionLost
-            | Status::Authenticating { .. }
-            | Status::Reauthenticating { .. }
-            | Status::ReconnectionError { .. } => false,
-            Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => {
-                return Ok(())
-            }
-            Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
-        };
-
-        if was_disconnected {
-            self.set_status(Status::Authenticating, cx);
-        } else {
-            self.set_status(Status::Reauthenticating, cx)
-        }
-
-        let mut read_from_keychain = false;
-        let mut credentials = self.state.read().credentials.clone();
-        if credentials.is_none() && try_keychain {
-            credentials = read_credentials_from_keychain(cx);
-            read_from_keychain = credentials.is_some();
-        }
-        if credentials.is_none() {
-            let mut status_rx = self.status();
-            let _ = status_rx.next().await;
-            futures::select_biased! {
-                authenticate = self.authenticate(cx).fuse() => {
-                    match authenticate {
-                        Ok(creds) => credentials = Some(creds),
-                        Err(err) => {
-                            self.set_status(Status::ConnectionError, cx);
-                            return Err(err);
-                        }
-                    }
-                }
-                _ = status_rx.next().fuse() => {
-                    return Err(anyhow!("authentication canceled"));
-                }
-            }
-        }
-        let credentials = credentials.unwrap();
-        self.set_id(credentials.user_id);
-
-        if was_disconnected {
-            self.set_status(Status::Connecting, cx);
-        } else {
-            self.set_status(Status::Reconnecting, cx);
-        }
-
-        let mut timeout =
-            futures::FutureExt::fuse(cx.background_executor().timer(CONNECTION_TIMEOUT));
-        futures::select_biased! {
-            connection = self.establish_connection(&credentials, cx).fuse() => {
-                match connection {
-                    Ok(conn) => {
-                        self.state.write().credentials = Some(credentials.clone());
-                        if !read_from_keychain && IMPERSONATE_LOGIN.is_none() {
-                            write_credentials_to_keychain(credentials, cx).log_err();
-                        }
-
-                        futures::select_biased! {
-                            result = self.set_connection(conn, cx).fuse() => result,
-                            _ = timeout => {
-                                self.set_status(Status::ConnectionError, cx);
-                                Err(anyhow!("timed out waiting on hello message from server"))
-                            }
-                        }
-                    }
-                    Err(EstablishConnectionError::Unauthorized) => {
-                        self.state.write().credentials.take();
-                        if read_from_keychain {
-                            delete_credentials_from_keychain(cx).log_err();
-                            self.set_status(Status::SignedOut, cx);
-                            self.authenticate_and_connect(false, cx).await
-                        } else {
-                            self.set_status(Status::ConnectionError, cx);
-                            Err(EstablishConnectionError::Unauthorized)?
-                        }
-                    }
-                    Err(EstablishConnectionError::UpgradeRequired) => {
-                        self.set_status(Status::UpgradeRequired, cx);
-                        Err(EstablishConnectionError::UpgradeRequired)?
-                    }
-                    Err(error) => {
-                        self.set_status(Status::ConnectionError, cx);
-                        Err(error)?
-                    }
-                }
-            }
-            _ = &mut timeout => {
-                self.set_status(Status::ConnectionError, cx);
-                Err(anyhow!("timed out trying to establish connection"))
-            }
-        }
-    }
-
-    async fn set_connection(
-        self: &Arc<Self>,
-        conn: Connection,
-        cx: &AsyncAppContext,
-    ) -> Result<()> {
-        let executor = cx.background_executor();
-        log::info!("add connection to peer");
-        let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn, {
-            let executor = executor.clone();
-            move |duration| executor.timer(duration)
-        });
-        let handle_io = executor.spawn(handle_io);
-
-        let peer_id = async {
-            log::info!("waiting for server hello");
-            let message = incoming
-                .next()
-                .await
-                .ok_or_else(|| anyhow!("no hello message received"))?;
-            log::info!("got server hello");
-            let hello_message_type_name = message.payload_type_name().to_string();
-            let hello = message
-                .into_any()
-                .downcast::<TypedEnvelope<proto::Hello>>()
-                .map_err(|_| {
-                    anyhow!(
-                        "invalid hello message received: {:?}",
-                        hello_message_type_name
-                    )
-                })?;
-            let peer_id = hello
-                .payload
-                .peer_id
-                .ok_or_else(|| anyhow!("invalid peer id"))?;
-            Ok(peer_id)
-        };
-
-        let peer_id = match peer_id.await {
-            Ok(peer_id) => peer_id,
-            Err(error) => {
-                self.peer.disconnect(connection_id);
-                return Err(error);
-            }
-        };
-
-        log::info!(
-            "set status to connected (connection id: {:?}, peer id: {:?})",
-            connection_id,
-            peer_id
-        );
-        self.set_status(
-            Status::Connected {
-                peer_id,
-                connection_id,
-            },
-            cx,
-        );
-
-        cx.spawn({
-            let this = self.clone();
-            |cx| {
-                async move {
-                    while let Some(message) = incoming.next().await {
-                        this.handle_message(message, &cx);
-                        // Don't starve the main thread when receiving lots of messages at once.
-                        smol::future::yield_now().await;
-                    }
-                }
-            }
-        })
-        .detach();
-
-        cx.spawn({
-            let this = self.clone();
-            move |cx| async move {
-                match handle_io.await {
-                    Ok(()) => {
-                        if this.status().borrow().clone()
-                            == (Status::Connected {
-                                connection_id,
-                                peer_id,
-                            })
-                        {
-                            this.set_status(Status::SignedOut, &cx);
-                        }
-                    }
-                    Err(err) => {
-                        log::error!("connection error: {:?}", err);
-                        this.set_status(Status::ConnectionLost, &cx);
-                    }
-                }
-            }
-        })
-        .detach();
-
-        Ok(())
-    }
-
-    fn authenticate(self: &Arc<Self>, cx: &AsyncAppContext) -> Task<Result<Credentials>> {
-        #[cfg(any(test, feature = "test-support"))]
-        if let Some(callback) = self.authenticate.read().as_ref() {
-            return callback(cx);
-        }
-
-        self.authenticate_with_browser(cx)
-    }
-
-    fn establish_connection(
-        self: &Arc<Self>,
-        credentials: &Credentials,
-        cx: &AsyncAppContext,
-    ) -> Task<Result<Connection, EstablishConnectionError>> {
-        #[cfg(any(test, feature = "test-support"))]
-        if let Some(callback) = self.establish_connection.read().as_ref() {
-            return callback(credentials, cx);
-        }
-
-        self.establish_websocket_connection(credentials, cx)
-    }
-
-    async fn get_rpc_url(
-        http: Arc<dyn HttpClient>,
-        release_channel: Option<ReleaseChannel>,
-    ) -> Result<Url> {
-        let mut url = format!("{}/rpc", *ZED_SERVER_URL);
-        if let Some(preview_param) =
-            release_channel.and_then(|channel| channel.release_query_param())
-        {
-            url += "?";
-            url += preview_param;
-        }
-        let response = http.get(&url, Default::default(), false).await?;
-
-        // Normally, ZED_SERVER_URL is set to the URL of zed.dev website.
-        // The website's /rpc endpoint redirects to a collab server's /rpc endpoint,
-        // which requires authorization via an HTTP header.
-        //
-        // For testing purposes, ZED_SERVER_URL can also set to the direct URL of
-        // of a collab server. In that case, a request to the /rpc endpoint will
-        // return an 'unauthorized' response.
-        let collab_url = if response.status().is_redirection() {
-            response
-                .headers()
-                .get("Location")
-                .ok_or_else(|| anyhow!("missing location header in /rpc response"))?
-                .to_str()
-                .map_err(EstablishConnectionError::other)?
-                .to_string()
-        } else if response.status() == StatusCode::UNAUTHORIZED {
-            url
-        } else {
-            Err(anyhow!(
-                "unexpected /rpc response status {}",
-                response.status()
-            ))?
-        };
-
-        Url::parse(&collab_url).context("invalid rpc url")
-    }
-
-    fn establish_websocket_connection(
-        self: &Arc<Self>,
-        credentials: &Credentials,
-        cx: &AsyncAppContext,
-    ) -> Task<Result<Connection, EstablishConnectionError>> {
-        let release_channel = cx.try_read_global(|channel: &ReleaseChannel, _| *channel);
-
-        let request = Request::builder()
-            .header(
-                "Authorization",
-                format!("{} {}", credentials.user_id, credentials.access_token),
-            )
-            .header("x-zed-protocol-version", rpc::PROTOCOL_VERSION);
-
-        let http = self.http.clone();
-        cx.background_executor().spawn(async move {
-            let mut rpc_url = Self::get_rpc_url(http, release_channel).await?;
-            let rpc_host = rpc_url
-                .host_str()
-                .zip(rpc_url.port_or_known_default())
-                .ok_or_else(|| anyhow!("missing host in rpc url"))?;
-            let stream = smol::net::TcpStream::connect(rpc_host).await?;
-
-            log::info!("connected to rpc endpoint {}", rpc_url);
-
-            match rpc_url.scheme() {
-                "https" => {
-                    rpc_url.set_scheme("wss").unwrap();
-                    let request = request.uri(rpc_url.as_str()).body(())?;
-                    let (stream, _) =
-                        async_tungstenite::async_tls::client_async_tls(request, stream).await?;
-                    Ok(Connection::new(
-                        stream
-                            .map_err(|error| anyhow!(error))
-                            .sink_map_err(|error| anyhow!(error)),
-                    ))
-                }
-                "http" => {
-                    rpc_url.set_scheme("ws").unwrap();
-                    let request = request.uri(rpc_url.as_str()).body(())?;
-                    let (stream, _) = async_tungstenite::client_async(request, stream).await?;
-                    Ok(Connection::new(
-                        stream
-                            .map_err(|error| anyhow!(error))
-                            .sink_map_err(|error| anyhow!(error)),
-                    ))
-                }
-                _ => Err(anyhow!("invalid rpc url: {}", rpc_url))?,
-            }
-        })
-    }
-
-    pub fn authenticate_with_browser(
-        self: &Arc<Self>,
-        cx: &AsyncAppContext,
-    ) -> Task<Result<Credentials>> {
-        let http = self.http.clone();
-        cx.spawn(|cx| async move {
-            let background = cx.background_executor().clone();
-
-            let (open_url_tx, open_url_rx) = oneshot::channel::<String>();
-            cx.update(|cx| {
-                cx.spawn(move |cx| async move {
-                    let url = open_url_rx.await?;
-                    cx.update(|cx| cx.open_url(&url))
-                })
-                .detach_and_log_err(cx);
-            })
-            .log_err();
-
-            let credentials = background
-                .clone()
-                .spawn(async move {
-                    // Generate a pair of asymmetric encryption keys. The public key will be used by the
-                    // zed server to encrypt the user's access token, so that it can'be intercepted by
-                    // any other app running on the user's device.
-                    let (public_key, private_key) =
-                        rpc::auth::keypair().expect("failed to generate keypair for auth");
-                    let public_key_string = String::try_from(public_key)
-                        .expect("failed to serialize public key for auth");
-
-                    if let Some((login, token)) =
-                        IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref())
-                    {
-                        return Self::authenticate_as_admin(http, login.clone(), token.clone())
-                            .await;
-                    }
-
-                    // Start an HTTP server to receive the redirect from Zed's sign-in page.
-                    let server =
-                        tiny_http::Server::http("127.0.0.1:0").expect("failed to find open port");
-                    let port = server.server_addr().port();
-
-                    // Open the Zed sign-in page in the user's browser, with query parameters that indicate
-                    // that the user is signing in from a Zed app running on the same device.
-                    let mut url = format!(
-                        "{}/native_app_signin?native_app_port={}&native_app_public_key={}",
-                        *ZED_SERVER_URL, port, public_key_string
-                    );
-
-                    if let Some(impersonate_login) = IMPERSONATE_LOGIN.as_ref() {
-                        log::info!("impersonating user @{}", impersonate_login);
-                        write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
-                    }
-
-                    open_url_tx.send(url).log_err();
-
-                    // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
-                    // access token from the query params.
-                    //
-                    // TODO - Avoid ever starting more than one HTTP server. Maybe switch to using a
-                    // custom URL scheme instead of this local HTTP server.
-                    let (user_id, access_token) = background
-                        .spawn(async move {
-                            for _ in 0..100 {
-                                if let Some(req) = server.recv_timeout(Duration::from_secs(1))? {
-                                    let path = req.url();
-                                    let mut user_id = None;
-                                    let mut access_token = None;
-                                    let url = Url::parse(&format!("http://example.com{}", path))
-                                        .context("failed to parse login notification url")?;
-                                    for (key, value) in url.query_pairs() {
-                                        if key == "access_token" {
-                                            access_token = Some(value.to_string());
-                                        } else if key == "user_id" {
-                                            user_id = Some(value.to_string());
-                                        }
-                                    }
-
-                                    let post_auth_url =
-                                        format!("{}/native_app_signin_succeeded", *ZED_SERVER_URL);
-                                    req.respond(
-                                        tiny_http::Response::empty(302).with_header(
-                                            tiny_http::Header::from_bytes(
-                                                &b"Location"[..],
-                                                post_auth_url.as_bytes(),
-                                            )
-                                            .unwrap(),
-                                        ),
-                                    )
-                                    .context("failed to respond to login http request")?;
-                                    return Ok((
-                                        user_id
-                                            .ok_or_else(|| anyhow!("missing user_id parameter"))?,
-                                        access_token.ok_or_else(|| {
-                                            anyhow!("missing access_token parameter")
-                                        })?,
-                                    ));
-                                }
-                            }
-
-                            Err(anyhow!("didn't receive login redirect"))
-                        })
-                        .await?;
-
-                    let access_token = private_key
-                        .decrypt_string(&access_token)
-                        .context("failed to decrypt access token")?;
-
-                    Ok(Credentials {
-                        user_id: user_id.parse()?,
-                        access_token,
-                    })
-                })
-                .await?;
-
-            cx.update(|cx| cx.activate(true))?;
-            Ok(credentials)
-        })
-    }
-
-    async fn authenticate_as_admin(
-        http: Arc<dyn HttpClient>,
-        login: String,
-        mut api_token: String,
-    ) -> Result<Credentials> {
-        #[derive(Deserialize)]
-        struct AuthenticatedUserResponse {
-            user: User,
-        }
-
-        #[derive(Deserialize)]
-        struct User {
-            id: u64,
-        }
-
-        // Use the collab server's admin API to retrieve the id
-        // of the impersonated user.
-        let mut url = Self::get_rpc_url(http.clone(), None).await?;
-        url.set_path("/user");
-        url.set_query(Some(&format!("github_login={login}")));
-        let request = Request::get(url.as_str())
-            .header("Authorization", format!("token {api_token}"))
-            .body("".into())?;
-
-        let mut response = http.send(request).await?;
-        let mut body = String::new();
-        response.body_mut().read_to_string(&mut body).await?;
-        if !response.status().is_success() {
-            Err(anyhow!(
-                "admin user request failed {} - {}",
-                response.status().as_u16(),
-                body,
-            ))?;
-        }
-        let response: AuthenticatedUserResponse = serde_json::from_str(&body)?;
-
-        // Use the admin API token to authenticate as the impersonated user.
-        api_token.insert_str(0, "ADMIN_TOKEN:");
-        Ok(Credentials {
-            user_id: response.user.id,
-            access_token: api_token,
-        })
-    }
-
-    pub fn disconnect(self: &Arc<Self>, cx: &AsyncAppContext) {
-        self.peer.teardown();
-        self.set_status(Status::SignedOut, cx);
-    }
-
-    pub fn reconnect(self: &Arc<Self>, cx: &AsyncAppContext) {
-        self.peer.teardown();
-        self.set_status(Status::ConnectionLost, cx);
-    }
-
-    fn connection_id(&self) -> Result<ConnectionId> {
-        if let Status::Connected { connection_id, .. } = *self.status().borrow() {
-            Ok(connection_id)
-        } else {
-            Err(anyhow!("not connected"))
-        }
-    }
-
-    pub fn send<T: EnvelopedMessage>(&self, message: T) -> Result<()> {
-        log::debug!("rpc send. client_id:{}, name:{}", self.id(), T::NAME);
-        self.peer.send(self.connection_id()?, message)
-    }
-
-    pub fn request<T: RequestMessage>(
-        &self,
-        request: T,
-    ) -> impl Future<Output = Result<T::Response>> {
-        self.request_envelope(request)
-            .map_ok(|envelope| envelope.payload)
-    }
-
-    pub fn request_envelope<T: RequestMessage>(
-        &self,
-        request: T,
-    ) -> impl Future<Output = Result<TypedEnvelope<T::Response>>> {
-        let client_id = self.id();
-        log::debug!(
-            "rpc request start. client_id:{}. name:{}",
-            client_id,
-            T::NAME
-        );
-        let response = self
-            .connection_id()
-            .map(|conn_id| self.peer.request_envelope(conn_id, request));
-        async move {
-            let response = response?.await;
-            log::debug!(
-                "rpc request finish. client_id:{}. name:{}",
-                client_id,
-                T::NAME
-            );
-            response
-        }
-    }
-
-    fn respond<T: RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) -> Result<()> {
-        log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
-        self.peer.respond(receipt, response)
-    }
-
-    fn respond_with_error<T: RequestMessage>(
-        &self,
-        receipt: Receipt<T>,
-        error: proto::Error,
-    ) -> Result<()> {
-        log::debug!("rpc respond. client_id:{}. name:{}", self.id(), T::NAME);
-        self.peer.respond_with_error(receipt, error)
-    }
-
-    fn handle_message(
-        self: &Arc<Client>,
-        message: Box<dyn AnyTypedEnvelope>,
-        cx: &AsyncAppContext,
-    ) {
-        let mut state = self.state.write();
-        let type_name = message.payload_type_name();
-        let payload_type_id = message.payload_type_id();
-        let sender_id = message.original_sender_id();
-
-        let mut subscriber = None;
-
-        if let Some(handle) = state
-            .models_by_message_type
-            .get(&payload_type_id)
-            .and_then(|handle| handle.upgrade())
-        {
-            subscriber = Some(handle);
-        } else if let Some((extract_entity_id, entity_type_id)) =
-            state.entity_id_extractors.get(&payload_type_id).zip(
-                state
-                    .entity_types_by_message_type
-                    .get(&payload_type_id)
-                    .copied(),
-            )
-        {
-            let entity_id = (extract_entity_id)(message.as_ref());
-
-            match state
-                .entities_by_type_and_remote_id
-                .get_mut(&(entity_type_id, entity_id))
-            {
-                Some(WeakSubscriber::Pending(pending)) => {
-                    pending.push(message);
-                    return;
-                }
-                Some(weak_subscriber @ _) => match weak_subscriber {
-                    WeakSubscriber::Entity { handle } => {
-                        subscriber = handle.upgrade();
-                    }
-
-                    WeakSubscriber::Pending(_) => {}
-                },
-                _ => {}
-            }
-        }
-
-        let subscriber = if let Some(subscriber) = subscriber {
-            subscriber
-        } else {
-            log::info!("unhandled message {}", type_name);
-            self.peer.respond_with_unhandled_message(message).log_err();
-            return;
-        };
-
-        let handler = state.message_handlers.get(&payload_type_id).cloned();
-        // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
-        // It also ensures we don't hold the lock while yielding back to the executor, as
-        // that might cause the executor thread driving this future to block indefinitely.
-        drop(state);
-
-        if let Some(handler) = handler {
-            let future = handler(subscriber, message, &self, cx.clone());
-            let client_id = self.id();
-            log::debug!(
-                "rpc message received. client_id:{}, sender_id:{:?}, type:{}",
-                client_id,
-                sender_id,
-                type_name
-            );
-            cx.spawn(move |_| async move {
-                    match future.await {
-                        Ok(()) => {
-                            log::debug!(
-                                "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
-                                client_id,
-                                sender_id,
-                                type_name
-                            );
-                        }
-                        Err(error) => {
-                            log::error!(
-                                "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
-                                client_id,
-                                sender_id,
-                                type_name,
-                                error
-                            );
-                        }
-                    }
-                })
-                .detach();
-        } else {
-            log::info!("unhandled message {}", type_name);
-            self.peer.respond_with_unhandled_message(message).log_err();
-        }
-    }
-
-    pub fn telemetry(&self) -> &Arc<Telemetry> {
-        &self.telemetry
-    }
-}
-
-fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
-    if IMPERSONATE_LOGIN.is_some() {
-        return None;
-    }
-
-    let (user_id, access_token) = cx
-        .update(|cx| cx.read_credentials(&ZED_SERVER_URL).log_err().flatten())
-        .ok()??;
-
-    Some(Credentials {
-        user_id: user_id.parse().ok()?,
-        access_token: String::from_utf8(access_token).ok()?,
-    })
-}
-
-async fn write_credentials_to_keychain(
-    credentials: Credentials,
-    cx: &AsyncAppContext,
-) -> Result<()> {
-    cx.update(move |cx| {
-        cx.write_credentials(
-            &ZED_SERVER_URL,
-            &credentials.user_id.to_string(),
-            credentials.access_token.as_bytes(),
-        )
-    })?
-}
-
-async fn delete_credentials_from_keychain(cx: &AsyncAppContext) -> Result<()> {
-    cx.update(move |cx| cx.delete_credentials(&ZED_SERVER_URL))?
-}
-
-const WORKTREE_URL_PREFIX: &str = "zed://worktrees/";
-
-pub fn encode_worktree_url(id: u64, access_token: &str) -> String {
-    format!("{}{}/{}", WORKTREE_URL_PREFIX, id, access_token)
-}
-
-pub fn decode_worktree_url(url: &str) -> Option<(u64, String)> {
-    let path = url.trim().strip_prefix(WORKTREE_URL_PREFIX)?;
-    let mut parts = path.split('/');
-    let id = parts.next()?.parse::<u64>().ok()?;
-    let access_token = parts.next()?;
-    if access_token.is_empty() {
-        return None;
-    }
-    Some((id, access_token.to_string()))
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::test::FakeServer;
-
-    use gpui::{BackgroundExecutor, Context, TestAppContext};
-    use parking_lot::Mutex;
-    use std::future;
-    use util::http::FakeHttpClient;
-
-    #[gpui::test(iterations = 10)]
-    async fn test_reconnection(cx: &mut TestAppContext) {
-        let user_id = 5;
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let server = FakeServer::for_client(user_id, &client, cx).await;
-        let mut status = client.status();
-        assert!(matches!(
-            status.next().await,
-            Some(Status::Connected { .. })
-        ));
-        assert_eq!(server.auth_count(), 1);
-
-        server.forbid_connections();
-        server.disconnect();
-        while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
-
-        server.allow_connections();
-        cx.executor().advance_clock(Duration::from_secs(10));
-        while !matches!(status.next().await, Some(Status::Connected { .. })) {}
-        assert_eq!(server.auth_count(), 1); // Client reused the cached credentials when reconnecting
-
-        server.forbid_connections();
-        server.disconnect();
-        while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
-
-        // Clear cached credentials after authentication fails
-        server.roll_access_token();
-        server.allow_connections();
-        cx.executor().run_until_parked();
-        cx.executor().advance_clock(Duration::from_secs(10));
-        while !matches!(status.next().await, Some(Status::Connected { .. })) {}
-        assert_eq!(server.auth_count(), 2); // Client re-authenticated due to an invalid token
-    }
-
-    #[gpui::test(iterations = 10)]
-    async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
-        let user_id = 5;
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let mut status = client.status();
-
-        // Time out when client tries to connect.
-        client.override_authenticate(move |cx| {
-            cx.background_executor().spawn(async move {
-                Ok(Credentials {
-                    user_id,
-                    access_token: "token".into(),
-                })
-            })
-        });
-        client.override_establish_connection(|_, cx| {
-            cx.background_executor().spawn(async move {
-                future::pending::<()>().await;
-                unreachable!()
-            })
-        });
-        let auth_and_connect = cx.spawn({
-            let client = client.clone();
-            |cx| async move { client.authenticate_and_connect(false, &cx).await }
-        });
-        executor.run_until_parked();
-        assert!(matches!(status.next().await, Some(Status::Connecting)));
-
-        executor.advance_clock(CONNECTION_TIMEOUT);
-        assert!(matches!(
-            status.next().await,
-            Some(Status::ConnectionError { .. })
-        ));
-        auth_and_connect.await.unwrap_err();
-
-        // Allow the connection to be established.
-        let server = FakeServer::for_client(user_id, &client, cx).await;
-        assert!(matches!(
-            status.next().await,
-            Some(Status::Connected { .. })
-        ));
-
-        // Disconnect client.
-        server.forbid_connections();
-        server.disconnect();
-        while !matches!(status.next().await, Some(Status::ReconnectionError { .. })) {}
-
-        // Time out when re-establishing the connection.
-        server.allow_connections();
-        client.override_establish_connection(|_, cx| {
-            cx.background_executor().spawn(async move {
-                future::pending::<()>().await;
-                unreachable!()
-            })
-        });
-        executor.advance_clock(2 * INITIAL_RECONNECTION_DELAY);
-        assert!(matches!(
-            status.next().await,
-            Some(Status::Reconnecting { .. })
-        ));
-
-        executor.advance_clock(CONNECTION_TIMEOUT);
-        assert!(matches!(
-            status.next().await,
-            Some(Status::ReconnectionError { .. })
-        ));
-    }
-
-    #[gpui::test(iterations = 10)]
-    async fn test_authenticating_more_than_once(
-        cx: &mut TestAppContext,
-        executor: BackgroundExecutor,
-    ) {
-        let auth_count = Arc::new(Mutex::new(0));
-        let dropped_auth_count = Arc::new(Mutex::new(0));
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        client.override_authenticate({
-            let auth_count = auth_count.clone();
-            let dropped_auth_count = dropped_auth_count.clone();
-            move |cx| {
-                let auth_count = auth_count.clone();
-                let dropped_auth_count = dropped_auth_count.clone();
-                cx.background_executor().spawn(async move {
-                    *auth_count.lock() += 1;
-                    let _drop = util::defer(move || *dropped_auth_count.lock() += 1);
-                    future::pending::<()>().await;
-                    unreachable!()
-                })
-            }
-        });
-
-        let _authenticate = cx.spawn({
-            let client = client.clone();
-            move |cx| async move { client.authenticate_and_connect(false, &cx).await }
-        });
-        executor.run_until_parked();
-        assert_eq!(*auth_count.lock(), 1);
-        assert_eq!(*dropped_auth_count.lock(), 0);
-
-        let _authenticate = cx.spawn({
-            let client = client.clone();
-            |cx| async move { client.authenticate_and_connect(false, &cx).await }
-        });
-        executor.run_until_parked();
-        assert_eq!(*auth_count.lock(), 2);
-        assert_eq!(*dropped_auth_count.lock(), 1);
-    }
-
-    #[test]
-    fn test_encode_and_decode_worktree_url() {
-        let url = encode_worktree_url(5, "deadbeef");
-        assert_eq!(decode_worktree_url(&url), Some((5, "deadbeef".to_string())));
-        assert_eq!(
-            decode_worktree_url(&format!("\n {}\t", url)),
-            Some((5, "deadbeef".to_string()))
-        );
-        assert_eq!(decode_worktree_url("not://the-right-format"), None);
-    }
-
-    #[gpui::test]
-    async fn test_subscribing_to_entity(cx: &mut TestAppContext) {
-        let user_id = 5;
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let server = FakeServer::for_client(user_id, &client, cx).await;
-
-        let (done_tx1, mut done_rx1) = smol::channel::unbounded();
-        let (done_tx2, mut done_rx2) = smol::channel::unbounded();
-        client.add_model_message_handler(
-            move |model: Model<TestModel>, _: TypedEnvelope<proto::JoinProject>, _, mut cx| {
-                match model.update(&mut cx, |model, _| model.id).unwrap() {
-                    1 => done_tx1.try_send(()).unwrap(),
-                    2 => done_tx2.try_send(()).unwrap(),
-                    _ => unreachable!(),
-                }
-                async { Ok(()) }
-            },
-        );
-        let model1 = cx.new_model(|_| TestModel {
-            id: 1,
-            subscription: None,
-        });
-        let model2 = cx.new_model(|_| TestModel {
-            id: 2,
-            subscription: None,
-        });
-        let model3 = cx.new_model(|_| TestModel {
-            id: 3,
-            subscription: None,
-        });
-
-        let _subscription1 = client
-            .subscribe_to_entity(1)
-            .unwrap()
-            .set_model(&model1, &mut cx.to_async());
-        let _subscription2 = client
-            .subscribe_to_entity(2)
-            .unwrap()
-            .set_model(&model2, &mut cx.to_async());
-        // Ensure dropping a subscription for the same entity type still allows receiving of
-        // messages for other entity IDs of the same type.
-        let subscription3 = client
-            .subscribe_to_entity(3)
-            .unwrap()
-            .set_model(&model3, &mut cx.to_async());
-        drop(subscription3);
-
-        server.send(proto::JoinProject { project_id: 1 });
-        server.send(proto::JoinProject { project_id: 2 });
-        done_rx1.next().await.unwrap();
-        done_rx2.next().await.unwrap();
-    }
-
-    #[gpui::test]
-    async fn test_subscribing_after_dropping_subscription(cx: &mut TestAppContext) {
-        let user_id = 5;
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let server = FakeServer::for_client(user_id, &client, cx).await;
-
-        let model = cx.new_model(|_| TestModel::default());
-        let (done_tx1, _done_rx1) = smol::channel::unbounded();
-        let (done_tx2, mut done_rx2) = smol::channel::unbounded();
-        let subscription1 = client.add_message_handler(
-            model.downgrade(),
-            move |_, _: TypedEnvelope<proto::Ping>, _, _| {
-                done_tx1.try_send(()).unwrap();
-                async { Ok(()) }
-            },
-        );
-        drop(subscription1);
-        let _subscription2 = client.add_message_handler(
-            model.downgrade(),
-            move |_, _: TypedEnvelope<proto::Ping>, _, _| {
-                done_tx2.try_send(()).unwrap();
-                async { Ok(()) }
-            },
-        );
-        server.send(proto::Ping {});
-        done_rx2.next().await.unwrap();
-    }
-
-    #[gpui::test]
-    async fn test_dropping_subscription_in_handler(cx: &mut TestAppContext) {
-        let user_id = 5;
-        let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-        let server = FakeServer::for_client(user_id, &client, cx).await;
-
-        let model = cx.new_model(|_| TestModel::default());
-        let (done_tx, mut done_rx) = smol::channel::unbounded();
-        let subscription = client.add_message_handler(
-            model.clone().downgrade(),
-            move |model: Model<TestModel>, _: TypedEnvelope<proto::Ping>, _, mut cx| {
-                model
-                    .update(&mut cx, |model, _| model.subscription.take())
-                    .unwrap();
-                done_tx.try_send(()).unwrap();
-                async { Ok(()) }
-            },
-        );
-        model.update(cx, |model, _| {
-            model.subscription = Some(subscription);
-        });
-        server.send(proto::Ping {});
-        done_rx.next().await.unwrap();
-    }
-
-    #[derive(Default)]
-    struct TestModel {
-        id: usize,
-        subscription: Option<Subscription>,
-    }
-}

crates/client2/src/telemetry.rs 🔗

@@ -1,515 +0,0 @@
-use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
-use chrono::{DateTime, Utc};
-use futures::Future;
-use gpui::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
-use lazy_static::lazy_static;
-use parking_lot::Mutex;
-use serde::Serialize;
-use settings::Settings;
-use std::{env, io::Write, mem, path::PathBuf, sync::Arc, time::Duration};
-use sysinfo::{
-    CpuRefreshKind, Pid, PidExt, ProcessExt, ProcessRefreshKind, RefreshKind, System, SystemExt,
-};
-use tempfile::NamedTempFile;
-use util::http::HttpClient;
-use util::{channel::ReleaseChannel, TryFutureExt};
-
-pub struct Telemetry {
-    http_client: Arc<dyn HttpClient>,
-    executor: BackgroundExecutor,
-    state: Mutex<TelemetryState>,
-}
-
-struct TelemetryState {
-    metrics_id: Option<Arc<str>>,      // Per logged-in user
-    installation_id: Option<Arc<str>>, // Per app installation (different for dev, nightly, preview, and stable)
-    session_id: Option<Arc<str>>,      // Per app launch
-    release_channel: Option<&'static str>,
-    app_metadata: AppMetadata,
-    architecture: &'static str,
-    clickhouse_events_queue: Vec<ClickhouseEventWrapper>,
-    flush_clickhouse_events_task: Option<Task<()>>,
-    log_file: Option<NamedTempFile>,
-    is_staff: Option<bool>,
-    first_event_datetime: Option<DateTime<Utc>>,
-}
-
-const CLICKHOUSE_EVENTS_URL_PATH: &'static str = "/api/events";
-
-lazy_static! {
-    static ref CLICKHOUSE_EVENTS_URL: String =
-        format!("{}{}", *ZED_SERVER_URL, CLICKHOUSE_EVENTS_URL_PATH);
-}
-
-#[derive(Serialize, Debug)]
-struct ClickhouseEventRequestBody {
-    token: &'static str,
-    installation_id: Option<Arc<str>>,
-    session_id: Option<Arc<str>>,
-    is_staff: Option<bool>,
-    app_version: Option<String>,
-    os_name: &'static str,
-    os_version: Option<String>,
-    architecture: &'static str,
-    release_channel: Option<&'static str>,
-    events: Vec<ClickhouseEventWrapper>,
-}
-
-#[derive(Serialize, Debug)]
-struct ClickhouseEventWrapper {
-    signed_in: bool,
-    #[serde(flatten)]
-    event: ClickhouseEvent,
-}
-
-#[derive(Serialize, Debug)]
-#[serde(rename_all = "snake_case")]
-pub enum AssistantKind {
-    Panel,
-    Inline,
-}
-
-#[derive(Serialize, Debug)]
-#[serde(tag = "type")]
-pub enum ClickhouseEvent {
-    Editor {
-        operation: &'static str,
-        file_extension: Option<String>,
-        vim_mode: bool,
-        copilot_enabled: bool,
-        copilot_enabled_for_language: bool,
-        milliseconds_since_first_event: i64,
-    },
-    Copilot {
-        suggestion_id: Option<String>,
-        suggestion_accepted: bool,
-        file_extension: Option<String>,
-        milliseconds_since_first_event: i64,
-    },
-    Call {
-        operation: &'static str,
-        room_id: Option<u64>,
-        channel_id: Option<u64>,
-        milliseconds_since_first_event: i64,
-    },
-    Assistant {
-        conversation_id: Option<String>,
-        kind: AssistantKind,
-        model: &'static str,
-        milliseconds_since_first_event: i64,
-    },
-    Cpu {
-        usage_as_percentage: f32,
-        core_count: u32,
-        milliseconds_since_first_event: i64,
-    },
-    Memory {
-        memory_in_bytes: u64,
-        virtual_memory_in_bytes: u64,
-        milliseconds_since_first_event: i64,
-    },
-    App {
-        operation: &'static str,
-        milliseconds_since_first_event: i64,
-    },
-    Setting {
-        setting: &'static str,
-        value: String,
-        milliseconds_since_first_event: i64,
-    },
-}
-
-#[cfg(debug_assertions)]
-const MAX_QUEUE_LEN: usize = 1;
-
-#[cfg(not(debug_assertions))]
-const MAX_QUEUE_LEN: usize = 50;
-
-#[cfg(debug_assertions)]
-const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
-
-#[cfg(not(debug_assertions))]
-const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(60 * 5);
-
-impl Telemetry {
-    pub fn new(client: Arc<dyn HttpClient>, cx: &mut AppContext) -> Arc<Self> {
-        let release_channel = if cx.has_global::<ReleaseChannel>() {
-            Some(cx.global::<ReleaseChannel>().display_name())
-        } else {
-            None
-        };
-
-        // TODO: Replace all hardware stuff with nested SystemSpecs json
-        let this = Arc::new(Self {
-            http_client: client,
-            executor: cx.background_executor().clone(),
-            state: Mutex::new(TelemetryState {
-                app_metadata: cx.app_metadata(),
-                architecture: env::consts::ARCH,
-                release_channel,
-                installation_id: None,
-                metrics_id: None,
-                session_id: None,
-                clickhouse_events_queue: Default::default(),
-                flush_clickhouse_events_task: Default::default(),
-                log_file: None,
-                is_staff: None,
-                first_event_datetime: None,
-            }),
-        });
-
-        // We should only ever have one instance of Telemetry, leak the subscription to keep it alive
-        // rather than store in TelemetryState, complicating spawn as subscriptions are not Send
-        std::mem::forget(cx.on_app_quit({
-            let this = this.clone();
-            move |cx| this.shutdown_telemetry(cx)
-        }));
-
-        this
-    }
-
-    #[cfg(any(test, feature = "test-support"))]
-    fn shutdown_telemetry(self: &Arc<Self>, _: &mut AppContext) -> impl Future<Output = ()> {
-        Task::ready(())
-    }
-
-    // Skip calling this function in tests.
-    // TestAppContext ends up calling this function on shutdown and it panics when trying to find the TelemetrySettings
-    #[cfg(not(any(test, feature = "test-support")))]
-    fn shutdown_telemetry(self: &Arc<Self>, cx: &mut AppContext) -> impl Future<Output = ()> {
-        let telemetry_settings = TelemetrySettings::get_global(cx).clone();
-        self.report_app_event(telemetry_settings, "close", true);
-        Task::ready(())
-    }
-
-    pub fn log_file_path(&self) -> Option<PathBuf> {
-        Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
-    }
-
-    pub fn start(
-        self: &Arc<Self>,
-        installation_id: Option<String>,
-        session_id: String,
-        cx: &mut AppContext,
-    ) {
-        let mut state = self.state.lock();
-        state.installation_id = installation_id.map(|id| id.into());
-        state.session_id = Some(session_id.into());
-        drop(state);
-
-        let this = self.clone();
-        cx.spawn(|cx| async move {
-            // Avoiding calling `System::new_all()`, as there have been crashes related to it
-            let refresh_kind = RefreshKind::new()
-                .with_memory() // For memory usage
-                .with_processes(ProcessRefreshKind::everything()) // For process usage
-                .with_cpu(CpuRefreshKind::everything()); // For core count
-
-            let mut system = System::new_with_specifics(refresh_kind);
-
-            // Avoiding calling `refresh_all()`, just update what we need
-            system.refresh_specifics(refresh_kind);
-
-            // Waiting some amount of time before the first query is important to get a reasonable value
-            // https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
-            const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60);
-
-            loop {
-                smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;
-
-                system.refresh_specifics(refresh_kind);
-
-                let current_process = Pid::from_u32(std::process::id());
-                let Some(process) = system.processes().get(&current_process) else {
-                    let process = current_process;
-                    log::error!("Failed to find own process {process:?} in system process table");
-                    // TODO: Fire an error telemetry event
-                    return;
-                };
-
-                let telemetry_settings = if let Ok(telemetry_settings) =
-                    cx.update(|cx| *TelemetrySettings::get_global(cx))
-                {
-                    telemetry_settings
-                } else {
-                    break;
-                };
-
-                this.report_memory_event(
-                    telemetry_settings,
-                    process.memory(),
-                    process.virtual_memory(),
-                );
-                this.report_cpu_event(
-                    telemetry_settings,
-                    process.cpu_usage(),
-                    system.cpus().len() as u32,
-                );
-            }
-        })
-        .detach();
-    }
-
-    pub fn set_authenticated_user_info(
-        self: &Arc<Self>,
-        metrics_id: Option<String>,
-        is_staff: bool,
-        cx: &AppContext,
-    ) {
-        if !TelemetrySettings::get_global(cx).metrics {
-            return;
-        }
-
-        let mut state = self.state.lock();
-        let metrics_id: Option<Arc<str>> = metrics_id.map(|id| id.into());
-        state.metrics_id = metrics_id.clone();
-        state.is_staff = Some(is_staff);
-        drop(state);
-    }
-
-    pub fn report_editor_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        file_extension: Option<String>,
-        vim_mode: bool,
-        operation: &'static str,
-        copilot_enabled: bool,
-        copilot_enabled_for_language: bool,
-    ) {
-        let event = ClickhouseEvent::Editor {
-            file_extension,
-            vim_mode,
-            operation,
-            copilot_enabled,
-            copilot_enabled_for_language,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_copilot_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        suggestion_id: Option<String>,
-        suggestion_accepted: bool,
-        file_extension: Option<String>,
-    ) {
-        let event = ClickhouseEvent::Copilot {
-            suggestion_id,
-            suggestion_accepted,
-            file_extension,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_assistant_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        conversation_id: Option<String>,
-        kind: AssistantKind,
-        model: &'static str,
-    ) {
-        let event = ClickhouseEvent::Assistant {
-            conversation_id,
-            kind,
-            model,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_call_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        operation: &'static str,
-        room_id: Option<u64>,
-        channel_id: Option<u64>,
-    ) {
-        let event = ClickhouseEvent::Call {
-            operation,
-            room_id,
-            channel_id,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_cpu_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        usage_as_percentage: f32,
-        core_count: u32,
-    ) {
-        let event = ClickhouseEvent::Cpu {
-            usage_as_percentage,
-            core_count,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_memory_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        memory_in_bytes: u64,
-        virtual_memory_in_bytes: u64,
-    ) {
-        let event = ClickhouseEvent::Memory {
-            memory_in_bytes,
-            virtual_memory_in_bytes,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    pub fn report_app_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        operation: &'static str,
-        immediate_flush: bool,
-    ) {
-        let event = ClickhouseEvent::App {
-            operation,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, immediate_flush)
-    }
-
-    pub fn report_setting_event(
-        self: &Arc<Self>,
-        telemetry_settings: TelemetrySettings,
-        setting: &'static str,
-        value: String,
-    ) {
-        let event = ClickhouseEvent::Setting {
-            setting,
-            value,
-            milliseconds_since_first_event: self.milliseconds_since_first_event(),
-        };
-
-        self.report_clickhouse_event(event, telemetry_settings, false)
-    }
-
-    fn milliseconds_since_first_event(&self) -> i64 {
-        let mut state = self.state.lock();
-        match state.first_event_datetime {
-            Some(first_event_datetime) => {
-                let now: DateTime<Utc> = Utc::now();
-                now.timestamp_millis() - first_event_datetime.timestamp_millis()
-            }
-            None => {
-                state.first_event_datetime = Some(Utc::now());
-                0
-            }
-        }
-    }
-
-    fn report_clickhouse_event(
-        self: &Arc<Self>,
-        event: ClickhouseEvent,
-        telemetry_settings: TelemetrySettings,
-        immediate_flush: bool,
-    ) {
-        if !telemetry_settings.metrics {
-            return;
-        }
-
-        let mut state = self.state.lock();
-        let signed_in = state.metrics_id.is_some();
-        state
-            .clickhouse_events_queue
-            .push(ClickhouseEventWrapper { signed_in, event });
-
-        if state.installation_id.is_some() {
-            if immediate_flush || state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
-                drop(state);
-                self.flush_clickhouse_events();
-            } else {
-                let this = self.clone();
-                let executor = self.executor.clone();
-                state.flush_clickhouse_events_task = Some(self.executor.spawn(async move {
-                    executor.timer(DEBOUNCE_INTERVAL).await;
-                    this.flush_clickhouse_events();
-                }));
-            }
-        }
-    }
-
-    pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
-        self.state.lock().metrics_id.clone()
-    }
-
-    pub fn installation_id(self: &Arc<Self>) -> Option<Arc<str>> {
-        self.state.lock().installation_id.clone()
-    }
-
-    pub fn is_staff(self: &Arc<Self>) -> Option<bool> {
-        self.state.lock().is_staff
-    }
-
-    fn flush_clickhouse_events(self: &Arc<Self>) {
-        let mut state = self.state.lock();
-        state.first_event_datetime = None;
-        let mut events = mem::take(&mut state.clickhouse_events_queue);
-        state.flush_clickhouse_events_task.take();
-        drop(state);
-
-        let this = self.clone();
-        self.executor
-            .spawn(
-                async move {
-                    let mut json_bytes = Vec::new();
-
-                    if let Some(file) = &mut this.state.lock().log_file {
-                        let file = file.as_file_mut();
-                        for event in &mut events {
-                            json_bytes.clear();
-                            serde_json::to_writer(&mut json_bytes, event)?;
-                            file.write_all(&json_bytes)?;
-                            file.write(b"\n")?;
-                        }
-                    }
-
-                    {
-                        let state = this.state.lock();
-                        let request_body = ClickhouseEventRequestBody {
-                            token: ZED_SECRET_CLIENT_TOKEN,
-                            installation_id: state.installation_id.clone(),
-                            session_id: state.session_id.clone(),
-                            is_staff: state.is_staff.clone(),
-                            app_version: state
-                                .app_metadata
-                                .app_version
-                                .map(|version| version.to_string()),
-                            os_name: state.app_metadata.os_name,
-                            os_version: state
-                                .app_metadata
-                                .os_version
-                                .map(|version| version.to_string()),
-                            architecture: state.architecture,
-
-                            release_channel: state.release_channel,
-                            events,
-                        };
-                        json_bytes.clear();
-                        serde_json::to_writer(&mut json_bytes, &request_body)?;
-                    }
-
-                    this.http_client
-                        .post_json(CLICKHOUSE_EVENTS_URL.as_str(), json_bytes.into())
-                        .await?;
-                    anyhow::Ok(())
-                }
-                .log_err(),
-            )
-            .detach();
-    }
-}

crates/client2/src/test.rs 🔗

@@ -1,214 +0,0 @@
-use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
-use anyhow::{anyhow, Result};
-use futures::{stream::BoxStream, StreamExt};
-use gpui::{BackgroundExecutor, Context, Model, TestAppContext};
-use parking_lot::Mutex;
-use rpc::{
-    proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
-    ConnectionId, Peer, Receipt, TypedEnvelope,
-};
-use std::sync::Arc;
-
-pub struct FakeServer {
-    peer: Arc<Peer>,
-    state: Arc<Mutex<FakeServerState>>,
-    user_id: u64,
-    executor: BackgroundExecutor,
-}
-
-#[derive(Default)]
-struct FakeServerState {
-    incoming: Option<BoxStream<'static, Box<dyn proto::AnyTypedEnvelope>>>,
-    connection_id: Option<ConnectionId>,
-    forbid_connections: bool,
-    auth_count: usize,
-    access_token: usize,
-}
-
-impl FakeServer {
-    pub async fn for_client(
-        client_user_id: u64,
-        client: &Arc<Client>,
-        cx: &TestAppContext,
-    ) -> Self {
-        let server = Self {
-            peer: Peer::new(0),
-            state: Default::default(),
-            user_id: client_user_id,
-            executor: cx.executor(),
-        };
-
-        client
-            .override_authenticate({
-                let state = Arc::downgrade(&server.state);
-                move |cx| {
-                    let state = state.clone();
-                    cx.spawn(move |_| async move {
-                        let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
-                        let mut state = state.lock();
-                        state.auth_count += 1;
-                        let access_token = state.access_token.to_string();
-                        Ok(Credentials {
-                            user_id: client_user_id,
-                            access_token,
-                        })
-                    })
-                }
-            })
-            .override_establish_connection({
-                let peer = Arc::downgrade(&server.peer);
-                let state = Arc::downgrade(&server.state);
-                move |credentials, cx| {
-                    let peer = peer.clone();
-                    let state = state.clone();
-                    let credentials = credentials.clone();
-                    cx.spawn(move |cx| async move {
-                        let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
-                        let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
-                        if state.lock().forbid_connections {
-                            Err(EstablishConnectionError::Other(anyhow!(
-                                "server is forbidding connections"
-                            )))?
-                        }
-
-                        assert_eq!(credentials.user_id, client_user_id);
-
-                        if credentials.access_token != state.lock().access_token.to_string() {
-                            Err(EstablishConnectionError::Unauthorized)?
-                        }
-
-                        let (client_conn, server_conn, _) =
-                            Connection::in_memory(cx.background_executor().clone());
-                        let (connection_id, io, incoming) =
-                            peer.add_test_connection(server_conn, cx.background_executor().clone());
-                        cx.background_executor().spawn(io).detach();
-                        {
-                            let mut state = state.lock();
-                            state.connection_id = Some(connection_id);
-                            state.incoming = Some(incoming);
-                        }
-                        peer.send(
-                            connection_id,
-                            proto::Hello {
-                                peer_id: Some(connection_id.into()),
-                            },
-                        )
-                        .unwrap();
-
-                        Ok(client_conn)
-                    })
-                }
-            });
-
-        client
-            .authenticate_and_connect(false, &cx.to_async())
-            .await
-            .unwrap();
-
-        server
-    }
-
-    pub fn disconnect(&self) {
-        if self.state.lock().connection_id.is_some() {
-            self.peer.disconnect(self.connection_id());
-            let mut state = self.state.lock();
-            state.connection_id.take();
-            state.incoming.take();
-        }
-    }
-
-    pub fn auth_count(&self) -> usize {
-        self.state.lock().auth_count
-    }
-
-    pub fn roll_access_token(&self) {
-        self.state.lock().access_token += 1;
-    }
-
-    pub fn forbid_connections(&self) {
-        self.state.lock().forbid_connections = true;
-    }
-
-    pub fn allow_connections(&self) {
-        self.state.lock().forbid_connections = false;
-    }
-
-    pub fn send<T: proto::EnvelopedMessage>(&self, message: T) {
-        self.peer.send(self.connection_id(), message).unwrap();
-    }
-
-    #[allow(clippy::await_holding_lock)]
-    pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
-        self.executor.start_waiting();
-
-        loop {
-            let message = self
-                .state
-                .lock()
-                .incoming
-                .as_mut()
-                .expect("not connected")
-                .next()
-                .await
-                .ok_or_else(|| anyhow!("other half hung up"))?;
-            self.executor.finish_waiting();
-            let type_name = message.payload_type_name();
-            let message = message.into_any();
-
-            if message.is::<TypedEnvelope<M>>() {
-                return Ok(*message.downcast().unwrap());
-            }
-
-            if message.is::<TypedEnvelope<GetPrivateUserInfo>>() {
-                self.respond(
-                    message
-                        .downcast::<TypedEnvelope<GetPrivateUserInfo>>()
-                        .unwrap()
-                        .receipt(),
-                    GetPrivateUserInfoResponse {
-                        metrics_id: "the-metrics-id".into(),
-                        staff: false,
-                        flags: Default::default(),
-                    },
-                );
-                continue;
-            }
-
-            panic!(
-                "fake server received unexpected message type: {:?}",
-                type_name
-            );
-        }
-    }
-
-    pub fn respond<T: proto::RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) {
-        self.peer.respond(receipt, response).unwrap()
-    }
-
-    fn connection_id(&self) -> ConnectionId {
-        self.state.lock().connection_id.expect("not connected")
-    }
-
-    pub async fn build_user_store(
-        &self,
-        client: Arc<Client>,
-        cx: &mut TestAppContext,
-    ) -> Model<UserStore> {
-        let user_store = cx.new_model(|cx| UserStore::new(client, cx));
-        assert_eq!(
-            self.receive::<proto::GetUsers>()
-                .await
-                .unwrap()
-                .payload
-                .user_ids,
-            &[self.user_id]
-        );
-        user_store
-    }
-}
-
-impl Drop for FakeServer {
-    fn drop(&mut self) {
-        self.disconnect();
-    }
-}

crates/client2/src/user.rs 🔗

@@ -1,694 +0,0 @@
-use super::{proto, Client, Status, TypedEnvelope};
-use anyhow::{anyhow, Context, Result};
-use collections::{hash_map::Entry, HashMap, HashSet};
-use feature_flags::FeatureFlagAppExt;
-use futures::{channel::mpsc, Future, StreamExt};
-use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, SharedString, Task};
-use postage::{sink::Sink, watch};
-use rpc::proto::{RequestMessage, UsersResponse};
-use std::sync::{Arc, Weak};
-use text::ReplicaId;
-use util::TryFutureExt as _;
-
-pub type UserId = u64;
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub struct ParticipantIndex(pub u32);
-
-#[derive(Default, Debug)]
-pub struct User {
-    pub id: UserId,
-    pub github_login: String,
-    pub avatar_uri: SharedString,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct Collaborator {
-    pub peer_id: proto::PeerId,
-    pub replica_id: ReplicaId,
-    pub user_id: UserId,
-}
-
-impl PartialOrd for User {
-    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl Ord for User {
-    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
-        self.github_login.cmp(&other.github_login)
-    }
-}
-
-impl PartialEq for User {
-    fn eq(&self, other: &Self) -> bool {
-        self.id == other.id && self.github_login == other.github_login
-    }
-}
-
-impl Eq for User {}
-
-#[derive(Debug, PartialEq)]
-pub struct Contact {
-    pub user: Arc<User>,
-    pub online: bool,
-    pub busy: bool,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum ContactRequestStatus {
-    None,
-    RequestSent,
-    RequestReceived,
-    RequestAccepted,
-}
-
-pub struct UserStore {
-    users: HashMap<u64, Arc<User>>,
-    participant_indices: HashMap<u64, ParticipantIndex>,
-    update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
-    current_user: watch::Receiver<Option<Arc<User>>>,
-    contacts: Vec<Arc<Contact>>,
-    incoming_contact_requests: Vec<Arc<User>>,
-    outgoing_contact_requests: Vec<Arc<User>>,
-    pending_contact_requests: HashMap<u64, usize>,
-    invite_info: Option<InviteInfo>,
-    client: Weak<Client>,
-    _maintain_contacts: Task<()>,
-    _maintain_current_user: Task<Result<()>>,
-}
-
-#[derive(Clone)]
-pub struct InviteInfo {
-    pub count: u32,
-    pub url: Arc<str>,
-}
-
-pub enum Event {
-    Contact {
-        user: Arc<User>,
-        kind: ContactEventKind,
-    },
-    ShowContacts,
-    ParticipantIndicesChanged,
-}
-
-#[derive(Clone, Copy)]
-pub enum ContactEventKind {
-    Requested,
-    Accepted,
-    Cancelled,
-}
-
-impl EventEmitter<Event> for UserStore {}
-
-enum UpdateContacts {
-    Update(proto::UpdateContacts),
-    Wait(postage::barrier::Sender),
-    Clear(postage::barrier::Sender),
-}
-
-impl UserStore {
-    pub fn new(client: Arc<Client>, cx: &mut ModelContext<Self>) -> Self {
-        let (mut current_user_tx, current_user_rx) = watch::channel();
-        let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded();
-        let rpc_subscriptions = vec![
-            client.add_message_handler(cx.weak_model(), Self::handle_update_contacts),
-            client.add_message_handler(cx.weak_model(), Self::handle_update_invite_info),
-            client.add_message_handler(cx.weak_model(), Self::handle_show_contacts),
-        ];
-        Self {
-            users: Default::default(),
-            current_user: current_user_rx,
-            contacts: Default::default(),
-            incoming_contact_requests: Default::default(),
-            participant_indices: Default::default(),
-            outgoing_contact_requests: Default::default(),
-            invite_info: None,
-            client: Arc::downgrade(&client),
-            update_contacts_tx,
-            _maintain_contacts: cx.spawn(|this, mut cx| async move {
-                let _subscriptions = rpc_subscriptions;
-                while let Some(message) = update_contacts_rx.next().await {
-                    if let Ok(task) =
-                        this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
-                    {
-                        task.log_err().await;
-                    } else {
-                        break;
-                    }
-                }
-            }),
-            _maintain_current_user: cx.spawn(|this, mut cx| async move {
-                let mut status = client.status();
-                while let Some(status) = status.next().await {
-                    match status {
-                        Status::Connected { .. } => {
-                            if let Some(user_id) = client.user_id() {
-                                let fetch_user = if let Ok(fetch_user) = this
-                                    .update(&mut cx, |this, cx| {
-                                        this.get_user(user_id, cx).log_err()
-                                    }) {
-                                    fetch_user
-                                } else {
-                                    break;
-                                };
-                                let fetch_metrics_id =
-                                    client.request(proto::GetPrivateUserInfo {}).log_err();
-                                let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
-
-                                cx.update(|cx| {
-                                    if let Some(info) = info {
-                                        cx.update_flags(info.staff, info.flags);
-                                        client.telemetry.set_authenticated_user_info(
-                                            Some(info.metrics_id.clone()),
-                                            info.staff,
-                                            cx,
-                                        )
-                                    }
-                                })?;
-
-                                current_user_tx.send(user).await.ok();
-
-                                this.update(&mut cx, |_, cx| cx.notify())?;
-                            }
-                        }
-                        Status::SignedOut => {
-                            current_user_tx.send(None).await.ok();
-                            this.update(&mut cx, |this, cx| {
-                                cx.notify();
-                                this.clear_contacts()
-                            })?
-                            .await;
-                        }
-                        Status::ConnectionLost => {
-                            this.update(&mut cx, |this, cx| {
-                                cx.notify();
-                                this.clear_contacts()
-                            })?
-                            .await;
-                        }
-                        _ => {}
-                    }
-                }
-                Ok(())
-            }),
-            pending_contact_requests: Default::default(),
-        }
-    }
-
-    #[cfg(feature = "test-support")]
-    pub fn clear_cache(&mut self) {
-        self.users.clear();
-    }
-
-    async fn handle_update_invite_info(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::UpdateInviteInfo>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, cx| {
-            this.invite_info = Some(InviteInfo {
-                url: Arc::from(message.payload.url),
-                count: message.payload.count,
-            });
-            cx.notify();
-        })?;
-        Ok(())
-    }
-
-    async fn handle_show_contacts(
-        this: Model<Self>,
-        _: TypedEnvelope<proto::ShowContacts>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |_, cx| cx.emit(Event::ShowContacts))?;
-        Ok(())
-    }
-
-    pub fn invite_info(&self) -> Option<&InviteInfo> {
-        self.invite_info.as_ref()
-    }
-
-    async fn handle_update_contacts(
-        this: Model<Self>,
-        message: TypedEnvelope<proto::UpdateContacts>,
-        _: Arc<Client>,
-        mut cx: AsyncAppContext,
-    ) -> Result<()> {
-        this.update(&mut cx, |this, _| {
-            this.update_contacts_tx
-                .unbounded_send(UpdateContacts::Update(message.payload))
-                .unwrap();
-        })?;
-        Ok(())
-    }
-
-    fn update_contacts(
-        &mut self,
-        message: UpdateContacts,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        match message {
-            UpdateContacts::Wait(barrier) => {
-                drop(barrier);
-                Task::ready(Ok(()))
-            }
-            UpdateContacts::Clear(barrier) => {
-                self.contacts.clear();
-                self.incoming_contact_requests.clear();
-                self.outgoing_contact_requests.clear();
-                drop(barrier);
-                Task::ready(Ok(()))
-            }
-            UpdateContacts::Update(message) => {
-                let mut user_ids = HashSet::default();
-                for contact in &message.contacts {
-                    user_ids.insert(contact.user_id);
-                }
-                user_ids.extend(message.incoming_requests.iter().map(|req| req.requester_id));
-                user_ids.extend(message.outgoing_requests.iter());
-
-                let load_users = self.get_users(user_ids.into_iter().collect(), cx);
-                cx.spawn(|this, mut cx| async move {
-                    load_users.await?;
-
-                    // Users are fetched in parallel above and cached in call to get_users
-                    // No need to paralellize here
-                    let mut updated_contacts = Vec::new();
-                    let this = this
-                        .upgrade()
-                        .ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
-                    for contact in message.contacts {
-                        updated_contacts.push(Arc::new(
-                            Contact::from_proto(contact, &this, &mut cx).await?,
-                        ));
-                    }
-
-                    let mut incoming_requests = Vec::new();
-                    for request in message.incoming_requests {
-                        incoming_requests.push({
-                            this.update(&mut cx, |this, cx| {
-                                this.get_user(request.requester_id, cx)
-                            })?
-                            .await?
-                        });
-                    }
-
-                    let mut outgoing_requests = Vec::new();
-                    for requested_user_id in message.outgoing_requests {
-                        outgoing_requests.push(
-                            this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))?
-                                .await?,
-                        );
-                    }
-
-                    let removed_contacts =
-                        HashSet::<u64>::from_iter(message.remove_contacts.iter().copied());
-                    let removed_incoming_requests =
-                        HashSet::<u64>::from_iter(message.remove_incoming_requests.iter().copied());
-                    let removed_outgoing_requests =
-                        HashSet::<u64>::from_iter(message.remove_outgoing_requests.iter().copied());
-
-                    this.update(&mut cx, |this, cx| {
-                        // Remove contacts
-                        this.contacts
-                            .retain(|contact| !removed_contacts.contains(&contact.user.id));
-                        // Update existing contacts and insert new ones
-                        for updated_contact in updated_contacts {
-                            match this.contacts.binary_search_by_key(
-                                &&updated_contact.user.github_login,
-                                |contact| &contact.user.github_login,
-                            ) {
-                                Ok(ix) => this.contacts[ix] = updated_contact,
-                                Err(ix) => this.contacts.insert(ix, updated_contact),
-                            }
-                        }
-
-                        // Remove incoming contact requests
-                        this.incoming_contact_requests.retain(|user| {
-                            if removed_incoming_requests.contains(&user.id) {
-                                cx.emit(Event::Contact {
-                                    user: user.clone(),
-                                    kind: ContactEventKind::Cancelled,
-                                });
-                                false
-                            } else {
-                                true
-                            }
-                        });
-                        // Update existing incoming requests and insert new ones
-                        for user in incoming_requests {
-                            match this
-                                .incoming_contact_requests
-                                .binary_search_by_key(&&user.github_login, |contact| {
-                                    &contact.github_login
-                                }) {
-                                Ok(ix) => this.incoming_contact_requests[ix] = user,
-                                Err(ix) => this.incoming_contact_requests.insert(ix, user),
-                            }
-                        }
-
-                        // Remove outgoing contact requests
-                        this.outgoing_contact_requests
-                            .retain(|user| !removed_outgoing_requests.contains(&user.id));
-                        // Update existing incoming requests and insert new ones
-                        for request in outgoing_requests {
-                            match this
-                                .outgoing_contact_requests
-                                .binary_search_by_key(&&request.github_login, |contact| {
-                                    &contact.github_login
-                                }) {
-                                Ok(ix) => this.outgoing_contact_requests[ix] = request,
-                                Err(ix) => this.outgoing_contact_requests.insert(ix, request),
-                            }
-                        }
-
-                        cx.notify();
-                    })?;
-
-                    Ok(())
-                })
-            }
-        }
-    }
-
-    pub fn contacts(&self) -> &[Arc<Contact>] {
-        &self.contacts
-    }
-
-    pub fn has_contact(&self, user: &Arc<User>) -> bool {
-        self.contacts
-            .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login)
-            .is_ok()
-    }
-
-    pub fn incoming_contact_requests(&self) -> &[Arc<User>] {
-        &self.incoming_contact_requests
-    }
-
-    pub fn outgoing_contact_requests(&self) -> &[Arc<User>] {
-        &self.outgoing_contact_requests
-    }
-
-    pub fn is_contact_request_pending(&self, user: &User) -> bool {
-        self.pending_contact_requests.contains_key(&user.id)
-    }
-
-    pub fn contact_request_status(&self, user: &User) -> ContactRequestStatus {
-        if self
-            .contacts
-            .binary_search_by_key(&&user.github_login, |contact| &contact.user.github_login)
-            .is_ok()
-        {
-            ContactRequestStatus::RequestAccepted
-        } else if self
-            .outgoing_contact_requests
-            .binary_search_by_key(&&user.github_login, |user| &user.github_login)
-            .is_ok()
-        {
-            ContactRequestStatus::RequestSent
-        } else if self
-            .incoming_contact_requests
-            .binary_search_by_key(&&user.github_login, |user| &user.github_login)
-            .is_ok()
-        {
-            ContactRequestStatus::RequestReceived
-        } else {
-            ContactRequestStatus::None
-        }
-    }
-
-    pub fn request_contact(
-        &mut self,
-        responder_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        self.perform_contact_request(responder_id, proto::RequestContact { responder_id }, cx)
-    }
-
-    pub fn remove_contact(
-        &mut self,
-        user_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        self.perform_contact_request(user_id, proto::RemoveContact { user_id }, cx)
-    }
-
-    pub fn has_incoming_contact_request(&self, user_id: u64) -> bool {
-        self.incoming_contact_requests
-            .iter()
-            .any(|user| user.id == user_id)
-    }
-
-    pub fn respond_to_contact_request(
-        &mut self,
-        requester_id: u64,
-        accept: bool,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        self.perform_contact_request(
-            requester_id,
-            proto::RespondToContactRequest {
-                requester_id,
-                response: if accept {
-                    proto::ContactRequestResponse::Accept
-                } else {
-                    proto::ContactRequestResponse::Decline
-                } as i32,
-            },
-            cx,
-        )
-    }
-
-    pub fn dismiss_contact_request(
-        &mut self,
-        requester_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.upgrade();
-        cx.spawn(move |_, _| async move {
-            client
-                .ok_or_else(|| anyhow!("can't upgrade client reference"))?
-                .request(proto::RespondToContactRequest {
-                    requester_id,
-                    response: proto::ContactRequestResponse::Dismiss as i32,
-                })
-                .await?;
-            Ok(())
-        })
-    }
-
-    fn perform_contact_request<T: RequestMessage>(
-        &mut self,
-        user_id: u64,
-        request: T,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let client = self.client.upgrade();
-        *self.pending_contact_requests.entry(user_id).or_insert(0) += 1;
-        cx.notify();
-
-        cx.spawn(move |this, mut cx| async move {
-            let response = client
-                .ok_or_else(|| anyhow!("can't upgrade client reference"))?
-                .request(request)
-                .await;
-            this.update(&mut cx, |this, cx| {
-                if let Entry::Occupied(mut request_count) =
-                    this.pending_contact_requests.entry(user_id)
-                {
-                    *request_count.get_mut() -= 1;
-                    if *request_count.get() == 0 {
-                        request_count.remove();
-                    }
-                }
-                cx.notify();
-            })?;
-            response?;
-            Ok(())
-        })
-    }
-
-    pub fn clear_contacts(&mut self) -> impl Future<Output = ()> {
-        let (tx, mut rx) = postage::barrier::channel();
-        self.update_contacts_tx
-            .unbounded_send(UpdateContacts::Clear(tx))
-            .unwrap();
-        async move {
-            rx.next().await;
-        }
-    }
-
-    pub fn contact_updates_done(&mut self) -> impl Future<Output = ()> {
-        let (tx, mut rx) = postage::barrier::channel();
-        self.update_contacts_tx
-            .unbounded_send(UpdateContacts::Wait(tx))
-            .unwrap();
-        async move {
-            rx.next().await;
-        }
-    }
-
-    pub fn get_users(
-        &mut self,
-        user_ids: Vec<u64>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Arc<User>>>> {
-        let mut user_ids_to_fetch = user_ids.clone();
-        user_ids_to_fetch.retain(|id| !self.users.contains_key(id));
-
-        cx.spawn(|this, mut cx| async move {
-            if !user_ids_to_fetch.is_empty() {
-                this.update(&mut cx, |this, cx| {
-                    this.load_users(
-                        proto::GetUsers {
-                            user_ids: user_ids_to_fetch,
-                        },
-                        cx,
-                    )
-                })?
-                .await?;
-            }
-
-            this.update(&mut cx, |this, _| {
-                user_ids
-                    .iter()
-                    .map(|user_id| {
-                        this.users
-                            .get(user_id)
-                            .cloned()
-                            .ok_or_else(|| anyhow!("user {} not found", user_id))
-                    })
-                    .collect()
-            })?
-        })
-    }
-
-    pub fn fuzzy_search_users(
-        &mut self,
-        query: String,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Arc<User>>>> {
-        self.load_users(proto::FuzzySearchUsers { query }, cx)
-    }
-
-    pub fn get_cached_user(&self, user_id: u64) -> Option<Arc<User>> {
-        self.users.get(&user_id).cloned()
-    }
-
-    pub fn get_user(
-        &mut self,
-        user_id: u64,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Arc<User>>> {
-        if let Some(user) = self.users.get(&user_id).cloned() {
-            return Task::ready(Ok(user));
-        }
-
-        let load_users = self.get_users(vec![user_id], cx);
-        cx.spawn(move |this, mut cx| async move {
-            load_users.await?;
-            this.update(&mut cx, |this, _| {
-                this.users
-                    .get(&user_id)
-                    .cloned()
-                    .ok_or_else(|| anyhow!("server responded with no users"))
-            })?
-        })
-    }
-
-    pub fn current_user(&self) -> Option<Arc<User>> {
-        self.current_user.borrow().clone()
-    }
-
-    pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
-        self.current_user.clone()
-    }
-
-    fn load_users(
-        &mut self,
-        request: impl RequestMessage<Response = UsersResponse>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Arc<User>>>> {
-        let client = self.client.clone();
-        cx.spawn(|this, mut cx| async move {
-            if let Some(rpc) = client.upgrade() {
-                let response = rpc.request(request).await.context("error loading users")?;
-                let users = response
-                    .users
-                    .into_iter()
-                    .map(|user| User::new(user))
-                    .collect::<Vec<_>>();
-
-                this.update(&mut cx, |this, _| {
-                    for user in &users {
-                        this.users.insert(user.id, user.clone());
-                    }
-                })
-                .ok();
-
-                Ok(users)
-            } else {
-                Ok(Vec::new())
-            }
-        })
-    }
-
-    pub fn set_participant_indices(
-        &mut self,
-        participant_indices: HashMap<u64, ParticipantIndex>,
-        cx: &mut ModelContext<Self>,
-    ) {
-        if participant_indices != self.participant_indices {
-            self.participant_indices = participant_indices;
-            cx.emit(Event::ParticipantIndicesChanged);
-        }
-    }
-
-    pub fn participant_indices(&self) -> &HashMap<u64, ParticipantIndex> {
-        &self.participant_indices
-    }
-}
-
-impl User {
-    fn new(message: proto::User) -> Arc<Self> {
-        Arc::new(User {
-            id: message.id,
-            github_login: message.github_login,
-            avatar_uri: message.avatar_url.into(),
-        })
-    }
-}
-
-impl Contact {
-    async fn from_proto(
-        contact: proto::Contact,
-        user_store: &Model<UserStore>,
-        cx: &mut AsyncAppContext,
-    ) -> Result<Self> {
-        let user = user_store
-            .update(cx, |user_store, cx| {
-                user_store.get_user(contact.user_id, cx)
-            })?
-            .await?;
-        Ok(Self {
-            user,
-            online: contact.online,
-            busy: contact.busy,
-        })
-    }
-}
-
-impl Collaborator {
-    pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
-        Ok(Self {
-            peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
-            replica_id: message.replica_id as ReplicaId,
-            user_id: message.user_id as UserId,
-        })
-    }
-}

crates/collab2/Cargo.toml 🔗

@@ -63,9 +63,9 @@ uuid.workspace = true
 audio = { path = "../audio" }
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-call = { package = "call2", path = "../call2", features = ["test-support"] }
-client = { package = "client2", path = "../client2", features = ["test-support"] }
-channel = { package = "channel2", path = "../channel2" }
+call = { path = "../call", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
+channel = { path = "../channel" }
 editor = { path = "../editor", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 fs = { package = "fs2", path = "../fs2", features = ["test-support"] }

crates/collab_ui/Cargo.toml 🔗

@@ -24,9 +24,9 @@ test-support = [
 [dependencies]
 auto_update = { path = "../auto_update" }
 db = { package = "db2", path = "../db2" }
-call = { package = "call2", path = "../call2" }
-client = { package = "client2", path = "../client2" }
-channel = { package = "channel2", path = "../channel2" }
+call = { path = "../call" }
+client = { path = "../client" }
+channel = { path = "../channel" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
 # context_menu = { path = "../context_menu" }
@@ -65,8 +65,8 @@ time.workspace = true
 smallvec.workspace = true
 
 [dev-dependencies]
-call = { package = "call2", path = "../call2", features = ["test-support"] }
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+call = { path = "../call", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }

crates/diagnostics/Cargo.toml 🔗

@@ -31,7 +31,7 @@ smallvec.workspace = true
 postage.workspace = true
 
 [dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }

crates/editor/Cargo.toml 🔗

@@ -23,7 +23,7 @@ test-support = [
 ]
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 clock = { path = "../clock" }
 copilot = { path = "../copilot" }
 db = { package="db2", path = "../db2" }

crates/feedback/Cargo.toml 🔗

@@ -11,7 +11,7 @@ path = "src/feedback.rs"
 test-support = []
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 db = { package = "db2", path = "../db2" }
 editor = { path = "../editor" }
 gpui = { package = "gpui2", path = "../gpui2" }

crates/language/Cargo.toml 🔗

@@ -61,7 +61,7 @@ tree-sitter-typescript = { workspace = true, optional = true }
 pulldown-cmark = { version = "0.9.2", default-features = false }
 
 [dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }

crates/language_tools/Cargo.toml 🔗

@@ -26,7 +26,7 @@ anyhow.workspace = true
 tree-sitter.workspace = true
 
 [dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

crates/multi_buffer/Cargo.toml 🔗

@@ -20,7 +20,7 @@ test-support = [
 ]
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
 git = { package = "git3", path = "../git3" }

crates/notifications2/Cargo.toml 🔗

@@ -17,8 +17,8 @@ test-support = [
 ]
 
 [dependencies]
-channel = { package = "channel2", path = "../channel2" }
-client = { package = "client2", path = "../client2" }
+channel = { path = "../channel" }
+client = { path = "../client" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
 db = { package = "db2", path = "../db2" }
@@ -34,7 +34,7 @@ anyhow.workspace = true
 time.workspace = true
 
 [dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }

crates/prettier2/Cargo.toml 🔗

@@ -12,7 +12,7 @@ doctest = false
 test-support = []
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 collections = { path = "../collections"}
 language = { path = "../language" }
 gpui = { package = "gpui2", path = "../gpui2" }

crates/project/Cargo.toml 🔗

@@ -22,7 +22,7 @@ test-support = [
 [dependencies]
 text = { package = "text2", path = "../text2" }
 copilot = { path = "../copilot" }
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
 db = { package = "db2", path = "../db2" }
@@ -69,7 +69,7 @@ itertools = "0.10"
 ctor.workspace = true
 env_logger.workspace = true
 pretty_assertions.workspace = true
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 db = { package = "db2", path = "../db2", features = ["test-support"] }
 fs = { package = "fs2", path = "../fs2",  features = ["test-support"] }

crates/project_panel/Cargo.toml 🔗

@@ -33,7 +33,7 @@ pretty_assertions.workspace = true
 unicase = "2.6"
 
 [dev-dependencies]
-client = { path = "../client2", package = "client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 language = { path = "../language", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }

crates/search/Cargo.toml 🔗

@@ -32,7 +32,7 @@ smallvec.workspace = true
 smol.workspace = true
 serde_json.workspace = true
 [dev-dependencies]
-client = { package = "client2", path = "../client2", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 

crates/semantic_index2/Cargo.toml 🔗

@@ -48,7 +48,7 @@ rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 workspace = { path = "../workspace", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"]}
 rust-embed = { version = "8.0", features = ["include-exclude"] }
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 node_runtime = { path = "../node_runtime"}
 
 pretty_assertions.workspace = true

crates/terminal_view/Cargo.toml 🔗

@@ -40,7 +40,7 @@ serde_derive.workspace = true
 [dev-dependencies]
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-client = { package = "client2", path = "../client2", features = ["test-support"]}
+client = { path = "../client", features = ["test-support"]}
 project = { path = "../project", features = ["test-support"]}
 workspace = { path = "../workspace", features = ["test-support"] }
 rand.workspace = true

crates/theme_selector/Cargo.toml 🔗

@@ -9,7 +9,7 @@ path = "src/theme_selector.rs"
 doctest = false
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 editor = { path = "../editor" }
 feature_flags = { path = "../feature_flags" }
 fs = { package = "fs2", path = "../fs2" }

crates/welcome/Cargo.toml 🔗

@@ -11,7 +11,7 @@ path = "src/welcome.rs"
 test-support = []
 
 [dependencies]
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 editor = { path = "../editor" }
 fs = { package = "fs2", path = "../fs2" }
 fuzzy = {  path = "../fuzzy" }

crates/workspace/Cargo.toml 🔗

@@ -20,8 +20,8 @@ test-support = [
 
 [dependencies]
 db = { path = "../db2", package = "db2" }
-call = { path = "../call2", package = "call2" }
-client = { path = "../client2", package = "client2" }
+call = { path = "../call" }
+client = { path = "../client" }
 collections = { path = "../collections" }
 # context_menu = { path = "../context_menu" }
 fs = { path = "../fs2", package = "fs2" }
@@ -54,8 +54,8 @@ smallvec.workspace = true
 uuid.workspace = true
 
 [dev-dependencies]
-call = { path = "../call2", package = "call2", features = ["test-support"] }
-client = { path = "../client2", package = "client2", features = ["test-support"] }
+call = { path = "../call", features = ["test-support"] }
+client = { path = "../client", features = ["test-support"] }
 gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
 project = { path = "../project", features = ["test-support"] }
 settings = { path = "../settings2", package = "settings2", features = ["test-support"] }

crates/zed/Cargo.toml 🔗

@@ -20,14 +20,14 @@ audio = { package = "audio2", path = "../audio2" }
 activity_indicator = { path = "../activity_indicator"}
 auto_update = { path = "../auto_update" }
 breadcrumbs = { path = "../breadcrumbs" }
-call = { package = "call2", path = "../call2" }
-channel = { package = "channel2", path = "../channel2" }
+call = { path = "../call" }
+channel = { path = "../channel" }
 cli = { path = "../cli" }
 collab_ui = { path = "../collab_ui" }
 collections = { path = "../collections" }
 command_palette = { path = "../command_palette" }
 # component_test = { path = "../component_test" }
-client = { package = "client2", path = "../client2" }
+client = { path = "../client" }
 # clock = { path = "../clock" }
 copilot = { path = "../copilot" }
 copilot_button = { path = "../copilot_button" }
@@ -144,7 +144,7 @@ urlencoding = "2.1.2"
 uuid.workspace = true
 
 [dev-dependencies]
-call = { package = "call2", path = "../call2", features = ["test-support"] }
+call = { path = "../call", features = ["test-support"] }
 # client = { path = "../client", features = ["test-support"] }
 # editor = { path = "../editor", features = ["test-support"] }
 # gpui = { path = "../gpui", features = ["test-support"] }