WIP: Merge branch 'main' into zed2-workspace

Created by Nathan Sobo

Change summary

Cargo.lock                                     |   25 
crates/ai2/src/auth.rs                         |   10 
crates/ai2/src/providers/open_ai/completion.rs |   63 
crates/ai2/src/providers/open_ai/embedding.rs  |   58 
crates/ai2/src/test.rs                         |   14 
crates/audio2/src/audio2.rs                    |   72 
crates/call2/src/call2.rs                      |    4 
crates/call2/src/room.rs                       |   33 
crates/client2/src/client2.rs                  |  104 
crates/client2/src/telemetry.rs                |    6 
crates/client2/src/test.rs                     |   10 
crates/copilot2/src/copilot2.rs                |   14 
crates/db2/src/db2.rs                          |    2 
crates/fs2/src/fs2.rs                          |    8 
crates/fuzzy2/src/paths.rs                     |    4 
crates/fuzzy2/src/strings.rs                   |    4 
crates/gpui/src/executor.rs                    |    8 
crates/gpui2/src/app.rs                        |  381 
crates/gpui2/src/app/async_context.rs          |  209 
crates/gpui2/src/app/entity_map.rs             |   84 
crates/gpui2/src/app/model_context.rs          |   60 
crates/gpui2/src/app/test_context.rs           |  167 
crates/gpui2/src/element.rs                    |   24 
crates/gpui2/src/executor.rs                   |  152 
crates/gpui2/src/gpui2.rs                      |  189 
crates/gpui2/src/interactive.rs                |   50 
crates/gpui2/src/platform.rs                   |   18 
crates/gpui2/src/platform/mac/dispatcher.rs    |   46 
crates/gpui2/src/platform/mac/platform.rs      |   71 
crates/gpui2/src/platform/mac/window.rs        |   57 
crates/gpui2/src/platform/test/dispatcher.rs   |  121 
crates/gpui2/src/platform/test/platform.rs     |   20 
crates/gpui2/src/subscription.rs               |    6 
crates/gpui2/src/view.rs                       |    8 
crates/gpui2/src/window.rs                     |  196 
crates/gpui2_macros/src/test.rs                |   12 
crates/install_cli2/src/install_cli2.rs        |    4 
crates/journal2/src/journal2.rs                |    2 
crates/language2/src/buffer.rs                 |   18 
crates/language2/src/buffer_tests.rs           |   11 
crates/language2/src/language2.rs              |    8 
crates/language2/src/outline.rs                |    4 
crates/live_kit_client2/examples/test_app.rs   |    4 
crates/live_kit_client2/src/test.rs            |    6 
crates/lsp2/src/lsp2.rs                        |   29 
crates/multi_buffer2/src/multi_buffer2.rs      |    4 
crates/prettier2/src/prettier2.rs              |    2 
crates/project/src/project_tests.rs            |  116 
crates/project2/Cargo.toml                     |    1 
crates/project2/src/lsp_command.rs             |   24 
crates/project2/src/project2.rs                |  112 
crates/project2/src/project_tests.rs           | 8170 ++++++++++---------
crates/project2/src/worktree.rs                |  134 
crates/rpc2/src/conn.rs                        |    4 
crates/rpc2/src/peer.rs                        |    5 
crates/settings2/src/settings_file.rs          |    9 
crates/sqlez/src/thread_safe_connection.rs     |    6 
crates/storybook2/src/stories/colors.rs        |   13 
crates/terminal2/src/terminal2.rs              |   22 
crates/text2/Cargo.toml                        |   37 
crates/text2/src/anchor.rs                     |  144 
crates/text2/src/locator.rs                    |  125 
crates/text2/src/network.rs                    |   69 
crates/text2/src/operation_queue.rs            |  153 
crates/text2/src/patch.rs                      |  594 +
crates/text2/src/selection.rs                  |  123 
crates/text2/src/subscription.rs               |   48 
crates/text2/src/tests.rs                      |  764 +
crates/text2/src/text2.rs                      | 2682 ++++++
crates/text2/src/undo_map.rs                   |  112 
crates/theme2/src/default_colors.rs            |  220 
crates/theme2/src/scale.rs                     |  174 
crates/ui2/src/components/panes.rs             |    2 
crates/ui2/src/components/tab.rs               |    2 
crates/zed2/Cargo.toml                         |    4 
crates/zed2/src/main.rs                        |   24 
76 files changed, 10,552 insertions(+), 5,743 deletions(-)

Detailed changes

Cargo.lock

@@ -8806,6 +8806,29 @@ dependencies = [
  "util",
 ]
 
+[[package]]
+name = "text2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clock",
+ "collections",
+ "ctor",
+ "digest 0.9.0",
+ "env_logger 0.9.3",
+ "gpui2",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "postage",
+ "rand 0.8.5",
+ "regex",
+ "rope",
+ "smallvec",
+ "sum_tree",
+ "util",
+]
+
 [[package]]
 name = "textwrap"
 version = "0.16.0"
@@ -11089,7 +11112,7 @@ dependencies = [
  "smol",
  "sum_tree",
  "tempdir",
- "text",
+ "text2",
  "theme2",
  "thiserror",
  "tiny_http",

crates/ai2/src/auth.rs

@@ -1,4 +1,3 @@
-use async_trait::async_trait;
 use gpui2::AppContext;
 
 #[derive(Clone, Debug)]
@@ -8,10 +7,9 @@ pub enum ProviderCredential {
     NotNeeded,
 }
 
-#[async_trait]
-pub trait CredentialProvider: Send + Sync {
+pub trait CredentialProvider {
     fn has_credentials(&self) -> bool;
-    async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
-    async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
-    async fn delete_credentials(&self, cx: &mut AppContext);
+    fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
+    fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
+    fn delete_credentials(&self, cx: &mut AppContext);
 }
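
With `#[async_trait]` dropped, the trait is now implemented with plain synchronous methods, as the fake providers in crates/ai2/src/test.rs below do. A minimal sketch of such an implementation (the `NoopProvider` name is hypothetical and not part of this change; it assumes `CredentialProvider` and `ProviderCredential` are in scope):

use gpui2::AppContext;

// Hypothetical provider that needs no credentials; mirrors the fake providers
// updated later in this diff.
struct NoopProvider;

impl CredentialProvider for NoopProvider {
    fn has_credentials(&self) -> bool {
        true
    }
    fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
        ProviderCredential::NotNeeded
    }
    fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
    fn delete_credentials(&self, _cx: &mut AppContext) {}
}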

crates/ai2/src/providers/open_ai/completion.rs

@@ -1,10 +1,9 @@
 use anyhow::{anyhow, Result};
-use async_trait::async_trait;
 use futures::{
     future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
     Stream, StreamExt,
 };
-use gpui2::{AppContext, Executor};
+use gpui2::{AppContext, BackgroundExecutor};
 use isahc::{http::StatusCode, Request, RequestExt};
 use parking_lot::RwLock;
 use serde::{Deserialize, Serialize};
@@ -105,7 +104,7 @@ pub struct OpenAIResponseStreamEvent {
 
 pub async fn stream_completion(
     credential: ProviderCredential,
-    executor: Arc<Executor>,
+    executor: Arc<BackgroundExecutor>,
     request: Box<dyn CompletionRequest>,
 ) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
     let api_key = match credential {
@@ -198,11 +197,11 @@ pub async fn stream_completion(
 pub struct OpenAICompletionProvider {
     model: OpenAILanguageModel,
     credential: Arc<RwLock<ProviderCredential>>,
-    executor: Arc<Executor>,
+    executor: Arc<BackgroundExecutor>,
 }
 
 impl OpenAICompletionProvider {
-    pub fn new(model_name: &str, executor: Arc<Executor>) -> Self {
+    pub fn new(model_name: &str, executor: Arc<BackgroundExecutor>) -> Self {
         let model = OpenAILanguageModel::load(model_name);
         let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
         Self {
@@ -213,7 +212,6 @@ impl OpenAICompletionProvider {
     }
 }
 
-#[async_trait]
 impl CredentialProvider for OpenAICompletionProvider {
     fn has_credentials(&self) -> bool {
         match *self.credential.read() {
@@ -221,52 +219,45 @@ impl CredentialProvider for OpenAICompletionProvider {
             _ => false,
         }
     }
-    async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
-        let existing_credential = self.credential.read().clone();
-
-        let retrieved_credential = cx
-            .run_on_main(move |cx| match existing_credential {
-                ProviderCredential::Credentials { .. } => {
-                    return existing_credential.clone();
-                }
-                _ => {
-                    if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
-                        return ProviderCredential::Credentials { api_key };
-                    }
 
-                    if let Some(Some((_, api_key))) = cx.read_credentials(OPENAI_API_URL).log_err()
-                    {
-                        if let Some(api_key) = String::from_utf8(api_key).log_err() {
-                            return ProviderCredential::Credentials { api_key };
-                        } else {
-                            return ProviderCredential::NoCredentials;
-                        }
+    fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
+        let existing_credential = self.credential.read().clone();
+        let retrieved_credential = match existing_credential {
+            ProviderCredential::Credentials { .. } => existing_credential.clone(),
+            _ => {
+                if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
+                    ProviderCredential::Credentials { api_key }
+                } else if let Some(Some((_, api_key))) =
+                    cx.read_credentials(OPENAI_API_URL).log_err()
+                {
+                    if let Some(api_key) = String::from_utf8(api_key).log_err() {
+                        ProviderCredential::Credentials { api_key }
                     } else {
-                        return ProviderCredential::NoCredentials;
+                        ProviderCredential::NoCredentials
                     }
+                } else {
+                    ProviderCredential::NoCredentials
                 }
-            })
-            .await;
-
+            }
+        };
         *self.credential.write() = retrieved_credential.clone();
         retrieved_credential
     }
 
-    async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
+    fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
         *self.credential.write() = credential.clone();
         let credential = credential.clone();
-        cx.run_on_main(move |cx| match credential {
+        match credential {
             ProviderCredential::Credentials { api_key } => {
                 cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                     .log_err();
             }
             _ => {}
-        })
-        .await;
+        }
     }
-    async fn delete_credentials(&self, cx: &mut AppContext) {
-        cx.run_on_main(move |cx| cx.delete_credentials(OPENAI_API_URL).log_err())
-            .await;
+
+    fn delete_credentials(&self, cx: &mut AppContext) {
+        cx.delete_credentials(OPENAI_API_URL).log_err();
         *self.credential.write() = ProviderCredential::NoCredentials;
     }
 }

crates/ai2/src/providers/open_ai/embedding.rs

@@ -1,7 +1,7 @@
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use futures::AsyncReadExt;
-use gpui2::Executor;
+use gpui2::BackgroundExecutor;
 use gpui2::{serde_json, AppContext};
 use isahc::http::StatusCode;
 use isahc::prelude::Configurable;
@@ -35,7 +35,7 @@ pub struct OpenAIEmbeddingProvider {
     model: OpenAILanguageModel,
     credential: Arc<RwLock<ProviderCredential>>,
     pub client: Arc<dyn HttpClient>,
-    pub executor: Arc<Executor>,
+    pub executor: Arc<BackgroundExecutor>,
     rate_limit_count_rx: watch::Receiver<Option<Instant>>,
     rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
 }
@@ -66,7 +66,7 @@ struct OpenAIEmbeddingUsage {
 }
 
 impl OpenAIEmbeddingProvider {
-    pub fn new(client: Arc<dyn HttpClient>, executor: Arc<Executor>) -> Self {
+    pub fn new(client: Arc<dyn HttpClient>, executor: Arc<BackgroundExecutor>) -> Self {
         let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
         let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
 
@@ -146,7 +146,6 @@ impl OpenAIEmbeddingProvider {
     }
 }
 
-#[async_trait]
 impl CredentialProvider for OpenAIEmbeddingProvider {
     fn has_credentials(&self) -> bool {
         match *self.credential.read() {
@@ -154,52 +153,45 @@ impl CredentialProvider for OpenAIEmbeddingProvider {
             _ => false,
         }
     }
-    async fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
+    fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
         let existing_credential = self.credential.read().clone();
 
-        let retrieved_credential = cx
-            .run_on_main(move |cx| match existing_credential {
-                ProviderCredential::Credentials { .. } => {
-                    return existing_credential.clone();
-                }
-                _ => {
-                    if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
-                        return ProviderCredential::Credentials { api_key };
-                    }
-
-                    if let Some(Some((_, api_key))) = cx.read_credentials(OPENAI_API_URL).log_err()
-                    {
-                        if let Some(api_key) = String::from_utf8(api_key).log_err() {
-                            return ProviderCredential::Credentials { api_key };
-                        } else {
-                            return ProviderCredential::NoCredentials;
-                        }
+        let retrieved_credential = match existing_credential {
+            ProviderCredential::Credentials { .. } => existing_credential.clone(),
+            _ => {
+                if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
+                    ProviderCredential::Credentials { api_key }
+                } else if let Some(Some((_, api_key))) =
+                    cx.read_credentials(OPENAI_API_URL).log_err()
+                {
+                    if let Some(api_key) = String::from_utf8(api_key).log_err() {
+                        ProviderCredential::Credentials { api_key }
                     } else {
-                        return ProviderCredential::NoCredentials;
+                        ProviderCredential::NoCredentials
                     }
+                } else {
+                    ProviderCredential::NoCredentials
                 }
-            })
-            .await;
+            }
+        };
 
         *self.credential.write() = retrieved_credential.clone();
         retrieved_credential
     }
 
-    async fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
+    fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
         *self.credential.write() = credential.clone();
-        let credential = credential.clone();
-        cx.run_on_main(move |cx| match credential {
+        match credential {
             ProviderCredential::Credentials { api_key } => {
                 cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
                     .log_err();
             }
             _ => {}
-        })
-        .await;
+        }
     }
-    async fn delete_credentials(&self, cx: &mut AppContext) {
-        cx.run_on_main(move |cx| cx.delete_credentials(OPENAI_API_URL).log_err())
-            .await;
+
+    fn delete_credentials(&self, cx: &mut AppContext) {
+        cx.delete_credentials(OPENAI_API_URL).log_err();
         *self.credential.write() = ProviderCredential::NoCredentials;
     }
 }

crates/ai2/src/test.rs

@@ -100,16 +100,15 @@ impl FakeEmbeddingProvider {
     }
 }
 
-#[async_trait]
 impl CredentialProvider for FakeEmbeddingProvider {
     fn has_credentials(&self) -> bool {
         true
     }
-    async fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
+    fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
         ProviderCredential::NotNeeded
     }
-    async fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
-    async fn delete_credentials(&self, _cx: &mut AppContext) {}
+    fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
+    fn delete_credentials(&self, _cx: &mut AppContext) {}
 }
 
 #[async_trait]
@@ -162,16 +161,15 @@ impl FakeCompletionProvider {
     }
 }
 
-#[async_trait]
 impl CredentialProvider for FakeCompletionProvider {
     fn has_credentials(&self) -> bool {
         true
     }
-    async fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
+    fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
         ProviderCredential::NotNeeded
     }
-    async fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
-    async fn delete_credentials(&self, _cx: &mut AppContext) {}
+    fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
+    fn delete_credentials(&self, _cx: &mut AppContext) {}
 }
 
 impl CompletionProvider for FakeCompletionProvider {

crates/audio2/src/audio2.rs

@@ -1,14 +1,13 @@
 use assets::SoundRegistry;
-use futures::{channel::mpsc, StreamExt};
-use gpui2::{AppContext, AssetSource, Executor};
+use gpui2::{AppContext, AssetSource};
 use rodio::{OutputStream, OutputStreamHandle};
 use util::ResultExt;
 
 mod assets;
 
 pub fn init(source: impl AssetSource, cx: &mut AppContext) {
-    cx.set_global(Audio::new(cx.executor()));
     cx.set_global(SoundRegistry::new(source));
+    cx.set_global(Audio::new());
 }
 
 pub enum Sound {
@@ -34,15 +33,18 @@ impl Sound {
 }
 
 pub struct Audio {
-    tx: mpsc::UnboundedSender<Box<dyn FnOnce(&mut AudioState) + Send>>,
-}
-
-struct AudioState {
     _output_stream: Option<OutputStream>,
     output_handle: Option<OutputStreamHandle>,
 }
 
-impl AudioState {
+impl Audio {
+    pub fn new() -> Self {
+        Self {
+            _output_stream: None,
+            output_handle: None,
+        }
+    }
+
     fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
         if self.output_handle.is_none() {
             let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
@@ -53,59 +55,27 @@ impl AudioState {
         self.output_handle.as_ref()
     }
 
-    fn take(&mut self) {
-        self._output_stream.take();
-        self.output_handle.take();
-    }
-}
-
-impl Audio {
-    pub fn new(executor: &Executor) -> Self {
-        let (tx, mut rx) = mpsc::unbounded::<Box<dyn FnOnce(&mut AudioState) + Send>>();
-        executor
-            .spawn_on_main(|| async move {
-                let mut audio = AudioState {
-                    _output_stream: None,
-                    output_handle: None,
-                };
-
-                while let Some(f) = rx.next().await {
-                    (f)(&mut audio);
-                }
-            })
-            .detach();
-
-        Self { tx }
-    }
-
     pub fn play_sound(sound: Sound, cx: &mut AppContext) {
         if !cx.has_global::<Self>() {
             return;
         }
 
-        let Some(source) = SoundRegistry::global(cx).get(sound.file()).log_err() else {
-            return;
-        };
-
-        let this = cx.global::<Self>();
-        this.tx
-            .unbounded_send(Box::new(move |state| {
-                if let Some(output_handle) = state.ensure_output_exists() {
-                    output_handle.play_raw(source).log_err();
-                }
-            }))
-            .ok();
+        cx.update_global::<Self, _>(|this, cx| {
+            let output_handle = this.ensure_output_exists()?;
+            let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
+            output_handle.play_raw(source).log_err()?;
+            Some(())
+        });
     }
 
-    pub fn end_call(cx: &AppContext) {
+    pub fn end_call(cx: &mut AppContext) {
         if !cx.has_global::<Self>() {
             return;
         }
 
-        let this = cx.global::<Self>();
-
-        this.tx
-            .unbounded_send(Box::new(move |state| state.take()))
-            .ok();
+        cx.update_global::<Self, _>(|this, _| {
+            this._output_stream.take();
+            this.output_handle.take();
+        });
     }
 }

crates/call2/src/call2.rs

@@ -196,7 +196,7 @@ impl ActiveCall {
                 })
                 .shared();
             self.pending_room_creation = Some(room.clone());
-            cx.executor().spawn(async move {
+            cx.background_executor().spawn(async move {
                 room.await.map_err(|err| anyhow!("{:?}", err))?;
                 anyhow::Ok(())
             })
@@ -230,7 +230,7 @@ impl ActiveCall {
         };
 
         let client = self.client.clone();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             client
                 .request(proto::CancelCall {
                     room_id,

crates/call2/src/room.rs

@@ -134,7 +134,7 @@ impl Room {
                 }
             });
 
-            let _maintain_video_tracks = cx.spawn_on_main({
+            let _maintain_video_tracks = cx.spawn({
                 let room = room.clone();
                 move |this, mut cx| async move {
                     let mut track_video_changes = room.remote_video_track_updates();
@@ -153,7 +153,7 @@ impl Room {
                 }
             });
 
-            let _maintain_audio_tracks = cx.spawn_on_main({
+            let _maintain_audio_tracks = cx.spawn({
                 let room = room.clone();
                 |this, mut cx| async move {
                     let mut track_audio_changes = room.remote_audio_track_updates();
@@ -326,7 +326,7 @@ impl Room {
     fn app_will_quit(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
         let task = if self.status.is_online() {
             let leave = self.leave_internal(cx);
-            Some(cx.executor().spawn(async move {
+            Some(cx.background_executor().spawn(async move {
                 leave.await.log_err();
             }))
         } else {
@@ -394,7 +394,7 @@ impl Room {
         self.clear_state(cx);
 
         let leave_room = self.client.request(proto::LeaveRoom {});
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             leave_room.await?;
             anyhow::Ok(())
         })
@@ -449,7 +449,8 @@ impl Room {
 
                 // Wait for client to re-establish a connection to the server.
                 {
-                    let mut reconnection_timeout = cx.executor().timer(RECONNECT_TIMEOUT).fuse();
+                    let mut reconnection_timeout =
+                        cx.background_executor().timer(RECONNECT_TIMEOUT).fuse();
                     let client_reconnection = async {
                         let mut remaining_attempts = 3;
                         while remaining_attempts > 0 {
@@ -1195,7 +1196,7 @@ impl Room {
         };
 
         cx.notify();
-        cx.executor().spawn_on_main(move || async move {
+        cx.background_executor().spawn(async move {
             client
                 .request(proto::UpdateParticipantLocation {
                     room_id,
@@ -1300,7 +1301,9 @@ impl Room {
                                 live_kit.room.unpublish_track(publication);
                             } else {
                                 if muted {
-                                    cx.executor().spawn(publication.set_mute(muted)).detach();
+                                    cx.background_executor()
+                                        .spawn(publication.set_mute(muted))
+                                        .detach();
                                 }
                                 live_kit.microphone_track = LocalTrack::Published {
                                     track_publication: publication,
@@ -1343,7 +1346,7 @@ impl Room {
             return Task::ready(Err(anyhow!("live-kit was not initialized")));
         };
 
-        cx.spawn_on_main(move |this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             let publish_track = async {
                 let displays = displays.await?;
                 let display = displays
@@ -1386,7 +1389,9 @@ impl Room {
                                 live_kit.room.unpublish_track(publication);
                             } else {
                                 if muted {
-                                    cx.executor().spawn(publication.set_mute(muted)).detach();
+                                    cx.background_executor()
+                                        .spawn(publication.set_mute(muted))
+                                        .detach();
                                 }
                                 live_kit.screen_track = LocalTrack::Published {
                                     track_publication: publication,
@@ -1453,14 +1458,11 @@ impl Room {
                     .remote_audio_track_publications(&participant.user.id.to_string())
                 {
                     let deafened = live_kit.deafened;
-                    tasks.push(
-                        cx.executor()
-                            .spawn_on_main(move || track.set_enabled(!deafened)),
-                    );
+                    tasks.push(cx.foreground_executor().spawn(track.set_enabled(!deafened)));
                 }
             }
 
-            Ok(cx.executor().spawn_on_main(|| async {
+            Ok(cx.foreground_executor().spawn(async move {
                 if let Some(mute_task) = mute_task {
                     mute_task.await?;
                 }
@@ -1551,7 +1553,8 @@ impl LiveKitRoom {
                 *muted = should_mute;
                 cx.notify();
                 Ok((
-                    cx.executor().spawn(track_publication.set_mute(*muted)),
+                    cx.background_executor()
+                        .spawn(track_publication.set_mute(*muted)),
                     old_muted,
                 ))
             }

crates/client2/src/client2.rs

@@ -11,7 +11,8 @@ use async_tungstenite::tungstenite::{
     http::{Request, StatusCode},
 };
 use futures::{
-    future::BoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryFutureExt as _, TryStreamExt,
+    future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryFutureExt as _,
+    TryStreamExt,
 };
 use gpui2::{
     serde_json, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Model, SemanticVersion, Task,
@@ -240,7 +241,7 @@ struct ClientState {
                     Box<dyn AnyTypedEnvelope>,
                     &Arc<Client>,
                     AsyncAppContext,
-                ) -> BoxFuture<'static, Result<()>>,
+                ) -> LocalBoxFuture<'static, Result<()>>,
         >,
     >,
 }
@@ -310,10 +311,7 @@ pub struct PendingEntitySubscription<T: 'static> {
     consumed: bool,
 }
 
-impl<T> PendingEntitySubscription<T>
-where
-    T: 'static + Send,
-{
+impl<T: 'static> PendingEntitySubscription<T> {
     pub fn set_model(mut self, model: &Model<T>, cx: &mut AsyncAppContext) -> Subscription {
         self.consumed = true;
         let mut state = self.client.state.write();
@@ -341,10 +339,7 @@ where
     }
 }
 
-impl<T> Drop for PendingEntitySubscription<T>
-where
-    T: 'static,
-{
+impl<T: 'static> Drop for PendingEntitySubscription<T> {
     fn drop(&mut self) {
         if !self.consumed {
             let mut state = self.client.state.write();
@@ -505,7 +500,7 @@ impl Client {
                                 },
                                 &cx,
                             );
-                            cx.executor().timer(delay).await;
+                            cx.background_executor().timer(delay).await;
                             delay = delay
                                 .mul_f32(rng.gen_range(1.0..=2.0))
                                 .min(reconnect_interval);
@@ -529,7 +524,7 @@ impl Client {
         remote_id: u64,
     ) -> Result<PendingEntitySubscription<T>>
     where
-        T: 'static + Send,
+        T: 'static,
     {
         let id = (TypeId::of::<T>(), remote_id);
 
@@ -557,9 +552,13 @@ impl Client {
     ) -> Subscription
     where
         M: EnvelopedMessage,
-        E: 'static + Send,
-        H: 'static + Send + Sync + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<()>> + Send,
+        E: 'static,
+        H: 'static
+            + Sync
+            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
+            + Send
+            + Sync,
+        F: 'static + Future<Output = Result<()>>,
     {
         let message_type_id = TypeId::of::<M>();
 
@@ -573,7 +572,7 @@ impl Client {
             Arc::new(move |subscriber, envelope, client, cx| {
                 let subscriber = subscriber.downcast::<E>().unwrap();
                 let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
-                handler(subscriber, *envelope, client.clone(), cx).boxed()
+                handler(subscriber, *envelope, client.clone(), cx).boxed_local()
             }),
         );
         if prev_handler.is_some() {
@@ -599,9 +598,13 @@ impl Client {
     ) -> Subscription
     where
         M: RequestMessage,
-        E: 'static + Send,
-        H: 'static + Send + Sync + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<M::Response>> + Send,
+        E: 'static,
+        H: 'static
+            + Sync
+            + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F
+            + Send
+            + Sync,
+        F: 'static + Future<Output = Result<M::Response>>,
     {
         self.add_message_handler(model, move |handle, envelope, this, cx| {
             Self::respond_to_request(
@@ -615,9 +618,9 @@ impl Client {
     pub fn add_model_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage,
-        E: 'static + Send,
-        H: 'static + Send + Sync + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<()>> + Send,
+        E: 'static,
+        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
+        F: 'static + Future<Output = Result<()>>,
     {
         self.add_entity_message_handler::<M, E, _, _>(move |subscriber, message, client, cx| {
             handler(subscriber.downcast::<E>().unwrap(), message, client, cx)
@@ -627,9 +630,9 @@ impl Client {
     fn add_entity_message_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage,
-        E: 'static + Send,
-        H: 'static + Send + Sync + Fn(AnyModel, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<()>> + Send,
+        E: 'static,
+        H: 'static + Fn(AnyModel, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
+        F: 'static + Future<Output = Result<()>>,
     {
         let model_type_id = TypeId::of::<E>();
         let message_type_id = TypeId::of::<M>();
@@ -655,7 +658,7 @@ impl Client {
             message_type_id,
             Arc::new(move |handle, envelope, client, cx| {
                 let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
-                handler(handle, *envelope, client.clone(), cx).boxed()
+                handler(handle, *envelope, client.clone(), cx).boxed_local()
             }),
         );
         if prev_handler.is_some() {
@@ -666,9 +669,9 @@ impl Client {
     pub fn add_model_request_handler<M, E, H, F>(self: &Arc<Self>, handler: H)
     where
         M: EntityMessage + RequestMessage,
-        E: 'static + Send,
-        H: 'static + Send + Sync + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
-        F: 'static + Future<Output = Result<M::Response>> + Send,
+        E: 'static,
+        H: 'static + Fn(Model<E>, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F + Send + Sync,
+        F: 'static + Future<Output = Result<M::Response>>,
     {
         self.add_model_message_handler(move |entity, envelope, client, cx| {
             Self::respond_to_request::<M, _>(
@@ -705,7 +708,7 @@ impl Client {
         read_credentials_from_keychain(cx).await.is_some()
     }
 
-    #[async_recursion]
+    #[async_recursion(?Send)]
     pub async fn authenticate_and_connect(
         self: &Arc<Self>,
         try_keychain: bool,
@@ -763,7 +766,8 @@ impl Client {
             self.set_status(Status::Reconnecting, cx);
         }
 
-        let mut timeout = futures::FutureExt::fuse(cx.executor().timer(CONNECTION_TIMEOUT));
+        let mut timeout =
+            futures::FutureExt::fuse(cx.background_executor().timer(CONNECTION_TIMEOUT));
         futures::select_biased! {
             connection = self.establish_connection(&credentials, cx).fuse() => {
                 match connection {
@@ -814,7 +818,7 @@ impl Client {
         conn: Connection,
         cx: &AsyncAppContext,
     ) -> Result<()> {
-        let executor = cx.executor();
+        let executor = cx.background_executor();
         log::info!("add connection to peer");
         let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn, {
             let executor = executor.clone();
@@ -978,7 +982,7 @@ impl Client {
             .header("x-zed-protocol-version", rpc2::PROTOCOL_VERSION);
 
         let http = self.http.clone();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?;
             let rpc_host = rpc_url
                 .host_str()
@@ -1049,7 +1053,7 @@ impl Client {
                 write!(&mut url, "&impersonate={}", impersonate_login).unwrap();
             }
 
-            cx.run_on_main(move |cx| cx.open_url(&url))?.await;
+            cx.update(|cx| cx.open_url(&url))?;
 
             // Receive the HTTP request from the user's browser. Retrieve the user id and encrypted
             // access token from the query params.
@@ -1100,7 +1104,7 @@ impl Client {
             let access_token = private_key
                 .decrypt_string(&access_token)
                 .context("failed to decrypt access token")?;
-            cx.run_on_main(|cx| cx.activate(true))?.await;
+            cx.update(|cx| cx.activate(true))?;
 
             Ok(Credentials {
                 user_id: user_id.parse()?,
@@ -1292,7 +1296,7 @@ impl Client {
                 sender_id,
                 type_name
             );
-            cx.spawn_on_main(move |_| async move {
+            cx.spawn(move |_| async move {
                     match future.await {
                         Ok(()) => {
                             log::debug!(
@@ -1331,9 +1335,8 @@ async fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credenti
     }
 
     let (user_id, access_token) = cx
-        .run_on_main(|cx| cx.read_credentials(&ZED_SERVER_URL).log_err().flatten())
-        .ok()?
-        .await?;
+        .update(|cx| cx.read_credentials(&ZED_SERVER_URL).log_err().flatten())
+        .ok()??;
 
     Some(Credentials {
         user_id: user_id.parse().ok()?,
@@ -1345,19 +1348,17 @@ async fn write_credentials_to_keychain(
     credentials: Credentials,
     cx: &AsyncAppContext,
 ) -> Result<()> {
-    cx.run_on_main(move |cx| {
+    cx.update(move |cx| {
         cx.write_credentials(
             &ZED_SERVER_URL,
             &credentials.user_id.to_string(),
             credentials.access_token.as_bytes(),
         )
     })?
-    .await
 }
 
 async fn delete_credentials_from_keychain(cx: &AsyncAppContext) -> Result<()> {
-    cx.run_on_main(move |cx| cx.delete_credentials(&ZED_SERVER_URL))?
-        .await
+    cx.update(move |cx| cx.delete_credentials(&ZED_SERVER_URL))?
 }
 
 const WORKTREE_URL_PREFIX: &str = "zed://worktrees/";
@@ -1382,7 +1383,7 @@ mod tests {
     use super::*;
     use crate::test::FakeServer;
 
-    use gpui2::{Context, Executor, TestAppContext};
+    use gpui2::{BackgroundExecutor, Context, TestAppContext};
     use parking_lot::Mutex;
     use std::future;
     use util::http::FakeHttpClient;
@@ -1422,14 +1423,14 @@ mod tests {
     }
 
     #[gpui2::test(iterations = 10)]
-    async fn test_connection_timeout(executor: Executor, cx: &mut TestAppContext) {
+    async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
         let user_id = 5;
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
         let mut status = client.status();
 
         // Time out when client tries to connect.
         client.override_authenticate(move |cx| {
-            cx.executor().spawn(async move {
+            cx.background_executor().spawn(async move {
                 Ok(Credentials {
                     user_id,
                     access_token: "token".into(),
@@ -1437,7 +1438,7 @@ mod tests {
             })
         });
         client.override_establish_connection(|_, cx| {
-            cx.executor().spawn(async move {
+            cx.background_executor().spawn(async move {
                 future::pending::<()>().await;
                 unreachable!()
             })
@@ -1471,7 +1472,7 @@ mod tests {
         // Time out when re-establishing the connection.
         server.allow_connections();
         client.override_establish_connection(|_, cx| {
-            cx.executor().spawn(async move {
+            cx.background_executor().spawn(async move {
                 future::pending::<()>().await;
                 unreachable!()
             })
@@ -1490,7 +1491,10 @@ mod tests {
     }
 
     #[gpui2::test(iterations = 10)]
-    async fn test_authenticating_more_than_once(cx: &mut TestAppContext, executor: Executor) {
+    async fn test_authenticating_more_than_once(
+        cx: &mut TestAppContext,
+        executor: BackgroundExecutor,
+    ) {
         let auth_count = Arc::new(Mutex::new(0));
         let dropped_auth_count = Arc::new(Mutex::new(0));
         let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
@@ -1500,7 +1504,7 @@ mod tests {
             move |cx| {
                 let auth_count = auth_count.clone();
                 let dropped_auth_count = dropped_auth_count.clone();
-                cx.executor().spawn(async move {
+                cx.background_executor().spawn(async move {
                     *auth_count.lock() += 1;
                     let _drop = util::defer(move || *dropped_auth_count.lock() += 1);
                     future::pending::<()>().await;

crates/client2/src/telemetry.rs

@@ -1,5 +1,5 @@
 use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
-use gpui2::{serde_json, AppContext, AppMetadata, Executor, Task};
+use gpui2::{serde_json, AppContext, AppMetadata, BackgroundExecutor, Task};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use serde::Serialize;
@@ -14,7 +14,7 @@ use util::{channel::ReleaseChannel, TryFutureExt};
 
 pub struct Telemetry {
     http_client: Arc<dyn HttpClient>,
-    executor: Executor,
+    executor: BackgroundExecutor,
     state: Mutex<TelemetryState>,
 }
 
@@ -123,7 +123,7 @@ impl Telemetry {
         // TODO: Replace all hardware stuff with nested SystemSpecs json
         let this = Arc::new(Self {
             http_client: client,
-            executor: cx.executor().clone(),
+            executor: cx.background_executor().clone(),
             state: Mutex::new(TelemetryState {
                 app_metadata: cx.app_metadata(),
                 architecture: env::consts::ARCH,

crates/client2/src/test.rs

@@ -1,7 +1,7 @@
 use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
 use anyhow::{anyhow, Result};
 use futures::{stream::BoxStream, StreamExt};
-use gpui2::{Context, Executor, Model, TestAppContext};
+use gpui2::{BackgroundExecutor, Context, Model, TestAppContext};
 use parking_lot::Mutex;
 use rpc2::{
     proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
@@ -14,7 +14,7 @@ pub struct FakeServer {
     peer: Arc<Peer>,
     state: Arc<Mutex<FakeServerState>>,
     user_id: u64,
-    executor: Executor,
+    executor: BackgroundExecutor,
 }
 
 #[derive(Default)]
@@ -79,10 +79,10 @@ impl FakeServer {
                         }
 
                         let (client_conn, server_conn, _) =
-                            Connection::in_memory(cx.executor().clone());
+                            Connection::in_memory(cx.background_executor().clone());
                         let (connection_id, io, incoming) =
-                            peer.add_test_connection(server_conn, cx.executor().clone());
-                        cx.executor().spawn(io).detach();
+                            peer.add_test_connection(server_conn, cx.background_executor().clone());
+                        cx.background_executor().spawn(io).detach();
                         {
                             let mut state = state.lock();
                             state.connection_id = Some(connection_id);

crates/copilot2/src/copilot2.rs

@@ -208,7 +208,7 @@ impl RegisteredBuffer {
                 let new_snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot()).ok()?;
 
                 let content_changes = cx
-                    .executor()
+                    .background_executor()
                     .spawn({
                         let new_snapshot = new_snapshot.clone();
                         async move {
@@ -535,7 +535,7 @@ impl Copilot {
                 }
             };
 
-            cx.executor()
+            cx.background_executor()
                 .spawn(task.map_err(|err| anyhow!("{:?}", err)))
         } else {
             // If we're downloading, wait until download is finished
@@ -549,7 +549,7 @@ impl Copilot {
         self.update_sign_in_status(request::SignInStatus::NotSignedIn, cx);
         if let CopilotServer::Running(RunningCopilotServer { lsp: server, .. }) = &self.server {
             let server = server.clone();
-            cx.executor().spawn(async move {
+            cx.background_executor().spawn(async move {
                 server
                     .request::<request::SignOut>(request::SignOutParams {})
                     .await?;
@@ -579,7 +579,7 @@ impl Copilot {
 
         cx.notify();
 
-        cx.executor().spawn(start_task)
+        cx.background_executor().spawn(start_task)
     }
 
     pub fn language_server(&self) -> Option<(&LanguageServerName, &Arc<LanguageServer>)> {
@@ -760,7 +760,7 @@ impl Copilot {
                 .request::<request::NotifyAccepted>(request::NotifyAcceptedParams {
                     uuid: completion.uuid.clone(),
                 });
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             request.await?;
             Ok(())
         })
@@ -784,7 +784,7 @@ impl Copilot {
                         .map(|completion| completion.uuid.clone())
                         .collect(),
                 });
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             request.await?;
             Ok(())
         })
@@ -827,7 +827,7 @@ impl Copilot {
             .map(|file| file.path().to_path_buf())
             .unwrap_or_default();
 
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let (version, snapshot) = snapshot.await?;
             let result = lsp
                 .request::<R>(request::GetCompletionsParams {

crates/db2/src/db2.rs

@@ -185,7 +185,7 @@ pub fn write_and_log<F>(cx: &mut AppContext, db_write: impl FnOnce() -> F + Send
 where
     F: Future<Output = anyhow::Result<()>> + Send,
 {
-    cx.executor()
+    cx.background_executor()
         .spawn(async move { db_write().await.log_err() })
         .detach()
 }
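
Across this merge, calls to `cx.executor()` and `cx.spawn_on_main()` are split between `cx.background_executor()` (for `Send` futures running off the main thread) and `cx.foreground_executor()` or plain `cx.spawn` (for main-thread work). A minimal hedged sketch of the background pattern, modeled on the `write_and_log` change above; the helper name and log line are illustrative only:

use gpui2::AppContext;

// Hypothetical helper: spawn a Send future on the background executor and let it
// run to completion by detaching the returned Task.
fn log_in_background(cx: &mut AppContext, message: String) {
    cx.background_executor()
        .spawn(async move {
            // Runs off the main thread, so the future must be `Send + 'static`.
            log::info!("{}", message);
        })
        .detach();
}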

crates/fs2/src/fs2.rs

@@ -288,7 +288,7 @@ impl Fs for RealFs {
 pub struct FakeFs {
     // Use an unfair lock to ensure tests are deterministic.
     state: Mutex<FakeFsState>,
-    executor: gpui2::Executor,
+    executor: gpui2::BackgroundExecutor,
 }
 
 #[cfg(any(test, feature = "test-support"))]
@@ -434,7 +434,7 @@ lazy_static::lazy_static! {
 
 #[cfg(any(test, feature = "test-support"))]
 impl FakeFs {
-    pub fn new(executor: gpui2::Executor) -> Arc<Self> {
+    pub fn new(executor: gpui2::BackgroundExecutor) -> Arc<Self> {
         Arc::new(Self {
             executor,
             state: Mutex::new(FakeFsState {
@@ -1222,11 +1222,11 @@ pub fn copy_recursive<'a>(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use gpui2::Executor;
+    use gpui2::BackgroundExecutor;
     use serde_json::json;
 
     #[gpui2::test]
-    async fn test_fake_fs(executor: Executor) {
+    async fn test_fake_fs(executor: BackgroundExecutor) {
         let fs = FakeFs::new(executor.clone());
         fs.insert_tree(
             "/root",

crates/fuzzy2/src/paths.rs

@@ -1,4 +1,4 @@
-use gpui2::Executor;
+use gpui2::BackgroundExecutor;
 use std::{
     borrow::Cow,
     cmp::{self, Ordering},
@@ -134,7 +134,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
     smart_case: bool,
     max_results: usize,
     cancel_flag: &AtomicBool,
-    executor: Executor,
+    executor: BackgroundExecutor,
 ) -> Vec<PathMatch> {
     let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
     if path_count == 0 {

crates/fuzzy2/src/strings.rs

@@ -2,7 +2,7 @@ use crate::{
     matcher::{Match, MatchCandidate, Matcher},
     CharBag,
 };
-use gpui2::Executor;
+use gpui2::BackgroundExecutor;
 use std::{
     borrow::Cow,
     cmp::{self, Ordering},
@@ -83,7 +83,7 @@ pub async fn match_strings(
     smart_case: bool,
     max_results: usize,
     cancel_flag: &AtomicBool,
-    executor: Executor,
+    executor: BackgroundExecutor,
 ) -> Vec<StringMatch> {
     if candidates.is_empty() || max_results == 0 {
         return Default::default();

crates/gpui/src/executor.rs

@@ -84,7 +84,7 @@ struct DeterministicState {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum ExecutorEvent {
     PollRunnable { id: usize },
-    EnqueuRunnable { id: usize },
+    EnqueueRunnable { id: usize },
 }
 
 #[cfg(any(test, feature = "test-support"))]
@@ -199,7 +199,7 @@ impl Deterministic {
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn_local(future, move |runnable| {
             let mut state = state.lock();
-            state.push_to_history(ExecutorEvent::EnqueuRunnable { id });
+            state.push_to_history(ExecutorEvent::EnqueueRunnable { id });
             state
                 .scheduled_from_foreground
                 .entry(cx_id)
@@ -229,7 +229,7 @@ impl Deterministic {
             let mut state = state.lock();
             state
                 .poll_history
-                .push(ExecutorEvent::EnqueuRunnable { id });
+                .push(ExecutorEvent::EnqueueRunnable { id });
             state
                 .scheduled_from_background
                 .push(BackgroundRunnable { id, runnable });
@@ -616,7 +616,7 @@ impl ExecutorEvent {
     pub fn id(&self) -> usize {
         match self {
             ExecutorEvent::PollRunnable { id } => *id,
-            ExecutorEvent::EnqueuRunnable { id } => *id,
+            ExecutorEvent::EnqueueRunnable { id } => *id,
         }
     }
 }

crates/gpui2/src/app.rs

@@ -11,35 +11,34 @@ use refineable::Refineable;
 use smallvec::SmallVec;
 #[cfg(any(test, feature = "test-support"))]
 pub use test_context::*;
-use uuid::Uuid;
 
 use crate::{
     current_platform, image_cache::ImageCache, Action, AnyBox, AnyView, AnyWindowHandle,
-    AppMetadata, AssetSource, ClipboardItem, Context, DispatchPhase, DisplayId, Entity, Executor,
-    FocusEvent, FocusHandle, FocusId, KeyBinding, Keymap, LayoutId, MainThread, MainThreadOnly,
+    AppMetadata, AssetSource, BackgroundExecutor, ClipboardItem, Context, DispatchPhase, DisplayId,
+    Entity, FocusEvent, FocusHandle, FocusId, ForegroundExecutor, KeyBinding, Keymap, LayoutId,
     Pixels, Platform, PlatformDisplay, Point, Render, SharedString, SubscriberSet, Subscription,
     SvgRenderer, Task, TextStyle, TextStyleRefinement, TextSystem, View, Window, WindowContext,
     WindowHandle, WindowId,
 };
 use anyhow::{anyhow, Result};
 use collections::{HashMap, HashSet, VecDeque};
-use futures::{future::BoxFuture, Future};
+use futures::{future::LocalBoxFuture, Future};
 use parking_lot::Mutex;
 use slotmap::SlotMap;
 use std::{
     any::{type_name, Any, TypeId},
-    borrow::Borrow,
+    cell::RefCell,
     marker::PhantomData,
     mem,
     ops::{Deref, DerefMut},
     path::PathBuf,
-    rc::Rc,
-    sync::{atomic::Ordering::SeqCst, Arc, Weak},
+    rc::{Rc, Weak},
+    sync::{atomic::Ordering::SeqCst, Arc},
     time::Duration,
 };
 use util::http::{self, HttpClient};
 
-pub struct App(Arc<Mutex<AppContext>>);
+pub struct App(Rc<RefCell<AppContext>>);
 
 /// Represents an application before it is fully launched. Once your app is
 /// configured, you'll start the app with `App::run`.
@@ -57,13 +56,12 @@ impl App {
     /// app is fully launched.
     pub fn run<F>(self, on_finish_launching: F)
     where
-        F: 'static + FnOnce(&mut MainThread<AppContext>),
+        F: 'static + FnOnce(&mut AppContext),
     {
         let this = self.0.clone();
-        let platform = self.0.lock().platform.clone();
-        platform.borrow_on_main_thread().run(Box::new(move || {
-            let cx = &mut *this.lock();
-            let cx = unsafe { mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(cx) };
+        let platform = self.0.borrow().platform.clone();
+        platform.run(Box::new(move || {
+            let cx = &mut *this.borrow_mut();
             on_finish_launching(cx);
         }));
     }
@@ -74,16 +72,12 @@ impl App {
     where
         F: 'static + FnMut(Vec<String>, &mut AppContext),
     {
-        let this = Arc::downgrade(&self.0);
-        self.0
-            .lock()
-            .platform
-            .borrow_on_main_thread()
-            .on_open_urls(Box::new(move |urls| {
-                if let Some(app) = this.upgrade() {
-                    callback(urls, &mut app.lock());
-                }
-            }));
+        let this = Rc::downgrade(&self.0);
+        self.0.borrow().platform.on_open_urls(Box::new(move |urls| {
+            if let Some(app) = this.upgrade() {
+                callback(urls, &mut *app.borrow_mut());
+            }
+        }));
         self
     }
 
@@ -91,49 +85,50 @@ impl App {
     where
         F: 'static + FnMut(&mut AppContext),
     {
-        let this = Arc::downgrade(&self.0);
-        self.0
-            .lock()
-            .platform
-            .borrow_on_main_thread()
-            .on_reopen(Box::new(move || {
-                if let Some(app) = this.upgrade() {
-                    callback(&mut app.lock());
-                }
-            }));
+        let this = Rc::downgrade(&self.0);
+        self.0.borrow_mut().platform.on_reopen(Box::new(move || {
+            if let Some(app) = this.upgrade() {
+                callback(&mut app.borrow_mut());
+            }
+        }));
         self
     }
 
     pub fn metadata(&self) -> AppMetadata {
-        self.0.lock().app_metadata.clone()
+        self.0.borrow().app_metadata.clone()
     }
 
-    pub fn executor(&self) -> Executor {
-        self.0.lock().executor.clone()
+    pub fn background_executor(&self) -> BackgroundExecutor {
+        self.0.borrow().background_executor.clone()
+    }
+
+    pub fn foreground_executor(&self) -> ForegroundExecutor {
+        self.0.borrow().foreground_executor.clone()
     }
 
     pub fn text_system(&self) -> Arc<TextSystem> {
-        self.0.lock().text_system.clone()
+        self.0.borrow().text_system.clone()
     }
 }
 
 type ActionBuilder = fn(json: Option<serde_json::Value>) -> anyhow::Result<Box<dyn Action>>;
-type FrameCallback = Box<dyn FnOnce(&mut WindowContext) + Send>;
-type Handler = Box<dyn FnMut(&mut AppContext) -> bool + Send + 'static>;
-type Listener = Box<dyn FnMut(&dyn Any, &mut AppContext) -> bool + Send + 'static>;
-type QuitHandler = Box<dyn FnMut(&mut AppContext) -> BoxFuture<'static, ()> + Send + 'static>;
-type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut AppContext) + Send + 'static>;
+type FrameCallback = Box<dyn FnOnce(&mut WindowContext)>;
+type Handler = Box<dyn FnMut(&mut AppContext) -> bool + 'static>;
+type Listener = Box<dyn FnMut(&dyn Any, &mut AppContext) -> bool + 'static>;
+type QuitHandler = Box<dyn FnMut(&mut AppContext) -> LocalBoxFuture<'static, ()> + 'static>;
+type ReleaseListener = Box<dyn FnMut(&mut dyn Any, &mut AppContext) + 'static>;
 
 pub struct AppContext {
-    this: Weak<Mutex<AppContext>>,
-    pub(crate) platform: MainThreadOnly<dyn Platform>,
+    this: Weak<RefCell<AppContext>>,
+    pub(crate) platform: Rc<dyn Platform>,
     app_metadata: AppMetadata,
     text_system: Arc<TextSystem>,
     flushing_effects: bool,
     pending_updates: usize,
     pub(crate) active_drag: Option<AnyDrag>,
     pub(crate) next_frame_callbacks: HashMap<DisplayId, Vec<FrameCallback>>,
-    pub(crate) executor: Executor,
+    pub(crate) background_executor: BackgroundExecutor,
+    pub(crate) foreground_executor: ForegroundExecutor,
     pub(crate) svg_renderer: SvgRenderer,
     asset_source: Arc<dyn AssetSource>,
     pub(crate) image_cache: ImageCache,
@@ -143,7 +138,7 @@ pub struct AppContext {
     pub(crate) windows: SlotMap<WindowId, Option<Window>>,
     pub(crate) keymap: Arc<Mutex<Keymap>>,
     pub(crate) global_action_listeners:
-        HashMap<TypeId, Vec<Box<dyn Fn(&dyn Action, DispatchPhase, &mut Self) + Send>>>,
+        HashMap<TypeId, Vec<Box<dyn Fn(&dyn Action, DispatchPhase, &mut Self)>>>,
     action_builders: HashMap<SharedString, ActionBuilder>,
     pending_effects: VecDeque<Effect>,
     pub(crate) pending_notifications: HashSet<EntityId>,
@@ -159,11 +154,12 @@ pub struct AppContext {
 
 impl AppContext {
     pub(crate) fn new(
-        platform: Arc<dyn Platform>,
+        platform: Rc<dyn Platform>,
         asset_source: Arc<dyn AssetSource>,
         http_client: Arc<dyn HttpClient>,
-    ) -> Arc<Mutex<Self>> {
-        let executor = platform.executor();
+    ) -> Rc<RefCell<Self>> {
+        let executor = platform.background_executor();
+        let foreground_executor = platform.foreground_executor();
         assert!(
             executor.is_main_thread(),
             "must construct App on main thread"
@@ -178,16 +174,17 @@ impl AppContext {
             app_version: platform.app_version().ok(),
         };
 
-        Arc::new_cyclic(|this| {
-            Mutex::new(AppContext {
+        Rc::new_cyclic(|this| {
+            RefCell::new(AppContext {
                 this: this.clone(),
                 text_system,
-                platform: MainThreadOnly::new(platform, executor.clone()),
+                platform,
                 app_metadata,
                 flushing_effects: false,
                 pending_updates: 0,
                 next_frame_callbacks: Default::default(),
-                executor,
+                background_executor: executor,
+                foreground_executor,
                 svg_renderer: SvgRenderer::new(asset_source.clone()),
                 asset_source,
                 image_cache: ImageCache::new(http_client),
@@ -228,7 +225,7 @@ impl AppContext {
 
         let futures = futures::future::join_all(futures);
         if self
-            .executor
+            .background_executor
             .block_with_timeout(Duration::from_millis(100), futures)
             .is_err()
         {
@@ -247,7 +244,6 @@ impl AppContext {
     pub fn refresh(&mut self) {
         self.pending_effects.push_back(Effect::Refresh);
     }
-
     pub(crate) fn update<R>(&mut self, update: impl FnOnce(&mut Self) -> R) -> R {
         self.pending_updates += 1;
         let result = update(self);
@@ -267,6 +263,92 @@ impl AppContext {
             .collect()
     }
 
+    pub(crate) fn update_window<R>(
+        &mut self,
+        id: WindowId,
+        update: impl FnOnce(&mut WindowContext) -> R,
+    ) -> Result<R> {
+        self.update(|cx| {
+            let mut window = cx
+                .windows
+                .get_mut(id)
+                .ok_or_else(|| anyhow!("window not found"))?
+                .take()
+                .unwrap();
+
+            let result = update(&mut WindowContext::new(cx, &mut window));
+
+            cx.windows
+                .get_mut(id)
+                .ok_or_else(|| anyhow!("window not found"))?
+                .replace(window);
+
+            Ok(result)
+        })
+    }
+
+    /// Opens a new window with the given option and the root view returned by the given function.
+    /// The function is invoked with a `WindowContext`, which can be used to interact with window-specific
+    /// functionality.
+    pub fn open_window<V: Render>(
+        &mut self,
+        options: crate::WindowOptions,
+        build_root_view: impl FnOnce(&mut WindowContext) -> View<V> + 'static,
+    ) -> WindowHandle<V> {
+        self.update(|cx| {
+            let id = cx.windows.insert(None);
+            let handle = WindowHandle::new(id);
+            let mut window = Window::new(handle.into(), options, cx);
+            let root_view = build_root_view(&mut WindowContext::new(cx, &mut window));
+            window.root_view.replace(root_view.into());
+            cx.windows.get_mut(id).unwrap().replace(window);
+            handle
+        })
+    }
+
+    pub(crate) fn platform(&self) -> &Rc<dyn Platform> {
+        &self.platform
+    }
+
+    /// Instructs the platform to activate the application by bringing it to the foreground.
+    pub fn activate(&self, ignoring_other_apps: bool) {
+        self.platform().activate(ignoring_other_apps);
+    }
+
+    /// Writes data to the platform clipboard.
+    pub fn write_to_clipboard(&self, item: ClipboardItem) {
+        self.platform().write_to_clipboard(item)
+    }
+
+    /// Reads data from the platform clipboard.
+    pub fn read_from_clipboard(&self) -> Option<ClipboardItem> {
+        self.platform().read_from_clipboard()
+    }
+
+    /// Writes credentials to the platform keychain.
+    pub fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Result<()> {
+        self.platform().write_credentials(url, username, password)
+    }
+
+    /// Reads credentials from the platform keychain.
+    pub fn read_credentials(&self, url: &str) -> Result<Option<(String, Vec<u8>)>> {
+        self.platform().read_credentials(url)
+    }
+
+    /// Deletes credentials from the platform keychain.
+    pub fn delete_credentials(&self, url: &str) -> Result<()> {
+        self.platform().delete_credentials(url)
+    }
+
+    /// Directs the platform's default browser to open the given URL.
+    pub fn open_url(&self, url: &str) {
+        self.platform().open_url(url);
+    }
+
+    pub fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
+        self.platform().path_for_auxiliary_executable(name)
+    }
+
     pub(crate) fn push_effect(&mut self, effect: Effect) {
         match &effect {
             Effect::Notify { emitter } => {
@@ -350,8 +432,8 @@ impl AppContext {
             for (entity_id, mut entity) in dropped {
                 self.observers.remove(&entity_id);
                 self.event_listeners.remove(&entity_id);
-                for release_callback in self.release_listeners.remove(&entity_id) {
-                    release_callback(&mut entity, self);
+                for mut release_callback in self.release_listeners.remove(&entity_id) {
+                    release_callback(entity.as_mut(), self);
                 }
             }
         }
@@ -444,7 +526,7 @@ impl AppContext {
             .retain(&type_id, |observer| observer(self));
     }
 
-    fn apply_defer_effect(&mut self, callback: Box<dyn FnOnce(&mut Self) + Send + 'static>) {
+    fn apply_defer_effect(&mut self, callback: Box<dyn FnOnce(&mut Self) + 'static>) {
         callback(self);
     }
 
@@ -453,68 +535,34 @@ impl AppContext {
     pub fn to_async(&self) -> AsyncAppContext {
         AsyncAppContext {
             app: unsafe { mem::transmute(self.this.clone()) },
-            executor: self.executor.clone(),
+            background_executor: self.background_executor.clone(),
+            foreground_executor: self.foreground_executor.clone(),
         }
     }
 
     /// Obtains a reference to the executor, which can be used to spawn futures.
-    pub fn executor(&self) -> &Executor {
-        &self.executor
-    }
-
-    /// Runs the given closure on the main thread, where interaction with the platform
-    /// is possible. The given closure will be invoked with a `MainThread<AppContext>`, which
-    /// has platform-specific methods that aren't present on `AppContext`.
-    pub fn run_on_main<R>(
-        &mut self,
-        f: impl FnOnce(&mut MainThread<AppContext>) -> R + Send + 'static,
-    ) -> Task<R>
-    where
-        R: Send + 'static,
-    {
-        if self.executor.is_main_thread() {
-            Task::ready(f(unsafe {
-                mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(self)
-            }))
-        } else {
-            let this = self.this.upgrade().unwrap();
-            self.executor.run_on_main(move || {
-                let cx = &mut *this.lock();
-                cx.update(|cx| f(unsafe { mem::transmute::<&mut Self, &mut MainThread<Self>>(cx) }))
-            })
-        }
+    pub fn background_executor(&self) -> &BackgroundExecutor {
+        &self.background_executor
     }
 
-    /// Spawns the future returned by the given function on the main thread, where interaction with
-    /// the platform is possible. The given closure will be invoked with a `MainThread<AsyncAppContext>`,
-    /// which has platform-specific methods that aren't present on `AsyncAppContext`. The future will be
-    /// polled exclusively on the main thread.
-    // todo!("I think we need somehow to prevent the MainThread<AsyncAppContext> from implementing Send")
-    pub fn spawn_on_main<F, R>(
-        &self,
-        f: impl FnOnce(MainThread<AsyncAppContext>) -> F + Send + 'static,
-    ) -> Task<R>
-    where
-        F: Future<Output = R> + 'static,
-        R: Send + 'static,
-    {
-        let cx = self.to_async();
-        self.executor.spawn_on_main(move || f(MainThread(cx)))
+    /// Obtains a reference to the foreground executor, which can be used to spawn futures on the main thread.
+    pub fn foreground_executor(&self) -> &ForegroundExecutor {
+        &self.foreground_executor
     }
 
     /// Spawns the future returned by the given function on the thread pool. The closure will be invoked
     /// with AsyncAppContext, which allows the application state to be accessed across await points.
     pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
     where
-        Fut: Future<Output = R> + Send + 'static,
-        R: Send + 'static,
+        Fut: Future<Output = R> + 'static,
+        R: 'static,
     {
-        self.executor.spawn(f(self.to_async()))
+        self.foreground_executor.spawn(f(self.to_async()))
     }
 
     /// Schedules the given function to be run at the end of the current effect cycle, allowing entities
     /// that are currently on the stack to be returned to the app.
-    pub fn defer(&mut self, f: impl FnOnce(&mut AppContext) + 'static + Send) {
+    pub fn defer(&mut self, f: impl FnOnce(&mut AppContext) + 'static) {
         self.push_effect(Effect::Defer {
             callback: Box::new(f),
         });
@@ -573,7 +621,7 @@ impl AppContext {
 
     /// Access the global of the given type mutably. A default value is assigned if a global of this type has not
     /// yet been assigned.
-    pub fn default_global<G: 'static + Default + Send>(&mut self) -> &mut G {
+    pub fn default_global<G: 'static + Default>(&mut self) -> &mut G {
         let global_type = TypeId::of::<G>();
         self.push_effect(Effect::NotifyGlobalObservers { global_type });
         self.globals_by_type
@@ -584,7 +632,7 @@ impl AppContext {
     }
 
     /// Set the value of the global of the given type.
-    pub fn set_global<G: Any + Send>(&mut self, global: G) {
+    pub fn set_global<G: Any>(&mut self, global: G) {
         let global_type = TypeId::of::<G>();
         self.push_effect(Effect::NotifyGlobalObservers { global_type });
         self.globals_by_type.insert(global_type, Box::new(global));
@@ -602,7 +650,7 @@ impl AppContext {
     /// Register a callback to be invoked when a global of the given type is updated.
     pub fn observe_global<G: 'static>(
         &mut self,
-        mut f: impl FnMut(&mut Self) + Send + 'static,
+        mut f: impl FnMut(&mut Self) + 'static,
     ) -> Subscription {
         self.global_observers.insert(
             TypeId::of::<G>(),
@@ -667,7 +715,7 @@ impl AppContext {
     }
 
     /// Register a global listener for actions invoked via the keyboard.
-    pub fn on_action<A: Action>(&mut self, listener: impl Fn(&A, &mut Self) + Send + 'static) {
+    pub fn on_action<A: Action>(&mut self, listener: impl Fn(&A, &mut Self) + 'static) {
         self.global_action_listeners
             .entry(TypeId::of::<A>())
             .or_default()
@@ -712,7 +760,7 @@ impl Context for AppContext {
     /// Build an entity that is owned by the application. The given function will be invoked with
     /// a `ModelContext` and must return an object representing the entity. A `Model` will be returned
     /// which can be used to access the entity in a context.
-    fn build_model<T: 'static + Send>(
+    fn build_model<T: 'static>(
         &mut self,
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
     ) -> Model<T> {
@@ -762,107 +810,6 @@ impl Context for AppContext {
     }
 }
 
-impl<C> MainThread<C>
-where
-    C: Borrow<AppContext>,
-{
-    pub(crate) fn platform(&self) -> &dyn Platform {
-        self.0.borrow().platform.borrow_on_main_thread()
-    }
-
-    /// Instructs the platform to activate the application by bringing it to the foreground.
-    pub fn activate(&self, ignoring_other_apps: bool) {
-        self.platform().activate(ignoring_other_apps);
-    }
-
-    /// Writes data to the platform clipboard.
-    pub fn write_to_clipboard(&self, item: ClipboardItem) {
-        self.platform().write_to_clipboard(item)
-    }
-
-    /// Reads data from the platform clipboard.
-    pub fn read_from_clipboard(&self) -> Option<ClipboardItem> {
-        self.platform().read_from_clipboard()
-    }
-
-    /// Writes credentials to the platform keychain.
-    pub fn write_credentials(&self, url: &str, username: &str, password: &[u8]) -> Result<()> {
-        self.platform().write_credentials(url, username, password)
-    }
-
-    /// Reads credentials from the platform keychain.
-    pub fn read_credentials(&self, url: &str) -> Result<Option<(String, Vec<u8>)>> {
-        self.platform().read_credentials(url)
-    }
-
-    /// Deletes credentials from the platform keychain.
-    pub fn delete_credentials(&self, url: &str) -> Result<()> {
-        self.platform().delete_credentials(url)
-    }
-
-    /// Directs the platform's default browser to open the given URL.
-    pub fn open_url(&self, url: &str) {
-        self.platform().open_url(url);
-    }
-
-    pub fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
-        self.platform().path_for_auxiliary_executable(name)
-    }
-
-    pub fn displays(&self) -> Vec<Rc<dyn PlatformDisplay>> {
-        self.platform().displays()
-    }
-
-    pub fn display_for_uuid(&self, uuid: Uuid) -> Option<Rc<dyn PlatformDisplay>> {
-        self.platform()
-            .displays()
-            .into_iter()
-            .find(|display| display.uuid().ok() == Some(uuid))
-    }
-}
-
-impl MainThread<AppContext> {
-    fn update<R>(&mut self, update: impl FnOnce(&mut Self) -> R) -> R {
-        self.0.update(|cx| {
-            update(unsafe {
-                std::mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(cx)
-            })
-        })
-    }
-
-    /// Opens a new window with the given option and the root view returned by the given function.
-    /// The function is invoked with a `WindowContext`, which can be used to interact with window-specific
-    /// functionality.
-    pub fn open_window<V: Render>(
-        &mut self,
-        options: crate::WindowOptions,
-        build_root_view: impl FnOnce(&mut MainThread<WindowContext>) -> View<V> + Send + 'static,
-    ) -> WindowHandle<V> {
-        self.update(|cx| {
-            let id = cx.windows.insert(None);
-            let handle = WindowHandle::new(id);
-            let mut window = Window::new(handle.into(), options, cx);
-            let mut window_context = MainThread(WindowContext::new(cx, &mut window));
-            let root_view = build_root_view(&mut window_context);
-            window.root_view.replace(root_view.into());
-            cx.windows.get_mut(id).unwrap().replace(window);
-            handle
-        })
-    }
-
-    /// Update the global of the given type with a closure. Unlike `global_mut`, this method provides
-    /// your closure with mutable access to the `MainThread<AppContext>` and the global simultaneously.
-    pub fn update_global<G: 'static + Send, R>(
-        &mut self,
-        update: impl FnOnce(&mut G, &mut MainThread<AppContext>) -> R,
-    ) -> R {
-        self.0.update_global(|global, cx| {
-            let cx = unsafe { mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(cx) };
-            update(global, cx)
-        })
-    }
-}
-
 /// These effects are processed at the end of each application update cycle.
 pub(crate) enum Effect {
     Notify {
@@ -870,7 +817,7 @@ pub(crate) enum Effect {
     },
     Emit {
         emitter: EntityId,
-        event: Box<dyn Any + Send + 'static>,
+        event: Box<dyn Any>,
     },
     FocusChanged {
         window_handle: AnyWindowHandle,
@@ -881,7 +828,7 @@ pub(crate) enum Effect {
         global_type: TypeId,
     },
     Defer {
-        callback: Box<dyn FnOnce(&mut AppContext) + Send + 'static>,
+        callback: Box<dyn FnOnce(&mut AppContext) + 'static>,
     },
 }
 
@@ -920,15 +867,3 @@ pub(crate) struct AnyDrag {
     pub view: AnyView,
     pub cursor_offset: Point<Pixels>,
 }
-
-#[cfg(test)]
-mod tests {
-    use super::AppContext;
-
-    #[test]
-    fn test_app_context_send_sync() {
-        // This will not compile if `AppContext` does not implement `Send`
-        fn assert_send<T: Send>() {}
-        assert_send::<AppContext>();
-    }
-}
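
Note on the migration above: with MainThread<AppContext> removed, window creation and the platform helpers (clipboard, credentials, open_url) are called directly on AppContext, which is now assumed to live on the main thread. A minimal, hedged sketch of a post-migration call site; `options` and `build_root_view` stand in for whatever the caller already has and are not part of this diff:

    // Hedged sketch, not from this diff: the only change at call sites is the receiver.
    fn open_and_activate<V: Render>(
        cx: &mut AppContext,
        options: WindowOptions,
        build_root_view: impl FnOnce(&mut WindowContext) -> View<V> + 'static,
    ) -> WindowHandle<V> {
        let window = cx.open_window(options, build_root_view);
        cx.activate(true); // previously required a MainThread<AppContext>
        window
    }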

crates/gpui2/src/app/async_context.rs

@@ -1,16 +1,17 @@
 use crate::{
-    AnyView, AnyWindowHandle, AppContext, Context, Executor, MainThread, Model, ModelContext,
-    Render, Result, Task, View, ViewContext, VisualContext, WindowContext, WindowHandle,
+    AnyView, AnyWindowHandle, AppContext, BackgroundExecutor, Context, ForegroundExecutor, Model,
+    ModelContext, Render, Result, Task, View, ViewContext, VisualContext, WindowContext,
+    WindowHandle,
 };
-use anyhow::Context as _;
+use anyhow::{anyhow, Context as _};
 use derive_more::{Deref, DerefMut};
-use parking_lot::Mutex;
-use std::{future::Future, mem, sync::Weak};
+use std::{cell::RefCell, future::Future, mem, rc::Weak};
 
 #[derive(Clone)]
 pub struct AsyncAppContext {
-    pub(crate) app: Weak<Mutex<AppContext>>,
-    pub(crate) executor: Executor,
+    pub(crate) app: Weak<RefCell<AppContext>>,
+    pub(crate) background_executor: BackgroundExecutor,
+    pub(crate) foreground_executor: ForegroundExecutor,
 }
 
 impl Context for AsyncAppContext {
@@ -23,11 +24,14 @@ impl Context for AsyncAppContext {
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
     ) -> Self::Result<Model<T>>
     where
-        T: 'static + Send,
+        T: 'static,
     {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock(); // Need this to compile
-        Ok(lock.build_model(build_model))
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut app = app.borrow_mut();
+        Ok(app.build_model(build_model))
     }
 
     fn update_model<T: 'static, R>(
@@ -35,9 +39,12 @@ impl Context for AsyncAppContext {
         handle: &Model<T>,
         update: impl FnOnce(&mut T, &mut Self::ModelContext<'_, T>) -> R,
     ) -> Self::Result<R> {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock(); // Need this to compile
-        Ok(lock.update_model(handle, update))
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut app = app.borrow_mut();
+        Ok(app.update_model(handle, update))
     }
 
     fn update_window<T, F>(&mut self, window: AnyWindowHandle, f: F) -> Result<T>
@@ -45,72 +52,76 @@ impl Context for AsyncAppContext {
         F: FnOnce(AnyView, &mut Self::WindowContext<'_>) -> T,
     {
         let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock(); // Need this to compile
+        let mut lock = app.borrow_mut();
         lock.update_window(window, f)
     }
 }
 
 impl AsyncAppContext {
     pub fn refresh(&mut self) -> Result<()> {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock(); // Need this to compile
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut lock = app.borrow_mut();
         lock.refresh();
         Ok(())
     }
 
-    pub fn executor(&self) -> &Executor {
-        &self.executor
+    pub fn background_executor(&self) -> &BackgroundExecutor {
+        &self.background_executor
     }
 
-    pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> Result<R> {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock();
-        Ok(f(&mut *lock))
+    pub fn foreground_executor(&self) -> &ForegroundExecutor {
+        &self.foreground_executor
     }
 
-    pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut + Send + 'static) -> Task<R>
-    where
-        Fut: Future<Output = R> + Send + 'static,
-        R: Send + 'static,
-    {
-        let this = self.clone();
-        self.executor.spawn(async move { f(this).await })
+    pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> Result<R> {
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut lock = app.borrow_mut();
+        Ok(f(&mut *lock))
     }
 
-    pub fn spawn_on_main<Fut, R>(
+    pub fn update_window<R>(
         &self,
-        f: impl FnOnce(MainThread<AsyncAppContext>) -> Fut + Send + 'static,
-    ) -> Task<R>
-    where
-        Fut: Future<Output = R> + 'static,
-        R: Send + 'static,
-    {
-        let this = self.clone();
-        self.executor.spawn_on_main(|| f(MainThread(this)))
+        handle: AnyWindowHandle,
+        update: impl FnOnce(&mut WindowContext) -> R,
+    ) -> Result<R> {
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut app_context = app.borrow_mut();
+        app_context.update_window(handle.id, update)
     }
 
-    pub fn run_on_main<R>(
-        &self,
-        f: impl FnOnce(&mut MainThread<AppContext>) -> R + Send + 'static,
-    ) -> Result<Task<R>>
+    pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
     where
-        R: Send + 'static,
+        Fut: Future<Output = R> + 'static,
+        R: 'static,
     {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut app_context = app.lock();
-        Ok(app_context.run_on_main(f))
+        self.foreground_executor.spawn(f(self.clone()))
     }
 
     pub fn has_global<G: 'static>(&self) -> Result<bool> {
-        let app = self.app.upgrade().context("app was released")?;
-        let lock = app.lock(); // Need this to compile
-        Ok(lock.has_global::<G>())
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let app = app.borrow_mut();
+        Ok(app.has_global::<G>())
     }
 
     pub fn read_global<G: 'static, R>(&self, read: impl FnOnce(&G, &AppContext) -> R) -> Result<R> {
-        let app = self.app.upgrade().context("app was released")?;
-        let lock = app.lock(); // Need this to compile
-        Ok(read(lock.global(), &lock))
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let app = app.borrow_mut();
+        Ok(read(app.global(), &app))
     }
 
     pub fn try_read_global<G: 'static, R>(
@@ -118,40 +129,20 @@ impl AsyncAppContext {
         read: impl FnOnce(&G, &AppContext) -> R,
     ) -> Option<R> {
         let app = self.app.upgrade()?;
-        let lock = app.lock(); // Need this to compile
-        Some(read(lock.try_global()?, &lock))
+        let app = app.borrow_mut();
+        Some(read(app.try_global()?, &app))
     }
 
     pub fn update_global<G: 'static, R>(
         &mut self,
         update: impl FnOnce(&mut G, &mut AppContext) -> R,
     ) -> Result<R> {
-        let app = self.app.upgrade().context("app was released")?;
-        let mut lock = app.lock(); // Need this to compile
-        Ok(lock.update_global(update))
-    }
-}
-
-impl MainThread<AsyncAppContext> {
-    pub fn update<R>(&self, f: impl FnOnce(&mut MainThread<AppContext>) -> R) -> Result<R> {
-        let app = self.app.upgrade().context("app was released")?;
-        let cx = &mut *app.lock();
-        let cx = unsafe { mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(cx) };
-        Ok(f(cx))
-    }
-
-    /// Opens a new window with the given option and the root view returned by the given function.
-    /// The function is invoked with a `WindowContext`, which can be used to interact with window-specific
-    /// functionality.
-    pub fn open_window<V: Render>(
-        &mut self,
-        options: crate::WindowOptions,
-        build_root_view: impl FnOnce(&mut MainThread<WindowContext>) -> View<V> + Send + 'static,
-    ) -> Result<WindowHandle<V>> {
-        let app = self.app.upgrade().context("app was released")?;
-        let cx = &mut *app.lock();
-        let cx = unsafe { mem::transmute::<&mut AppContext, &mut MainThread<AppContext>>(cx) };
-        Ok(cx.open_window(options, build_root_view))
+        let app = self
+            .app
+            .upgrade()
+            .ok_or_else(|| anyhow!("app was released"))?;
+        let mut app = app.borrow_mut();
+        Ok(app.update_global(update))
     }
 }
 
@@ -177,7 +168,7 @@ impl AsyncWindowContext {
 
     pub fn on_next_frame(&mut self, f: impl FnOnce(&mut WindowContext) + Send + 'static) {
         self.app
-            .update_window(self.window, |_root, cx| cx.on_next_frame(f))
+            .update_window(self.window, |cx| cx.on_next_frame(f))
             .ok();
     }
 
@@ -186,7 +177,7 @@ impl AsyncWindowContext {
         read: impl FnOnce(&G, &WindowContext) -> R,
     ) -> Result<R> {
         self.app
-            .update_window(self.window, |_, cx| read(cx.global(), cx))
+            .update_window(self.window, |cx| read(cx.global(), cx))
     }
 
     pub fn update_global<G, R>(
@@ -197,7 +188,7 @@ impl AsyncWindowContext {
         G: 'static,
     {
         self.app
-            .update_window(self.window, |_, cx| cx.update_global(update))
+            .update_window(self.window, |cx| cx.update_global(update))
     }
 
     pub fn spawn<Fut, R>(
@@ -209,30 +200,7 @@ impl AsyncWindowContext {
         R: Send + 'static,
     {
         let this = self.clone();
-        self.executor.spawn(async move { f(this).await })
-    }
-
-    pub fn spawn_on_main<Fut, R>(
-        &self,
-        f: impl FnOnce(AsyncWindowContext) -> Fut + Send + 'static,
-    ) -> Task<R>
-    where
-        Fut: Future<Output = R> + 'static,
-        R: Send + 'static,
-    {
-        let this = self.clone();
-        self.executor.spawn_on_main(|| f(this))
-    }
-
-    pub fn run_on_main<R>(
-        &mut self,
-        f: impl FnOnce(&mut MainThread<WindowContext>) -> R + Send + 'static,
-    ) -> Task<Result<R>>
-    where
-        R: Send + 'static,
-    {
-        self.update(|_, cx| cx.run_on_main(f))
-            .unwrap_or_else(|error| Task::ready(Err(error)))
+        self.foreground_executor.spawn(async move { f(this).await })
     }
 }
 
@@ -247,10 +215,10 @@ impl Context for AsyncWindowContext {
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
     ) -> Result<Model<T>>
     where
-        T: 'static + Send,
+        T: 'static,
     {
         self.app
-            .update_window(self.window, |_, cx| cx.build_model(build_model))
+            .update_window(self.window, |cx| cx.build_model(build_model))
     }
 
     fn update_model<T: 'static, R>(
@@ -259,7 +227,7 @@ impl Context for AsyncWindowContext {
         update: impl FnOnce(&mut T, &mut Self::ModelContext<'_, T>) -> R,
     ) -> Result<R> {
         self.app
-            .update_window(self.window, |_, cx| cx.update_model(handle, update))
+            .update_window(self.window, |cx| cx.update_model(handle, update))
     }
 
     fn update_window<T, F>(&mut self, window: AnyWindowHandle, update: F) -> Result<T>
@@ -278,10 +246,10 @@ impl VisualContext for AsyncWindowContext {
         build_view_state: impl FnOnce(&mut Self::ViewContext<'_, V>) -> V,
     ) -> Self::Result<View<V>>
     where
-        V: 'static + Send,
+        V: 'static,
     {
         self.app
-            .update_window(self.window, |_, cx| cx.build_view(build_view_state))
+            .update_window(self.window, |cx| cx.build_view(build_view_state))
     }
 
     fn update_view<V: 'static, R>(
@@ -290,7 +258,7 @@ impl VisualContext for AsyncWindowContext {
         update: impl FnOnce(&mut V, &mut Self::ViewContext<'_, V>) -> R,
     ) -> Self::Result<R> {
         self.app
-            .update_window(self.window, |_, cx| cx.update_view(view, update))
+            .update_window(self.window, |cx| cx.update_view(view, update))
     }
 
     fn replace_root_view<V>(
@@ -301,17 +269,6 @@ impl VisualContext for AsyncWindowContext {
         V: 'static + Send + Render,
     {
         self.app
-            .update_window(self.window, |_, cx| cx.replace_root_view(build_view))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_async_app_context_send_sync() {
-        fn assert_send_sync<T: Send + Sync>() {}
-        assert_send_sync::<AsyncAppContext>();
+            .update_window(self.window, |cx| cx.replace_root_view(build_view))
     }
 }
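
Call sites that used the old two-argument closures migrate to the new single-argument form of update_window. A hedged sketch of an async task touching both the app and one window; the closure bodies are illustrative only:

    // Sketch only; `window` is any AnyWindowHandle the caller already holds.
    async fn poke_window(cx: AsyncAppContext, window: AnyWindowHandle) -> Result<()> {
        cx.update(|cx| cx.refresh())?;
        cx.update_window(window, |cx| {
            cx.on_next_frame(|_cx| {
                // runs when this window next redraws
            });
        })?;
        Ok(())
    }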

crates/gpui2/src/app/entity_map.rs

@@ -59,7 +59,7 @@ impl EntityMap {
     /// Insert an entity into a slot obtained by calling `reserve`.
     pub fn insert<T>(&mut self, slot: Slot<T>, entity: T) -> Model<T>
     where
-        T: 'static + Send,
+        T: 'static,
     {
         let model = slot.0;
         self.entities.insert(model.entity_id, Box::new(entity));
@@ -106,7 +106,12 @@ impl EntityMap {
         dropped_entity_ids
             .into_iter()
             .map(|entity_id| {
-                ref_counts.counts.remove(entity_id);
+                let count = ref_counts.counts.remove(entity_id).unwrap();
+                debug_assert_eq!(
+                    count.load(SeqCst),
+                    0,
+                    "dropped an entity that was referenced"
+                );
                 (entity_id, self.entities.remove(entity_id).unwrap())
             })
             .collect()
@@ -216,7 +221,7 @@ impl Drop for AnyModel {
             let count = entity_map
                 .counts
                 .get(self.entity_id)
-                .expect("Detected over-release of a model.");
+                .expect("detected over-release of a handle.");
             let prev_count = count.fetch_sub(1, SeqCst);
             assert_ne!(prev_count, 0, "Detected over-release of a model.");
             if prev_count == 1 {
@@ -400,12 +405,16 @@ impl AnyWeakModel {
     }
 
     pub fn upgrade(&self) -> Option<AnyModel> {
-        let entity_map = self.entity_ref_counts.upgrade()?;
-        entity_map
-            .read()
-            .counts
-            .get(self.entity_id)?
-            .fetch_add(1, SeqCst);
+        let ref_counts = &self.entity_ref_counts.upgrade()?;
+        let ref_counts = ref_counts.read();
+        let ref_count = ref_counts.counts.get(self.entity_id)?;
+
+        // The entity is already in dropped_entity_ids awaiting cleanup, so don't let a weak handle resurrect it.
+        if ref_count.load(SeqCst) == 0 {
+            return None;
+        }
+        ref_count.fetch_add(1, SeqCst);
+
         Some(AnyModel {
             entity_id: self.entity_id,
             entity_type: self.entity_type,
@@ -504,3 +513,60 @@ impl<T> PartialEq<Model<T>> for WeakModel<T> {
         self.entity_id() == other.any_model.entity_id()
     }
 }
+
+#[cfg(test)]
+mod test {
+    use crate::EntityMap;
+
+    struct TestEntity {
+        pub i: i32,
+    }
+
+    #[test]
+    fn test_entity_map_slot_assignment_before_cleanup() {
+        // Tests that slots are not re-used before take_dropped.
+        let mut entity_map = EntityMap::new();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        entity_map.insert(slot, TestEntity { i: 1 });
+
+        let slot = entity_map.reserve::<TestEntity>();
+        entity_map.insert(slot, TestEntity { i: 2 });
+
+        let dropped = entity_map.take_dropped();
+        assert_eq!(dropped.len(), 2);
+
+        assert_eq!(
+            dropped
+                .into_iter()
+                .map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
+                .collect::<Vec<i32>>(),
+            vec![1, 2],
+        );
+    }
+
+    #[test]
+    fn test_entity_map_weak_upgrade_before_cleanup() {
+        // Tests that weak handles are not upgraded before take_dropped
+        let mut entity_map = EntityMap::new();
+
+        let slot = entity_map.reserve::<TestEntity>();
+        let handle = entity_map.insert(slot, TestEntity { i: 1 });
+        let weak = handle.downgrade();
+        drop(handle);
+
+        let strong = weak.upgrade();
+        assert_eq!(strong, None);
+
+        let dropped = entity_map.take_dropped();
+        assert_eq!(dropped.len(), 1);
+
+        assert_eq!(
+            dropped
+                .into_iter()
+                .map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
+                .collect::<Vec<i32>>(),
+            vec![1],
+        );
+    }
+}

crates/gpui2/src/app/model_context.rs

@@ -1,6 +1,6 @@
 use crate::{
     AnyView, AnyWindowHandle, AppContext, AsyncAppContext, Context, Effect, Entity, EntityId,
-    EventEmitter, MainThread, Model, Subscription, Task, WeakModel, WindowContext,
+    EventEmitter, Model, Subscription, Task, WeakModel, WindowContext,
 };
 use anyhow::Result;
 use derive_more::{Deref, DerefMut};
@@ -38,15 +38,15 @@ impl<'a, T: 'static> ModelContext<'a, T> {
         self.model_state.clone()
     }
 
-    pub fn observe<T2, E>(
+    pub fn observe<W, E>(
         &mut self,
         entity: &E,
-        mut on_notify: impl FnMut(&mut T, E, &mut ModelContext<'_, T>) + Send + 'static,
+        mut on_notify: impl FnMut(&mut T, E, &mut ModelContext<'_, T>) + 'static,
     ) -> Subscription
     where
-        T: 'static + Send,
-        T2: 'static,
-        E: Entity<T2>,
+        T: 'static,
+        W: 'static,
+        E: Entity<W>,
     {
         let this = self.weak_model();
         let entity_id = entity.entity_id();
@@ -67,10 +67,10 @@ impl<'a, T: 'static> ModelContext<'a, T> {
     pub fn subscribe<T2, E>(
         &mut self,
         entity: &E,
-        mut on_event: impl FnMut(&mut T, E, &T2::Event, &mut ModelContext<'_, T>) + Send + 'static,
+        mut on_event: impl FnMut(&mut T, E, &T2::Event, &mut ModelContext<'_, T>) + 'static,
     ) -> Subscription
     where
-        T: 'static + Send,
+        T: 'static,
         T2: 'static + EventEmitter,
         E: Entity<T2>,
     {
@@ -93,7 +93,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
 
     pub fn on_release(
         &mut self,
-        mut on_release: impl FnMut(&mut T, &mut AppContext) + Send + 'static,
+        mut on_release: impl FnMut(&mut T, &mut AppContext) + 'static,
     ) -> Subscription
     where
         T: 'static,
@@ -110,10 +110,10 @@ impl<'a, T: 'static> ModelContext<'a, T> {
     pub fn observe_release<T2, E>(
         &mut self,
         entity: &E,
-        mut on_release: impl FnMut(&mut T, &mut T2, &mut ModelContext<'_, T>) + Send + 'static,
+        mut on_release: impl FnMut(&mut T, &mut T2, &mut ModelContext<'_, T>) + 'static,
     ) -> Subscription
     where
-        T: Any + Send,
+        T: Any,
         T2: 'static,
         E: Entity<T2>,
     {
@@ -132,10 +132,10 @@ impl<'a, T: 'static> ModelContext<'a, T> {
 
     pub fn observe_global<G: 'static>(
         &mut self,
-        mut f: impl FnMut(&mut T, &mut ModelContext<'_, T>) + Send + 'static,
+        mut f: impl FnMut(&mut T, &mut ModelContext<'_, T>) + 'static,
     ) -> Subscription
     where
-        T: 'static + Send,
+        T: 'static,
     {
         let handle = self.weak_model();
         self.global_observers.insert(
@@ -146,11 +146,11 @@ impl<'a, T: 'static> ModelContext<'a, T> {
 
     pub fn on_app_quit<Fut>(
         &mut self,
-        mut on_quit: impl FnMut(&mut T, &mut ModelContext<T>) -> Fut + Send + 'static,
+        mut on_quit: impl FnMut(&mut T, &mut ModelContext<T>) -> Fut + 'static,
     ) -> Subscription
     where
-        Fut: 'static + Future<Output = ()> + Send,
-        T: 'static + Send,
+        Fut: 'static + Future<Output = ()>,
+        T: 'static,
     {
         let handle = self.weak_model();
         self.app.quit_observers.insert(
@@ -162,7 +162,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
                         future.await;
                     }
                 }
-                .boxed()
+                .boxed_local()
             }),
         )
     }
@@ -181,7 +181,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
 
     pub fn update_global<G, R>(&mut self, f: impl FnOnce(&mut G, &mut Self) -> R) -> R
     where
-        G: 'static + Send,
+        G: 'static,
     {
         let mut global = self.app.lease_global::<G>();
         let result = f(&mut global, self);
@@ -192,30 +192,17 @@ impl<'a, T: 'static> ModelContext<'a, T> {
     pub fn spawn<Fut, R>(&self, f: impl FnOnce(WeakModel<T>, AsyncAppContext) -> Fut) -> Task<R>
     where
         T: 'static,
-        Fut: Future<Output = R> + Send + 'static,
-        R: Send + 'static,
-    {
-        let this = self.weak_model();
-        self.app.spawn(|cx| f(this, cx))
-    }
-
-    pub fn spawn_on_main<Fut, R>(
-        &self,
-        f: impl FnOnce(WeakModel<T>, MainThread<AsyncAppContext>) -> Fut + Send + 'static,
-    ) -> Task<R>
-    where
         Fut: Future<Output = R> + 'static,
-        R: Send + 'static,
+        R: 'static,
     {
         let this = self.weak_model();
-        self.app.spawn_on_main(|cx| f(this, cx))
+        self.app.spawn(|cx| f(this, cx))
     }
 }
 
 impl<'a, T> ModelContext<'a, T>
 where
     T: EventEmitter,
-    T::Event: Send,
 {
     pub fn emit(&mut self, event: T::Event) {
         self.app.pending_effects.push_back(Effect::Emit {
@@ -230,13 +217,10 @@ impl<'a, T> Context for ModelContext<'a, T> {
     type ModelContext<'b, U> = ModelContext<'b, U>;
     type Result<U> = U;
 
-    fn build_model<U>(
+    fn build_model<U: 'static>(
         &mut self,
         build_model: impl FnOnce(&mut Self::ModelContext<'_, U>) -> U,
-    ) -> Model<U>
-    where
-        U: 'static + Send,
-    {
+    ) -> Model<U> {
         self.app.build_model(build_model)
     }
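
Because ModelContext::spawn now routes through the foreground executor, model state and the futures that touch it no longer need to be Send. A hedged sketch with an invented `Counter` model; none of these names come from the diff:

    use std::{cell::Cell, rc::Rc};

    struct Counter {
        count: Rc<Cell<usize>>, // non-Send state, previously rejected by the Send bounds
    }

    fn bump_later(counter: &Model<Counter>, cx: &mut AppContext) {
        counter.update(cx, |_, cx| {
            cx.spawn(|this, mut cx| async move {
                if let Some(this) = this.upgrade() {
                    this.update(&mut cx, |counter, _| {
                        counter.count.set(counter.count.get() + 1)
                    })
                    .ok();
                }
            })
            .detach();
        });
    }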
 

crates/gpui2/src/app/test_context.rs

@@ -1,14 +1,17 @@
 use crate::{
-    AnyView, AnyWindowHandle, AppContext, AsyncAppContext, Context, Executor, MainThread, Model,
-    ModelContext, Result, Task, TestDispatcher, TestPlatform, WindowContext,
+    AnyView, AnyWindowHandle, AppContext, AsyncAppContext, BackgroundExecutor, Context,
+    EventEmitter, ForegroundExecutor, Model, ModelContext, Result, Task, TestDispatcher,
+    TestPlatform, WindowContext,
 };
-use parking_lot::Mutex;
-use std::{future::Future, sync::Arc};
+use anyhow::{anyhow, bail};
+use futures::{Stream, StreamExt};
+use std::{cell::RefCell, future::Future, rc::Rc, sync::Arc, time::Duration};
 
 #[derive(Clone)]
 pub struct TestAppContext {
-    pub app: Arc<Mutex<AppContext>>,
-    pub executor: Executor,
+    pub app: Rc<RefCell<AppContext>>,
+    pub background_executor: BackgroundExecutor,
+    pub foreground_executor: ForegroundExecutor,
 }
 
 impl Context for TestAppContext {
@@ -21,10 +24,10 @@ impl Context for TestAppContext {
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
     ) -> Self::Result<Model<T>>
     where
-        T: 'static + Send,
+        T: 'static,
     {
-        let mut lock = self.app.lock();
-        lock.build_model(build_model)
+        let mut app = self.app.borrow_mut();
+        app.build_model(build_model)
     }
 
     fn update_model<T: 'static, R>(
@@ -32,97 +35,92 @@ impl Context for TestAppContext {
         handle: &Model<T>,
         update: impl FnOnce(&mut T, &mut Self::ModelContext<'_, T>) -> R,
     ) -> Self::Result<R> {
-        let mut lock = self.app.lock();
-        lock.update_model(handle, update)
+        let mut app = self.app.borrow_mut();
+        app.update_model(handle, update)
     }
 
     fn update_window<T, F>(&mut self, window: AnyWindowHandle, f: F) -> Result<T>
     where
         F: FnOnce(AnyView, &mut Self::WindowContext<'_>) -> T,
     {
-        let mut lock = self.app.lock();
+        let mut lock = self.app.borrow_mut();
         lock.update_window(window, f)
     }
 }
 
 impl TestAppContext {
     pub fn new(dispatcher: TestDispatcher) -> Self {
-        let executor = Executor::new(Arc::new(dispatcher));
-        let platform = Arc::new(TestPlatform::new(executor.clone()));
+        let dispatcher = Arc::new(dispatcher);
+        let background_executor = BackgroundExecutor::new(dispatcher.clone());
+        let foreground_executor = ForegroundExecutor::new(dispatcher);
+        let platform = Rc::new(TestPlatform::new(
+            background_executor.clone(),
+            foreground_executor.clone(),
+        ));
         let asset_source = Arc::new(());
         let http_client = util::http::FakeHttpClient::with_404_response();
         Self {
             app: AppContext::new(platform, asset_source, http_client),
-            executor,
+            background_executor,
+            foreground_executor,
         }
     }
 
     pub fn quit(&self) {
-        self.app.lock().quit();
+        self.app.borrow_mut().quit();
     }
 
     pub fn refresh(&mut self) -> Result<()> {
-        let mut lock = self.app.lock();
-        lock.refresh();
+        let mut app = self.app.borrow_mut();
+        app.refresh();
         Ok(())
     }
 
-    pub fn executor(&self) -> &Executor {
-        &self.executor
+    pub fn executor(&self) -> &BackgroundExecutor {
+        &self.background_executor
     }
 
-    pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> R {
-        let mut lock = self.app.lock();
-        f(&mut *lock)
+    pub fn foreground_executor(&self) -> &ForegroundExecutor {
+        &self.foreground_executor
     }
 
-    pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut + Send + 'static) -> Task<R>
-    where
-        Fut: Future<Output = R> + Send + 'static,
-        R: Send + 'static,
-    {
-        let cx = self.to_async();
-        self.executor.spawn(async move { f(cx).await })
+    pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> R {
+        let mut cx = self.app.borrow_mut();
+        cx.update(f)
     }
 
-    pub fn spawn_on_main<Fut, R>(
+    pub fn update_window<R>(
         &self,
-        f: impl FnOnce(AsyncAppContext) -> Fut + Send + 'static,
-    ) -> Task<R>
-    where
-        Fut: Future<Output = R> + 'static,
-        R: Send + 'static,
-    {
-        let cx = self.to_async();
-        self.executor.spawn_on_main(|| f(cx))
+        handle: AnyWindowHandle,
+        update: impl FnOnce(&mut WindowContext) -> R,
+    ) -> R {
+        let mut app = self.app.borrow_mut();
+        app.update_window(handle.id, update).unwrap()
     }
 
-    pub fn run_on_main<R>(
-        &self,
-        f: impl FnOnce(&mut MainThread<AppContext>) -> R + Send + 'static,
-    ) -> Task<R>
+    pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
     where
-        R: Send + 'static,
+        Fut: Future<Output = R> + 'static,
+        R: 'static,
     {
-        let mut app_context = self.app.lock();
-        app_context.run_on_main(f)
+        self.foreground_executor.spawn(f(self.to_async()))
     }
 
     pub fn has_global<G: 'static>(&self) -> bool {
-        let lock = self.app.lock();
-        lock.has_global::<G>()
+        let app = self.app.borrow();
+        app.has_global::<G>()
     }
 
     pub fn read_global<G: 'static, R>(&self, read: impl FnOnce(&G, &AppContext) -> R) -> R {
-        let lock = self.app.lock();
-        read(lock.global(), &lock)
+        let app = self.app.borrow();
+        read(app.global(), &app)
     }
 
     pub fn try_read_global<G: 'static, R>(
         &self,
         read: impl FnOnce(&G, &AppContext) -> R,
     ) -> Option<R> {
-        let lock = self.app.lock();
+        let lock = self.app.borrow();
         Some(read(lock.try_global()?, &lock))
     }
 
@@ -130,14 +128,75 @@ impl TestAppContext {
         &mut self,
         update: impl FnOnce(&mut G, &mut AppContext) -> R,
     ) -> R {
-        let mut lock = self.app.lock();
+        let mut lock = self.app.borrow_mut();
         lock.update_global(update)
     }
 
     pub fn to_async(&self) -> AsyncAppContext {
         AsyncAppContext {
-            app: Arc::downgrade(&self.app),
-            executor: self.executor.clone(),
+            app: Rc::downgrade(&self.app),
+            background_executor: self.background_executor.clone(),
+            foreground_executor: self.foreground_executor.clone(),
+        }
+    }
+
+    pub fn notifications<T: 'static>(&mut self, entity: &Model<T>) -> impl Stream<Item = ()> {
+        let (tx, rx) = futures::channel::mpsc::unbounded();
+
+        entity.update(self, move |_, cx: &mut ModelContext<T>| {
+            cx.observe(entity, {
+                let tx = tx.clone();
+                move |_, _, _| {
+                    let _ = tx.unbounded_send(());
+                }
+            })
+            .detach();
+
+            cx.on_release(move |_, _| tx.close_channel()).detach();
+        });
+
+        rx
+    }
+
+    pub fn events<T: 'static + EventEmitter>(
+        &mut self,
+        entity: &Model<T>,
+    ) -> futures::channel::mpsc::UnboundedReceiver<T::Event>
+    where
+        T::Event: 'static + Clone,
+    {
+        let (tx, rx) = futures::channel::mpsc::unbounded();
+        entity
+            .update(self, |_, cx: &mut ModelContext<T>| {
+                cx.subscribe(entity, move |_model, _handle, event, _cx| {
+                    let _ = tx.unbounded_send(event.clone());
+                })
+            })
+            .detach();
+        rx
+    }
+
+    pub async fn condition<T: 'static>(
+        &mut self,
+        model: &Model<T>,
+        mut predicate: impl FnMut(&mut T, &mut ModelContext<T>) -> bool,
+    ) {
+        let timer = self.executor().timer(Duration::from_secs(3));
+        let mut notifications = self.notifications(model);
+
+        use futures::FutureExt as _;
+        use smol::future::FutureExt as _;
+
+        async {
+            while notifications.next().await.is_some() {
+                if model.update(self, &mut predicate) {
+                    return Ok(());
+                }
+            }
+            bail!("model dropped")
         }
+        .race(timer.map(|_| Err(anyhow!("condition timed out"))))
+        .await
+        .unwrap();
     }
 }
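
The new notifications/events/condition helpers let tests await model state without hand-rolled channels. A hedged sketch of a test body (normally driven by the gpui2 test macro); the `Download` model and its event type are invented, and EventEmitter is assumed to require only the associated Event type:

    use futures::StreamExt as _;

    struct Download {
        done: bool,
    }

    #[derive(Clone, Debug, PartialEq)]
    enum DownloadEvent {
        Finished,
    }

    impl EventEmitter for Download {
        type Event = DownloadEvent;
    }

    async fn assert_download_finishes(cx: &mut TestAppContext) {
        let model = cx.build_model(|_| Download { done: false });
        let mut events = cx.events(&model);

        model.update(cx, |download, cx| {
            download.done = true;
            cx.emit(DownloadEvent::Finished);
        });

        assert_eq!(events.next().await, Some(DownloadEvent::Finished));
        assert!(model.update(cx, |download, _| download.done));
    }

cx.condition(&model, ...) follows the same pattern when the state change happens asynchronously and the model notifies.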

crates/gpui2/src/element.rs

@@ -4,7 +4,7 @@ pub(crate) use smallvec::SmallVec;
 use std::{any::Any, mem};
 
 pub trait Element<V: 'static> {
-    type ElementState: 'static + Send;
+    type ElementState: 'static;
 
     fn id(&self) -> Option<ElementId>;
 
@@ -97,7 +97,7 @@ impl<V, E: Element<V>> RenderedElement<V, E> {
 impl<V, E> ElementObject<V> for RenderedElement<V, E>
 where
     E: Element<V>,
-    E::ElementState: 'static + Send,
+    E::ElementState: 'static,
 {
     fn initialize(&mut self, view_state: &mut V, cx: &mut ViewContext<V>) {
         let frame_state = if let Some(id) = self.element.id() {
@@ -170,16 +170,14 @@ where
     }
 }
 
-pub struct AnyElement<V>(Box<dyn ElementObject<V> + Send>);
-
-unsafe impl<V> Send for AnyElement<V> {}
+pub struct AnyElement<V>(Box<dyn ElementObject<V>>);
 
 impl<V> AnyElement<V> {
     pub fn new<E>(element: E) -> Self
     where
         V: 'static,
-        E: 'static + Element<V> + Send,
-        E::ElementState: Any + Send,
+        E: 'static + Element<V>,
+        E::ElementState: Any,
     {
         AnyElement(Box::new(RenderedElement::new(element)))
     }
@@ -220,8 +218,8 @@ impl<V> Component<V> for AnyElement<V> {
 impl<V, E, F> Element<V> for Option<F>
 where
     V: 'static,
-    E: 'static + Component<V> + Send,
-    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + Send + 'static,
+    E: 'static + Component<V>,
+    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + 'static,
 {
     type ElementState = AnyElement<V>;
 
@@ -264,8 +262,8 @@ where
 impl<V, E, F> Component<V> for Option<F>
 where
     V: 'static,
-    E: 'static + Component<V> + Send,
-    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + Send + 'static,
+    E: 'static + Component<V>,
+    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + 'static,
 {
     fn render(self) -> AnyElement<V> {
         AnyElement::new(self)
@@ -275,8 +273,8 @@ where
 impl<V, E, F> Component<V> for F
 where
     V: 'static,
-    E: 'static + Component<V> + Send,
-    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + Send + 'static,
+    E: 'static + Component<V>,
+    F: FnOnce(&mut V, &mut ViewContext<'_, V>) -> E + 'static,
 {
     fn render(self) -> AnyElement<V> {
         AnyElement::new(Some(self))

crates/gpui2/src/executor.rs

@@ -6,7 +6,11 @@ use std::{
     marker::PhantomData,
     mem,
     pin::Pin,
-    sync::Arc,
+    rc::Rc,
+    sync::{
+        atomic::{AtomicBool, Ordering::SeqCst},
+        Arc,
+    },
     task::{Context, Poll},
     time::Duration,
 };
@@ -14,10 +18,16 @@ use util::TryFutureExt;
 use waker_fn::waker_fn;
 
 #[derive(Clone)]
-pub struct Executor {
+pub struct BackgroundExecutor {
     dispatcher: Arc<dyn PlatformDispatcher>,
 }
 
+#[derive(Clone)]
+pub struct ForegroundExecutor {
+    dispatcher: Arc<dyn PlatformDispatcher>,
+    not_send: PhantomData<Rc<()>>,
+}
+
 #[must_use]
 pub enum Task<T> {
     Ready(Option<T>),
@@ -43,7 +53,7 @@ where
     E: 'static + Send + Debug,
 {
     pub fn detach_and_log_err(self, cx: &mut AppContext) {
-        cx.executor().spawn(self.log_err()).detach();
+        cx.background_executor().spawn(self.log_err()).detach();
     }
 }
 
@@ -58,7 +68,7 @@ impl<T> Future for Task<T> {
     }
 }
 
-impl Executor {
+impl BackgroundExecutor {
     pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
         Self { dispatcher }
     }
@@ -76,68 +86,30 @@ impl Executor {
         Task::Spawned(task)
     }
 
-    /// Enqueues the given closure to run on the application's event loop.
-    /// Returns the result asynchronously.
-    pub fn run_on_main<F, R>(&self, func: F) -> Task<R>
-    where
-        F: FnOnce() -> R + Send + 'static,
-        R: Send + 'static,
-    {
-        if self.dispatcher.is_main_thread() {
-            Task::ready(func())
-        } else {
-            self.spawn_on_main(move || async move { func() })
-        }
-    }
-
-    /// Enqueues the given closure to be run on the application's event loop. The
-    /// closure returns a future which will be run to completion on the main thread.
-    pub fn spawn_on_main<F, R>(&self, func: impl FnOnce() -> F + Send + 'static) -> Task<R>
-    where
-        F: Future<Output = R> + 'static,
-        R: Send + 'static,
-    {
-        let (runnable, task) = async_task::spawn(
-            {
-                let this = self.clone();
-                async move {
-                    let task = this.spawn_on_main_local(func());
-                    task.await
-                }
-            },
-            {
-                let dispatcher = self.dispatcher.clone();
-                move |runnable| dispatcher.dispatch_on_main_thread(runnable)
-            },
-        );
-        runnable.schedule();
-        Task::Spawned(task)
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
+        self.block_internal(false, future)
     }
 
-    /// Enqueues the given closure to be run on the application's event loop. Must
-    /// be called on the main thread.
-    pub fn spawn_on_main_local<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
-    where
-        R: 'static,
-    {
-        assert!(
-            self.dispatcher.is_main_thread(),
-            "must be called on main thread"
-        );
-
-        let dispatcher = self.dispatcher.clone();
-        let (runnable, task) = async_task::spawn_local(future, move |runnable| {
-            dispatcher.dispatch_on_main_thread(runnable)
-        });
-        runnable.schedule();
-        Task::Spawned(task)
+    pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
+        self.block_internal(true, future)
     }
 
-    pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
+    pub(crate) fn block_internal<R>(
+        &self,
+        background_only: bool,
+        future: impl Future<Output = R>,
+    ) -> R {
         pin_mut!(future);
-        let (parker, unparker) = parking::pair();
-        let waker = waker_fn(move || {
-            unparker.unpark();
+        let unparker = self.dispatcher.unparker();
+        let awoken = Arc::new(AtomicBool::new(false));
+
+        let waker = waker_fn({
+            let awoken = awoken.clone();
+            move || {
+                awoken.store(true, SeqCst);
+                unparker.unpark();
+            }
         });
         let mut cx = std::task::Context::from_waker(&waker);
 
@@ -145,12 +117,24 @@ impl Executor {
             match future.as_mut().poll(&mut cx) {
                 Poll::Ready(result) => return result,
                 Poll::Pending => {
-                    if !self.dispatcher.poll() {
+                    if !self.dispatcher.poll(background_only) {
+                        if awoken.swap(false, SeqCst) {
+                            continue;
+                        }
+
                         #[cfg(any(test, feature = "test-support"))]
-                        if let Some(_) = self.dispatcher.as_test() {
-                            panic!("blocked with nothing left to run")
+                        if let Some(test) = self.dispatcher.as_test() {
+                            if !test.parking_allowed() {
+                                let mut backtrace_message = String::new();
+                                if let Some(backtrace) = test.waiting_backtrace() {
+                                    backtrace_message =
+                                        format!("\nbacktrace of waiting future:\n{:?}", backtrace);
+                                }
+                                panic!("parked with nothing left to run\n{:?}", backtrace_message)
+                            }
                         }
-                        parker.park();
+
+                        self.dispatcher.park();
                     }
                 }
             }
@@ -206,17 +190,17 @@ impl Executor {
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn start_waiting(&self) {
-        todo!("start_waiting")
+        self.dispatcher.as_test().unwrap().start_waiting();
     }
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn finish_waiting(&self) {
-        todo!("finish_waiting")
+        self.dispatcher.as_test().unwrap().finish_waiting();
     }
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
-        self.spawn(self.dispatcher.as_test().unwrap().simulate_random_delay())
+        self.dispatcher.as_test().unwrap().simulate_random_delay()
     }
 
     #[cfg(any(test, feature = "test-support"))]
@@ -229,6 +213,11 @@ impl Executor {
         self.dispatcher.as_test().unwrap().run_until_parked()
     }
 
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn allow_parking(&self) {
+        self.dispatcher.as_test().unwrap().allow_parking();
+    }
+
     pub fn num_cpus(&self) -> usize {
         num_cpus::get()
     }
@@ -238,8 +227,31 @@ impl Executor {
     }
 }
 
+impl ForegroundExecutor {
+    pub fn new(dispatcher: Arc<dyn PlatformDispatcher>) -> Self {
+        Self {
+            dispatcher,
+            not_send: PhantomData,
+        }
+    }
+
+    /// Enqueues the given future to be run to completion on the main thread,
+    /// scheduled via the platform's main-thread dispatcher.
+    pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
+    where
+        R: 'static,
+    {
+        let dispatcher = self.dispatcher.clone();
+        let (runnable, task) = async_task::spawn_local(future, move |runnable| {
+            dispatcher.dispatch_on_main_thread(runnable)
+        });
+        runnable.schedule();
+        Task::Spawned(task)
+    }
+}
+
 pub struct Scope<'a> {
-    executor: Executor,
+    executor: BackgroundExecutor,
     futures: Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>,
     tx: Option<mpsc::Sender<()>>,
     rx: mpsc::Receiver<()>,
@@ -247,7 +259,7 @@ pub struct Scope<'a> {
 }
 
 impl<'a> Scope<'a> {
-    fn new(executor: Executor) -> Self {
+    fn new(executor: BackgroundExecutor) -> Self {
         let (tx, rx) = mpsc::channel(1);
         Self {
             executor,
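
The split replaces the old Executor: BackgroundExecutor::spawn still takes Send futures and fans them out to the thread pool, while ForegroundExecutor::spawn accepts !Send futures and always lands on the main thread. A hedged sketch of the resulting division of labor; the checksum closure is a placeholder:

    // Sketch only: heavy work runs on the background pool, the surrounding
    // future stays on the main thread and may hold non-Send state.
    fn checksum_in_background(cx: &mut AppContext, input: Vec<u8>) -> Task<u64> {
        let background = cx.background_executor().clone();
        cx.spawn(|_cx| async move {
            background
                .spawn(async move { input.iter().map(|byte| *byte as u64).sum() })
                .await
        })
    }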

crates/gpui2/src/gpui2.rs

@@ -68,25 +68,20 @@ use derive_more::{Deref, DerefMut};
 use std::{
     any::{Any, TypeId},
     borrow::{Borrow, BorrowMut},
-    mem,
-    ops::{Deref, DerefMut},
-    sync::Arc,
 };
 use taffy::TaffyLayoutEngine;
 
-type AnyBox = Box<dyn Any + Send>;
+type AnyBox = Box<dyn Any>;
 
 pub trait Context {
     type WindowContext<'a>: UpdateView;
     type ModelContext<'a, T>;
     type Result<T>;
 
-    fn build_model<T>(
+    fn build_model<T: 'static>(
         &mut self,
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
-    ) -> Self::Result<Model<T>>
-    where
-        T: 'static + Send;
+    ) -> Self::Result<Model<T>>;
 
     fn update_model<T, R>(
         &mut self,
@@ -109,7 +104,7 @@ pub trait VisualContext: Context {
         build_view: impl FnOnce(&mut Self::ViewContext<'_, V>) -> V,
     ) -> Self::Result<View<V>>
     where
-        V: 'static + Send;
+        V: 'static;
 
     fn update_view<V: 'static, R>(
         &mut self,
@@ -151,153 +146,6 @@ pub enum GlobalKey {
     Type(TypeId),
 }
 
-#[repr(transparent)]
-pub struct MainThread<T>(T);
-
-impl<T> Deref for MainThread<T> {
-    type Target = T;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
-
-impl<T> DerefMut for MainThread<T> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.0
-    }
-}
-
-impl<C: Context> Context for MainThread<C> {
-    type WindowContext<'a> = MainThread<C::WindowContext<'a>>;
-    type ModelContext<'a, T> = MainThread<C::ModelContext<'a, T>>;
-    type Result<T> = C::Result<T>;
-
-    fn build_model<T>(
-        &mut self,
-        build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
-    ) -> Self::Result<Model<T>>
-    where
-        T: 'static + Send,
-    {
-        self.0.build_model(|cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ModelContext<'_, T>,
-                    &mut MainThread<C::ModelContext<'_, T>>,
-                >(cx)
-            };
-            build_model(cx)
-        })
-    }
-
-    fn update_model<T: 'static, R>(
-        &mut self,
-        handle: &Model<T>,
-        update: impl FnOnce(&mut T, &mut Self::ModelContext<'_, T>) -> R,
-    ) -> Self::Result<R> {
-        self.0.update_model(handle, |entity, cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ModelContext<'_, T>,
-                    &mut MainThread<C::ModelContext<'_, T>>,
-                >(cx)
-            };
-            update(entity, cx)
-        })
-    }
-
-    fn update_window<T, F>(&mut self, window: AnyWindowHandle, update: F) -> Result<T>
-    where
-        F: FnOnce(AnyView, &mut Self::WindowContext<'_>) -> T,
-    {
-        self.0.update_window(window, |root, cx| {
-            let cx = unsafe {
-                mem::transmute::<&mut C::WindowContext<'_>, &mut MainThread<C::WindowContext<'_>>>(
-                    cx,
-                )
-            };
-            update(root, cx)
-        })
-    }
-}
-
-impl<C: VisualContext> VisualContext for MainThread<C> {
-    type ViewContext<'a, V: 'static> = MainThread<C::ViewContext<'a, V>>;
-
-    fn build_view<V>(
-        &mut self,
-        build_view_state: impl FnOnce(&mut Self::ViewContext<'_, V>) -> V,
-    ) -> Self::Result<View<V>>
-    where
-        V: 'static + Send,
-    {
-        self.0.build_view(|cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ViewContext<'_, V>,
-                    &mut MainThread<C::ViewContext<'_, V>>,
-                >(cx)
-            };
-            build_view_state(cx)
-        })
-    }
-
-    fn update_view<V: 'static, R>(
-        &mut self,
-        view: &View<V>,
-        update: impl FnOnce(&mut V, &mut Self::ViewContext<'_, V>) -> R,
-    ) -> Self::Result<R> {
-        self.0.update_view(view, |view_state, cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ViewContext<'_, V>,
-                    &mut MainThread<C::ViewContext<'_, V>>,
-                >(cx)
-            };
-            update(view_state, cx)
-        })
-    }
-
-    fn replace_root_view<V>(
-        &mut self,
-        build_view: impl FnOnce(&mut Self::ViewContext<'_, V>) -> V,
-    ) -> Self::Result<View<V>>
-    where
-        V: 'static + Send + Render,
-    {
-        self.0.replace_root_view(|cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ViewContext<'_, V>,
-                    &mut MainThread<C::ViewContext<'_, V>>,
-                >(cx)
-            };
-            build_view(cx)
-        })
-    }
-}
-
-impl<C: UpdateView> UpdateView for MainThread<C> {
-    type ViewContext<'a, V: 'static> = MainThread<C::ViewContext<'a, V>>;
-
-    fn update_view<V: 'static, R>(
-        &mut self,
-        view: &View<V>,
-        update: impl FnOnce(&mut V, &mut Self::ViewContext<'_, V>) -> R,
-    ) -> R {
-        self.0.update_view(view, |view_state, cx| {
-            let cx = unsafe {
-                mem::transmute::<
-                    &mut C::ViewContext<'_, V>,
-                    &mut MainThread<C::ViewContext<'_, V>>,
-                >(cx)
-            };
-            update(view_state, cx)
-        })
-    }
-}
-
 pub trait BorrowAppContext {
     fn with_text_style<F, R>(&mut self, style: TextStyleRefinement, f: F) -> R
     where
@@ -383,32 +231,3 @@ impl<T: Into<ArcCow<'static, str>>> From<T> for SharedString {
         Self(value.into())
     }
 }
-
-pub(crate) struct MainThreadOnly<T: ?Sized> {
-    executor: Executor,
-    value: Arc<T>,
-}
-
-impl<T: ?Sized> Clone for MainThreadOnly<T> {
-    fn clone(&self) -> Self {
-        Self {
-            executor: self.executor.clone(),
-            value: self.value.clone(),
-        }
-    }
-}
-
-/// Allows a value to be accessed only on the main thread, allowing a non-`Send` type
-/// to become `Send`.
-impl<T: 'static + ?Sized> MainThreadOnly<T> {
-    pub(crate) fn new(value: Arc<T>, executor: Executor) -> Self {
-        Self { executor, value }
-    }
-
-    pub(crate) fn borrow_on_main_thread(&self) -> &T {
-        assert!(self.executor.is_main_thread());
-        &self.value
-    }
-}
-
-unsafe impl<T: ?Sized> Send for MainThreadOnly<T> {}
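
With the `MainThread`/`MainThreadOnly` wrappers removed and the `Send` bounds dropped from `Context`, entity state no longer has to be `Send`. A hypothetical usage sketch (the `Counter` type is illustrative, and it assumes `AppContext`'s `Result<T>` is plain `T`, as the call sites later in this diff suggest):

    use std::{cell::Cell, rc::Rc};

    struct Counter {
        // `Rc<Cell<_>>` is !Send; under the old `T: 'static + Send` bound it
        // could not have lived inside a model.
        shared: Rc<Cell<usize>>,
    }

    fn build_counter(cx: &mut gpui2::AppContext) -> gpui2::Model<Counter> {
        cx.build_model(|_cx| Counter {
            shared: Rc::new(Cell::new(0)),
        })
    }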

crates/gpui2/src/interactive.rs 🔗

@@ -50,7 +50,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
     fn on_mouse_down(
         mut self,
         button: MouseButton,
-        handler: impl Fn(&mut V, &MouseDownEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &MouseDownEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -71,7 +71,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
     fn on_mouse_up(
         mut self,
         button: MouseButton,
-        handler: impl Fn(&mut V, &MouseUpEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &MouseUpEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -92,7 +92,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
     fn on_mouse_down_out(
         mut self,
         button: MouseButton,
-        handler: impl Fn(&mut V, &MouseDownEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &MouseDownEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -113,7 +113,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
     fn on_mouse_up_out(
         mut self,
         button: MouseButton,
-        handler: impl Fn(&mut V, &MouseUpEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &MouseUpEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -133,7 +133,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
 
     fn on_mouse_move(
         mut self,
-        handler: impl Fn(&mut V, &MouseMoveEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &MouseMoveEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -150,7 +150,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
 
     fn on_scroll_wheel(
         mut self,
-        handler: impl Fn(&mut V, &ScrollWheelEvent, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &ScrollWheelEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -178,7 +178,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
 
     fn on_action<A: 'static>(
         mut self,
-        listener: impl Fn(&mut V, &A, DispatchPhase, &mut ViewContext<V>) + Send + 'static,
+        listener: impl Fn(&mut V, &A, DispatchPhase, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -196,7 +196,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
 
     fn on_key_down(
         mut self,
-        listener: impl Fn(&mut V, &KeyDownEvent, DispatchPhase, &mut ViewContext<V>) + Send + 'static,
+        listener: impl Fn(&mut V, &KeyDownEvent, DispatchPhase, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -214,7 +214,7 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
 
     fn on_key_up(
         mut self,
-        listener: impl Fn(&mut V, &KeyUpEvent, DispatchPhase, &mut ViewContext<V>) + Send + 'static,
+        listener: impl Fn(&mut V, &KeyUpEvent, DispatchPhase, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -258,9 +258,9 @@ pub trait StatelessInteractive<V: 'static>: Element<V> {
         self
     }
 
-    fn on_drop<W: 'static + Send>(
+    fn on_drop<W: 'static>(
         mut self,
-        listener: impl Fn(&mut V, View<W>, &mut ViewContext<V>) + Send + 'static,
+        listener: impl Fn(&mut V, View<W>, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -303,7 +303,7 @@ pub trait StatefulInteractive<V: 'static>: StatelessInteractive<V> {
 
     fn on_click(
         mut self,
-        listener: impl Fn(&mut V, &ClickEvent, &mut ViewContext<V>) + Send + 'static,
+        listener: impl Fn(&mut V, &ClickEvent, &mut ViewContext<V>) + 'static,
     ) -> Self
     where
         Self: Sized,
@@ -316,11 +316,11 @@ pub trait StatefulInteractive<V: 'static>: StatelessInteractive<V> {
 
     fn on_drag<W>(
         mut self,
-        listener: impl Fn(&mut V, &mut ViewContext<V>) -> View<W> + Send + 'static,
+        listener: impl Fn(&mut V, &mut ViewContext<V>) -> View<W> + 'static,
     ) -> Self
     where
         Self: Sized,
-        W: 'static + Send + Render,
+        W: 'static + Render,
     {
         debug_assert!(
             self.stateful_interaction().drag_listener.is_none(),
@@ -335,7 +335,7 @@ pub trait StatefulInteractive<V: 'static>: StatelessInteractive<V> {
     }
 }
 
-pub trait ElementInteraction<V: 'static>: 'static + Send {
+pub trait ElementInteraction<V: 'static>: 'static {
     fn as_stateless(&self) -> &StatelessInteraction<V>;
     fn as_stateless_mut(&mut self) -> &mut StatelessInteraction<V>;
     fn as_stateful(&self) -> Option<&StatefulInteraction<V>>;
@@ -672,7 +672,7 @@ impl<V> From<ElementId> for StatefulInteraction<V> {
     }
 }
 
-type DropListener<V> = dyn Fn(&mut V, AnyView, &mut ViewContext<V>) + 'static + Send;
+type DropListener<V> = dyn Fn(&mut V, AnyView, &mut ViewContext<V>) + 'static;
 
 pub struct StatelessInteraction<V> {
     pub dispatch_context: DispatchContext,
@@ -1077,32 +1077,25 @@ pub struct FocusEvent {
 }
 
 pub type MouseDownListener<V> = Box<
-    dyn Fn(&mut V, &MouseDownEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>)
-        + Send
-        + 'static,
+    dyn Fn(&mut V, &MouseDownEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>) + 'static,
 >;
 pub type MouseUpListener<V> = Box<
-    dyn Fn(&mut V, &MouseUpEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>)
-        + Send
-        + 'static,
+    dyn Fn(&mut V, &MouseUpEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>) + 'static,
 >;
 
 pub type MouseMoveListener<V> = Box<
-    dyn Fn(&mut V, &MouseMoveEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>)
-        + Send
-        + 'static,
+    dyn Fn(&mut V, &MouseMoveEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>) + 'static,
 >;
 
 pub type ScrollWheelListener<V> = Box<
     dyn Fn(&mut V, &ScrollWheelEvent, &Bounds<Pixels>, DispatchPhase, &mut ViewContext<V>)
-        + Send
         + 'static,
 >;
 
-pub type ClickListener<V> = Box<dyn Fn(&mut V, &ClickEvent, &mut ViewContext<V>) + Send + 'static>;
+pub type ClickListener<V> = Box<dyn Fn(&mut V, &ClickEvent, &mut ViewContext<V>) + 'static>;
 
 pub(crate) type DragListener<V> =
-    Box<dyn Fn(&mut V, Point<Pixels>, &mut ViewContext<V>) -> AnyDrag + Send + 'static>;
+    Box<dyn Fn(&mut V, Point<Pixels>, &mut ViewContext<V>) -> AnyDrag + 'static>;
 
 pub type KeyListener<V> = Box<
     dyn Fn(
@@ -1112,6 +1105,5 @@ pub type KeyListener<V> = Box<
             DispatchPhase,
             &mut ViewContext<V>,
         ) -> Option<Box<dyn Action>>
-        + Send
         + 'static,
 >;
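
The same relaxation applies to every listener alias above. A standalone sketch (plain Rust, not gpui2 code) of what dropping `Send` buys: a boxed `Fn(..) + 'static` can capture `Rc` state, which `Fn(..) + Send + 'static` could not:

    use std::{cell::Cell, rc::Rc};

    // Mirrors the relaxed shape of `ClickListener` and friends.
    type Listener = Box<dyn Fn(u32) + 'static>;

    fn main() {
        let clicks = Rc::new(Cell::new(0u32));
        let listener: Listener = {
            let clicks = clicks.clone();
            // Capturing an `Rc` is fine once `Send` is gone.
            Box::new(move |_button| clicks.set(clicks.get() + 1))
        };
        listener(0);
        listener(0);
        assert_eq!(clicks.get(), 2);
    }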

crates/gpui2/src/platform.rs 🔗

@@ -5,13 +5,14 @@ mod mac;
 mod test;
 
 use crate::{
-    AnyWindowHandle, Bounds, DevicePixels, Executor, Font, FontId, FontMetrics, FontRun,
-    GlobalPixels, GlyphId, InputEvent, LineLayout, Pixels, Point, RenderGlyphParams,
-    RenderImageParams, RenderSvgParams, Result, Scene, SharedString, Size,
+    AnyWindowHandle, BackgroundExecutor, Bounds, DevicePixels, Font, FontId, FontMetrics, FontRun,
+    ForegroundExecutor, GlobalPixels, GlyphId, InputEvent, LineLayout, Pixels, Point,
+    RenderGlyphParams, RenderImageParams, RenderSvgParams, Result, Scene, SharedString, Size,
 };
 use anyhow::{anyhow, bail};
 use async_task::Runnable;
 use futures::channel::oneshot;
+use parking::Unparker;
 use seahash::SeaHasher;
 use serde::{Deserialize, Serialize};
 use sqlez::bindable::{Bind, Column, StaticColumnCount};
@@ -38,12 +39,13 @@ pub use test::*;
 pub use time::UtcOffset;
 
 #[cfg(target_os = "macos")]
-pub(crate) fn current_platform() -> Arc<dyn Platform> {
-    Arc::new(MacPlatform::new())
+pub(crate) fn current_platform() -> Rc<dyn Platform> {
+    Rc::new(MacPlatform::new())
 }
 
 pub(crate) trait Platform: 'static {
-    fn executor(&self) -> Executor;
+    fn background_executor(&self) -> BackgroundExecutor;
+    fn foreground_executor(&self) -> ForegroundExecutor;
     fn text_system(&self) -> Arc<dyn PlatformTextSystem>;
 
     fn run(&self, on_finish_launching: Box<dyn 'static + FnOnce()>);
@@ -167,7 +169,9 @@ pub trait PlatformDispatcher: Send + Sync {
     fn dispatch(&self, runnable: Runnable);
     fn dispatch_on_main_thread(&self, runnable: Runnable);
     fn dispatch_after(&self, duration: Duration, runnable: Runnable);
-    fn poll(&self) -> bool;
+    fn poll(&self, background_only: bool) -> bool;
+    fn park(&self);
+    fn unparker(&self) -> Unparker;
 
     #[cfg(any(test, feature = "test-support"))]
     fn as_test(&self) -> Option<&TestDispatcher> {

crates/gpui2/src/platform/mac/dispatcher.rs 🔗

@@ -9,8 +9,11 @@ use objc::{
     runtime::{BOOL, YES},
     sel, sel_impl,
 };
+use parking::{Parker, Unparker};
+use parking_lot::Mutex;
 use std::{
     ffi::c_void,
+    sync::Arc,
     time::{Duration, SystemTime},
 };
 
@@ -20,7 +23,17 @@ pub fn dispatch_get_main_queue() -> dispatch_queue_t {
     unsafe { &_dispatch_main_q as *const _ as dispatch_queue_t }
 }
 
-pub struct MacDispatcher;
+pub struct MacDispatcher {
+    parker: Arc<Mutex<Parker>>,
+}
+
+impl MacDispatcher {
+    pub fn new() -> Self {
+        MacDispatcher {
+            parker: Arc::new(Mutex::new(Parker::new())),
+        }
+    }
+}
 
 impl PlatformDispatcher for MacDispatcher {
     fn is_main_thread(&self) -> bool {
@@ -68,33 +81,20 @@ impl PlatformDispatcher for MacDispatcher {
         }
     }
 
-    fn poll(&self) -> bool {
+    fn poll(&self, _background_only: bool) -> bool {
         false
     }
+
+    fn park(&self) {
+        self.parker.lock().park()
+    }
+
+    fn unparker(&self) -> Unparker {
+        self.parker.lock().unparker()
+    }
 }
 
 extern "C" fn trampoline(runnable: *mut c_void) {
     let task = unsafe { Runnable::from_raw(runnable as *mut ()) };
     task.run();
 }
-
-// #include <dispatch/dispatch.h>
-
-// int main(void) {
-
-//     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
-//         // Do some lengthy background work here...
-//         printf("Background Work\n");
-
-//         dispatch_async(dispatch_get_main_queue(), ^{
-//             // Once done, update your UI on the main queue here.
-//             printf("UI Updated\n");
-
-//         });
-//     });
-
-//     sleep(3);  // prevent the program from terminating immediately
-
-//     return 0;
-// }
-// ```

crates/gpui2/src/platform/mac/platform.rs 🔗

@@ -1,9 +1,9 @@
 use super::BoolExt;
 use crate::{
-    AnyWindowHandle, ClipboardItem, CursorStyle, DisplayId, Executor, InputEvent, MacDispatcher,
-    MacDisplay, MacDisplayLinker, MacTextSystem, MacWindow, PathPromptOptions, Platform,
-    PlatformDisplay, PlatformTextSystem, PlatformWindow, Result, SemanticVersion, VideoTimestamp,
-    WindowOptions,
+    AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId, ForegroundExecutor,
+    InputEvent, MacDispatcher, MacDisplay, MacDisplayLinker, MacTextSystem, MacWindow,
+    PathPromptOptions, Platform, PlatformDisplay, PlatformTextSystem, PlatformWindow, Result,
+    SemanticVersion, VideoTimestamp, WindowOptions,
 };
 use anyhow::anyhow;
 use block::ConcreteBlock;
@@ -143,7 +143,8 @@ unsafe fn build_classes() {
 pub struct MacPlatform(Mutex<MacPlatformState>);
 
 pub struct MacPlatformState {
-    executor: Executor,
+    background_executor: BackgroundExecutor,
+    foreground_executor: ForegroundExecutor,
     text_system: Arc<MacTextSystem>,
     display_linker: MacDisplayLinker,
     pasteboard: id,
@@ -164,8 +165,10 @@ pub struct MacPlatformState {
 
 impl MacPlatform {
     pub fn new() -> Self {
+        let dispatcher = Arc::new(MacDispatcher::new());
         Self(Mutex::new(MacPlatformState {
-            executor: Executor::new(Arc::new(MacDispatcher)),
+            background_executor: BackgroundExecutor::new(dispatcher.clone()),
+            foreground_executor: ForegroundExecutor::new(dispatcher),
             text_system: Arc::new(MacTextSystem::new()),
             display_linker: MacDisplayLinker::new(),
             pasteboard: unsafe { NSPasteboard::generalPasteboard(nil) },
@@ -345,8 +348,12 @@ impl MacPlatform {
 }
 
 impl Platform for MacPlatform {
-    fn executor(&self) -> Executor {
-        self.0.lock().executor.clone()
+    fn background_executor(&self) -> BackgroundExecutor {
+        self.0.lock().background_executor.clone()
+    }
+
+    fn foreground_executor(&self) -> crate::ForegroundExecutor {
+        self.0.lock().foreground_executor.clone()
     }
 
     fn text_system(&self) -> Arc<dyn PlatformTextSystem> {
@@ -457,6 +464,10 @@ impl Platform for MacPlatform {
         }
     }
 
+    // fn add_status_item(&self, _handle: AnyWindowHandle) -> Box<dyn platform::Window> {
+    //     Box::new(StatusItem::add(self.fonts()))
+    // }
+
     fn displays(&self) -> Vec<Rc<dyn PlatformDisplay>> {
         MacDisplay::all()
             .into_iter()
@@ -464,10 +475,6 @@ impl Platform for MacPlatform {
             .collect()
     }
 
-    // fn add_status_item(&self, _handle: AnyWindowHandle) -> Box<dyn platform::Window> {
-    //     Box::new(StatusItem::add(self.fonts()))
-    // }
-
     fn display(&self, id: DisplayId) -> Option<Rc<dyn PlatformDisplay>> {
         MacDisplay::find_by_id(id).map(|screen| Rc::new(screen) as Rc<_>)
     }
@@ -481,7 +488,7 @@ impl Platform for MacPlatform {
         handle: AnyWindowHandle,
         options: WindowOptions,
     ) -> Box<dyn PlatformWindow> {
-        Box::new(MacWindow::open(handle, options, self.executor()))
+        Box::new(MacWindow::open(handle, options, self.foreground_executor()))
     }
 
     fn set_display_link_output_callback(
@@ -589,8 +596,8 @@ impl Platform for MacPlatform {
             let path = path.to_path_buf();
             self.0
                 .lock()
-                .executor
-                .spawn_on_main_local(async move {
+                .background_executor
+                .spawn(async move {
                     let full_path = ns_string(path.to_str().unwrap_or(""));
                     let root_full_path = ns_string("");
                     let workspace: id = msg_send![class!(NSWorkspace), sharedWorkspace];
@@ -674,23 +681,6 @@ impl Platform for MacPlatform {
         }
     }
 
-    fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
-        unsafe {
-            let bundle: id = NSBundle::mainBundle();
-            if bundle.is_null() {
-                Err(anyhow!("app is not running inside a bundle"))
-            } else {
-                let name = ns_string(name);
-                let url: id = msg_send![bundle, URLForAuxiliaryExecutable: name];
-                if url.is_null() {
-                    Err(anyhow!("resource not found"))
-                } else {
-                    ns_url_to_path(url)
-                }
-            }
-        }
-    }
-
     // fn on_menu_command(&self, callback: Box<dyn FnMut(&dyn Action)>) {
     //     self.0.lock().menu_command = Some(callback);
     // }
@@ -717,6 +707,23 @@ impl Platform for MacPlatform {
     //     }
     // }
 
+    fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
+        unsafe {
+            let bundle: id = NSBundle::mainBundle();
+            if bundle.is_null() {
+                Err(anyhow!("app is not running inside a bundle"))
+            } else {
+                let name = ns_string(name);
+                let url: id = msg_send![bundle, URLForAuxiliaryExecutable: name];
+                if url.is_null() {
+                    Err(anyhow!("resource not found"))
+                } else {
+                    ns_url_to_path(url)
+                }
+            }
+        }
+    }
+
     fn set_cursor_style(&self, style: CursorStyle) {
         unsafe {
             let new_cursor: id = match style {

crates/gpui2/src/platform/mac/window.rs 🔗

@@ -1,10 +1,10 @@
 use super::{display_bounds_from_native, ns_string, MacDisplay, MetalRenderer, NSRange};
 use crate::{
-    display_bounds_to_native, point, px, size, AnyWindowHandle, Bounds, Executor, ExternalPaths,
-    FileDropEvent, GlobalPixels, InputEvent, KeyDownEvent, Keystroke, Modifiers,
-    ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels,
-    PlatformAtlas, PlatformDisplay, PlatformInputHandler, PlatformWindow, Point, Scene, Size,
-    Timer, WindowAppearance, WindowBounds, WindowKind, WindowOptions, WindowPromptLevel,
+    display_bounds_to_native, point, px, size, AnyWindowHandle, Bounds, ExternalPaths,
+    FileDropEvent, ForegroundExecutor, GlobalPixels, InputEvent, KeyDownEvent, Keystroke,
+    Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
+    Pixels, PlatformAtlas, PlatformDisplay, PlatformInputHandler, PlatformWindow, Point, Scene,
+    Size, Timer, WindowAppearance, WindowBounds, WindowKind, WindowOptions, WindowPromptLevel,
 };
 use block::ConcreteBlock;
 use cocoa::{
@@ -315,7 +315,7 @@ struct InsertText {
 
 struct MacWindowState {
     handle: AnyWindowHandle,
-    executor: Executor,
+    executor: ForegroundExecutor,
     native_window: id,
     renderer: MetalRenderer,
     scene_to_render: Option<Scene>,
@@ -451,7 +451,11 @@ unsafe impl Send for MacWindowState {}
 pub struct MacWindow(Arc<Mutex<MacWindowState>>);
 
 impl MacWindow {
-    pub fn open(handle: AnyWindowHandle, options: WindowOptions, executor: Executor) -> Self {
+    pub fn open(
+        handle: AnyWindowHandle,
+        options: WindowOptions,
+        executor: ForegroundExecutor,
+    ) -> Self {
         unsafe {
             let pool = NSAutoreleasePool::new(nil);
 
@@ -674,13 +678,10 @@ impl MacWindow {
 
 impl Drop for MacWindow {
     fn drop(&mut self) {
-        let this = self.0.clone();
-        let executor = self.0.lock().executor.clone();
-        executor
-            .run_on_main(move || unsafe {
-                this.lock().native_window.close();
-            })
-            .detach();
+        let native_window = self.0.lock().native_window;
+        unsafe {
+            native_window.close();
+        }
     }
 }
 
@@ -807,7 +808,7 @@ impl PlatformWindow for MacWindow {
             let native_window = self.0.lock().native_window;
             let executor = self.0.lock().executor.clone();
             executor
-                .spawn_on_main_local(async move {
+                .spawn(async move {
                     let _: () = msg_send![
                         alert,
                         beginSheetModalForWindow: native_window
@@ -824,7 +825,7 @@ impl PlatformWindow for MacWindow {
         let window = self.0.lock().native_window;
         let executor = self.0.lock().executor.clone();
         executor
-            .spawn_on_main_local(async move {
+            .spawn(async move {
                 unsafe {
                     let _: () = msg_send![window, makeKeyAndOrderFront: nil];
                 }
@@ -872,25 +873,17 @@ impl PlatformWindow for MacWindow {
     fn zoom(&self) {
         let this = self.0.lock();
         let window = this.native_window;
-        this.executor
-            .spawn_on_main_local(async move {
-                unsafe {
-                    window.zoom_(nil);
-                }
-            })
-            .detach();
+        unsafe {
+            window.zoom_(nil);
+        }
     }
 
     fn toggle_full_screen(&self) {
         let this = self.0.lock();
         let window = this.native_window;
-        this.executor
-            .spawn_on_main_local(async move {
-                unsafe {
-                    window.toggleFullScreen_(nil);
-                }
-            })
-            .detach();
+        unsafe {
+            window.toggleFullScreen_(nil);
+        }
     }
 
     fn on_input(&self, callback: Box<dyn FnMut(InputEvent) -> bool>) {
@@ -1189,7 +1182,7 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
                 lock.synthetic_drag_counter += 1;
                 let executor = lock.executor.clone();
                 executor
-                    .spawn_on_main_local(synthetic_drag(
+                    .spawn(synthetic_drag(
                         weak_window_state,
                         lock.synthetic_drag_counter,
                         event.clone(),
@@ -1317,7 +1310,7 @@ extern "C" fn window_did_change_key_status(this: &Object, selector: Sel, _: id)
     let executor = lock.executor.clone();
     drop(lock);
     executor
-        .spawn_on_main_local(async move {
+        .spawn(async move {
             let mut lock = window_state.as_ref().lock();
             if let Some(mut callback) = lock.activate_callback.take() {
                 drop(lock);

crates/gpui2/src/platform/test/dispatcher.rs 🔗

@@ -1,6 +1,8 @@
 use crate::PlatformDispatcher;
 use async_task::Runnable;
+use backtrace::Backtrace;
 use collections::{HashMap, VecDeque};
+use parking::{Parker, Unparker};
 use parking_lot::Mutex;
 use rand::prelude::*;
 use std::{
@@ -18,6 +20,8 @@ struct TestDispatcherId(usize);
 pub struct TestDispatcher {
     id: TestDispatcherId,
     state: Arc<Mutex<TestDispatcherState>>,
+    parker: Arc<Mutex<Parker>>,
+    unparker: Unparker,
 }
 
 struct TestDispatcherState {
@@ -28,10 +32,13 @@ struct TestDispatcherState {
     time: Duration,
     is_main_thread: bool,
     next_id: TestDispatcherId,
+    allow_parking: bool,
+    waiting_backtrace: Option<Backtrace>,
 }
 
 impl TestDispatcher {
     pub fn new(random: StdRng) -> Self {
+        let (parker, unparker) = parking::pair();
         let state = TestDispatcherState {
             random,
             foreground: HashMap::default(),
@@ -40,11 +47,15 @@ impl TestDispatcher {
             time: Duration::ZERO,
             is_main_thread: true,
             next_id: TestDispatcherId(1),
+            allow_parking: false,
+            waiting_backtrace: None,
         };
 
         TestDispatcher {
             id: TestDispatcherId(0),
             state: Arc::new(Mutex::new(state)),
+            parker: Arc::new(Mutex::new(parker)),
+            unparker,
         }
     }
 
@@ -66,7 +77,7 @@ impl TestDispatcher {
         self.state.lock().time = new_now;
     }
 
-    pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
+    pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
         pub struct YieldNow {
             count: usize,
         }
@@ -91,7 +102,30 @@ impl TestDispatcher {
     }
 
     pub fn run_until_parked(&self) {
-        while self.poll() {}
+        while self.poll(false) {}
+    }
+
+    pub fn parking_allowed(&self) -> bool {
+        self.state.lock().allow_parking
+    }
+
+    pub fn allow_parking(&self) {
+        self.state.lock().allow_parking = true
+    }
+
+    pub fn start_waiting(&self) {
+        self.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
+    }
+
+    pub fn finish_waiting(&self) {
+        self.state.lock().waiting_backtrace.take();
+    }
+
+    pub fn waiting_backtrace(&self) -> Option<Backtrace> {
+        self.state.lock().waiting_backtrace.take().map(|mut b| {
+            b.resolve();
+            b
+        })
     }
 }
 
@@ -101,6 +135,8 @@ impl Clone for TestDispatcher {
         Self {
             id: TestDispatcherId(id),
             state: self.state.clone(),
+            parker: self.parker.clone(),
+            unparker: self.unparker.clone(),
         }
     }
 }
@@ -112,6 +148,7 @@ impl PlatformDispatcher for TestDispatcher {
 
     fn dispatch(&self, runnable: Runnable) {
         self.state.lock().background.push(runnable);
+        self.unparker.unpark();
     }
 
     fn dispatch_on_main_thread(&self, runnable: Runnable) {
@@ -121,6 +158,7 @@ impl PlatformDispatcher for TestDispatcher {
             .entry(self.id)
             .or_default()
             .push_back(runnable);
+        self.unparker.unpark();
     }
 
     fn dispatch_after(&self, duration: std::time::Duration, runnable: Runnable) {
@@ -132,7 +170,7 @@ impl PlatformDispatcher for TestDispatcher {
         state.delayed.insert(ix, (next_time, runnable));
     }
 
-    fn poll(&self) -> bool {
+    fn poll(&self, background_only: bool) -> bool {
         let mut state = self.state.lock();
 
         while let Some((deadline, _)) = state.delayed.first() {
@@ -143,11 +181,15 @@ impl PlatformDispatcher for TestDispatcher {
             state.background.push(runnable);
         }
 
-        let foreground_len: usize = state
-            .foreground
-            .values()
-            .map(|runnables| runnables.len())
-            .sum();
+        let foreground_len: usize = if background_only {
+            0
+        } else {
+            state
+                .foreground
+                .values()
+                .map(|runnables| runnables.len())
+                .sum()
+        };
         let background_len = state.background.len();
 
         if foreground_len == 0 && background_len == 0 {
@@ -183,62 +225,15 @@ impl PlatformDispatcher for TestDispatcher {
         true
     }
 
-    fn as_test(&self) -> Option<&TestDispatcher> {
-        Some(self)
+    fn park(&self) {
+        self.parker.lock().park();
     }
-}
 
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::Executor;
-    use std::sync::Arc;
-
-    #[test]
-    fn test_dispatch() {
-        let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0));
-        let executor = Executor::new(Arc::new(dispatcher));
-
-        let result = executor.block(async { executor.run_on_main(|| 1).await });
-        assert_eq!(result, 1);
-
-        let result = executor.block({
-            let executor = executor.clone();
-            async move {
-                executor
-                    .spawn_on_main({
-                        let executor = executor.clone();
-                        assert!(executor.is_main_thread());
-                        || async move {
-                            assert!(executor.is_main_thread());
-                            let result = executor
-                                .spawn({
-                                    let executor = executor.clone();
-                                    async move {
-                                        assert!(!executor.is_main_thread());
-
-                                        let result = executor
-                                            .spawn_on_main({
-                                                let executor = executor.clone();
-                                                || async move {
-                                                    assert!(executor.is_main_thread());
-                                                    2
-                                                }
-                                            })
-                                            .await;
-
-                                        assert!(!executor.is_main_thread());
-                                        result
-                                    }
-                                })
-                                .await;
-                            assert!(executor.is_main_thread());
-                            result
-                        }
-                    })
-                    .await
-            }
-        });
-        assert_eq!(result, 2);
+    fn unparker(&self) -> Unparker {
+        self.unparker.clone()
+    }
+
+    fn as_test(&self) -> Option<&TestDispatcher> {
+        Some(self)
     }
 }
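
A hypothetical test sketch of how these pieces fit together, assuming `TestDispatcher` is re-exported under the `test-support` feature; the constructor calls mirror this diff, but the test body itself is illustrative:

    use gpui2::{BackgroundExecutor, TestDispatcher};
    use rand::prelude::*;
    use std::sync::Arc;

    fn main() {
        // A seeded dispatcher makes the scheduling order deterministic.
        let dispatcher = TestDispatcher::new(StdRng::seed_from_u64(0));
        let executor = BackgroundExecutor::new(Arc::new(dispatcher.clone()));

        let task = executor.spawn(async { 2 + 2 });
        dispatcher.run_until_parked(); // polls until no runnables remain
        assert_eq!(executor.block(task), 4);
    }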

crates/gpui2/src/platform/test/platform.rs 🔗

@@ -1,21 +1,29 @@
-use crate::{DisplayId, Executor, Platform, PlatformTextSystem};
+use crate::{BackgroundExecutor, DisplayId, ForegroundExecutor, Platform, PlatformTextSystem};
 use anyhow::{anyhow, Result};
 use std::sync::Arc;
 
 pub struct TestPlatform {
-    executor: Executor,
+    background_executor: BackgroundExecutor,
+    foreground_executor: ForegroundExecutor,
 }
 
 impl TestPlatform {
-    pub fn new(executor: Executor) -> Self {
-        TestPlatform { executor }
+    pub fn new(executor: BackgroundExecutor, foreground_executor: ForegroundExecutor) -> Self {
+        TestPlatform {
+            background_executor: executor,
+            foreground_executor,
+        }
     }
 }
 
 // todo!("figure out what our tests needed in GPUI 1")
 impl Platform for TestPlatform {
-    fn executor(&self) -> Executor {
-        self.executor.clone()
+    fn background_executor(&self) -> BackgroundExecutor {
+        self.background_executor.clone()
+    }
+
+    fn foreground_executor(&self) -> ForegroundExecutor {
+        self.foreground_executor.clone()
     }
 
     fn text_system(&self) -> Arc<dyn PlatformTextSystem> {

crates/gpui2/src/subscription.rs 🔗

@@ -21,8 +21,8 @@ struct SubscriberSetState<EmitterKey, Callback> {
 
 impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback>
 where
-    EmitterKey: 'static + Send + Ord + Clone + Debug,
-    Callback: 'static + Send,
+    EmitterKey: 'static + Ord + Clone + Debug,
+    Callback: 'static,
 {
     pub fn new() -> Self {
         Self(Arc::new(Mutex::new(SubscriberSetState {
@@ -96,7 +96,7 @@ where
 
 #[must_use]
 pub struct Subscription {
-    unsubscribe: Option<Box<dyn FnOnce() + Send + 'static>>,
+    unsubscribe: Option<Box<dyn FnOnce() + 'static>>,
 }
 
 impl Subscription {

crates/gpui2/src/view.rs 🔗

@@ -5,13 +5,13 @@ use crate::{
 };
 use anyhow::{Context, Result};
 use std::{
-    any::TypeId,
+    any::{Any, TypeId},
     hash::{Hash, Hasher},
     marker::PhantomData,
 };
 
 pub trait Render: 'static + Sized {
-    type Element: Element<Self> + 'static + Send;
+    type Element: Element<Self> + 'static;
 
     fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element;
 }
@@ -199,7 +199,7 @@ impl<V: Render, ParentV: 'static> Component<ParentV> for EraseViewState<V, Paren
 }
 
 impl<V: Render, ParentV: 'static> Element<ParentV> for EraseViewState<V, ParentV> {
-    type ElementState = AnyBox;
+    type ElementState = Box<dyn Any>;
 
     fn id(&self) -> Option<ElementId> {
         Element::id(&self.view)
@@ -379,7 +379,7 @@ impl<V: Render> From<View<V>> for AnyView {
 }
 
 impl<ParentViewState: 'static> Element<ParentViewState> for AnyView {
-    type ElementState = AnyBox;
+    type ElementState = Box<dyn Any>;
 
     fn id(&self) -> Option<ElementId> {
         Some(self.model.entity_id.into())

crates/gpui2/src/window.rs 🔗

@@ -3,12 +3,11 @@ use crate::{
     Bounds, BoxShadow, Context, Corners, DevicePixels, DispatchContext, DisplayId, Edges, Effect,
     Entity, EntityId, EventEmitter, FileDropEvent, FocusEvent, FontId, GlobalElementId, GlyphId,
     Hsla, ImageData, InputEvent, IsZero, KeyListener, KeyMatch, KeyMatcher, Keystroke, LayoutId,
-    MainThread, MainThreadOnly, Model, ModelContext, Modifiers, MonochromeSprite, MouseButton,
-    MouseDownEvent, MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformWindow,
-    Point, PolychromeSprite, Quad, Render, RenderGlyphParams, RenderImageParams, RenderSvgParams,
-    ScaledPixels, SceneBuilder, Shadow, SharedString, Size, Style, Subscription, TaffyLayoutEngine,
-    Task, Underline, UnderlineStyle, UpdateView, View, VisualContext, WeakView, WindowOptions,
-    SUBPIXEL_VARIANTS,
+    Model, ModelContext, Modifiers, MonochromeSprite, MouseButton, MouseDownEvent, MouseMoveEvent,
+    MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformWindow, Point, PolychromeSprite, Quad,
+    Render, RenderGlyphParams, RenderImageParams, RenderSvgParams, ScaledPixels, SceneBuilder,
+    Shadow, SharedString, Size, Style, Subscription, TaffyLayoutEngine, Task, Underline,
+    UnderlineStyle, UpdateView, View, VisualContext, WeakView, WindowOptions, SUBPIXEL_VARIANTS,
 };
 use anyhow::{anyhow, Result};
 use collections::HashMap;
@@ -53,7 +52,7 @@ pub enum DispatchPhase {
     Capture,
 }
 
-type AnyListener = Box<dyn Fn(&dyn Any, DispatchPhase, &mut WindowContext) + Send + 'static>;
+type AnyListener = Box<dyn Fn(&dyn Any, DispatchPhase, &mut WindowContext) + 'static>;
 type AnyKeyListener = Box<
     dyn Fn(
             &dyn Any,
@@ -61,10 +60,9 @@ type AnyKeyListener = Box<
             DispatchPhase,
             &mut WindowContext,
         ) -> Option<Box<dyn Action>>
-        + Send
         + 'static,
 >;
-type AnyFocusListener = Box<dyn Fn(&FocusEvent, &mut WindowContext) + Send + 'static>;
+type AnyFocusListener = Box<dyn Fn(&FocusEvent, &mut WindowContext) + 'static>;
 
 slotmap::new_key_type! { pub struct FocusId; }
 
@@ -160,7 +158,7 @@ impl Drop for FocusHandle {
 // Holds the state for a specific window.
 pub struct Window {
     pub(crate) handle: AnyWindowHandle,
-    platform_window: MainThreadOnly<Box<dyn PlatformWindow>>,
+    platform_window: Box<dyn PlatformWindow>,
     display_id: DisplayId,
     sprite_atlas: Arc<dyn PlatformAtlas>,
     rem_size: Pixels,
@@ -195,7 +193,7 @@ impl Window {
     pub(crate) fn new(
         handle: AnyWindowHandle,
         options: WindowOptions,
-        cx: &mut MainThread<AppContext>,
+        cx: &mut AppContext,
     ) -> Self {
         let platform_window = cx.platform().open_window(handle, options);
         let display_id = platform_window.display().id();
@@ -206,20 +204,14 @@ impl Window {
         platform_window.on_resize(Box::new({
             let mut cx = cx.to_async();
             move |content_size, scale_factor| {
-                handle
-                    .update(&mut cx, |_, cx| {
-                        cx.window.scale_factor = scale_factor;
-                        cx.window.scene_builder = SceneBuilder::new();
-                        cx.window.content_size = content_size;
-                        cx.window.display_id = cx
-                            .window
-                            .platform_window
-                            .borrow_on_main_thread()
-                            .display()
-                            .id();
-                        cx.window.dirty = true;
-                    })
-                    .log_err();
+                cx.update_window(handle, |cx| {
+                    cx.window.scale_factor = scale_factor;
+                    cx.window.scene_builder = SceneBuilder::new();
+                    cx.window.content_size = content_size;
+                    cx.window.display_id = cx.window.platform_window.display().id();
+                    cx.window.dirty = true;
+                })
+                .log_err();
             }
         }));
 
@@ -233,8 +225,6 @@ impl Window {
             })
         });
 
-        let platform_window = MainThreadOnly::new(Arc::new(platform_window), cx.executor.clone());
-
         Window {
             handle,
             platform_window,
@@ -408,27 +398,6 @@ impl<'a> WindowContext<'a> {
         )
     }
 
-    /// Schedule the given closure to be run on the main thread. It will be invoked with
-    /// a `MainThread<WindowContext>`, which provides access to platform-specific functionality
-    /// of the window.
-    pub fn run_on_main<R>(
-        &mut self,
-        f: impl FnOnce(&mut MainThread<WindowContext<'_>>) -> R + Send + 'static,
-    ) -> Task<Result<R>>
-    where
-        R: Send + 'static,
-    {
-        if self.executor.is_main_thread() {
-            Task::ready(Ok(f(unsafe {
-                mem::transmute::<&mut Self, &mut MainThread<Self>>(self)
-            })))
-        } else {
-            let handle = self.window.handle;
-            self.app
-                .run_on_main(move |cx| handle.update(cx, |_, cx| f(cx)))
-        }
-    }
-
     /// Create an `AsyncWindowContext`, which has a static lifetime and can be held across
     /// await points in async code.
     pub fn to_async(&self) -> AsyncWindowContext {
@@ -439,44 +408,39 @@ impl<'a> WindowContext<'a> {
     pub fn on_next_frame(&mut self, f: impl FnOnce(&mut WindowContext) + Send + 'static) {
         let f = Box::new(f);
         let display_id = self.window.display_id;
-        self.run_on_main(move |cx| {
-            if let Some(callbacks) = cx.next_frame_callbacks.get_mut(&display_id) {
-                callbacks.push(f);
-                // If there was already a callback, it means that we already scheduled a frame.
-                if callbacks.len() > 1 {
-                    return;
-                }
-            } else {
-                let mut async_cx = cx.to_async();
-                cx.next_frame_callbacks.insert(display_id, vec![f]);
-                cx.platform().set_display_link_output_callback(
-                    display_id,
-                    Box::new(move |_current_time, _output_time| {
-                        let _ = async_cx.update(|_, cx| {
-                            let callbacks = cx
-                                .next_frame_callbacks
-                                .get_mut(&display_id)
-                                .unwrap()
-                                .drain(..)
-                                .collect::<Vec<_>>();
-                            for callback in callbacks {
-                                callback(cx);
-                            }
 
-                            cx.run_on_main(move |cx| {
-                                if cx.next_frame_callbacks.get(&display_id).unwrap().is_empty() {
-                                    cx.platform().stop_display_link(display_id);
-                                }
-                            })
-                            .detach();
-                        });
-                    }),
-                );
+        if let Some(callbacks) = self.next_frame_callbacks.get_mut(&display_id) {
+            callbacks.push(f);
+            // If there was already a callback, it means that we already scheduled a frame.
+            if callbacks.len() > 1 {
+                return;
             }
+        } else {
+            let async_cx = self.to_async();
+            self.next_frame_callbacks.insert(display_id, vec![f]);
+            self.platform().set_display_link_output_callback(
+                display_id,
+                Box::new(move |_current_time, _output_time| {
+                    let _ = async_cx.update(|_, cx| {
+                        let callbacks = cx
+                            .next_frame_callbacks
+                            .get_mut(&display_id)
+                            .unwrap()
+                            .drain(..)
+                            .collect::<Vec<_>>();
+                        for callback in callbacks {
+                            callback(cx);
+                        }
 
-            cx.platform().start_display_link(display_id);
-        })
-        .detach();
+                        if cx.next_frame_callbacks.get(&display_id).unwrap().is_empty() {
+                            cx.platform().stop_display_link(display_id);
+                        }
+                    });
+                }),
+            );
+        }
+
+        self.platform().start_display_link(display_id);
     }
 
     /// Spawn the future returned by the given closure on the application thread pool.
@@ -487,8 +451,8 @@ impl<'a> WindowContext<'a> {
         f: impl FnOnce(AnyWindowHandle, AsyncWindowContext) -> Fut,
     ) -> Task<R>
     where
-        R: Send + 'static,
-        Fut: Future<Output = R> + Send + 'static,
+        R: 'static,
+        Fut: Future<Output = R> + 'static,
     {
         let window = self.window.handle;
         self.app.spawn(move |app| {
@@ -605,7 +569,7 @@ impl<'a> WindowContext<'a> {
     /// a specific need to register a global listener.
     pub fn on_mouse_event<Event: 'static>(
         &mut self,
-        handler: impl Fn(&Event, DispatchPhase, &mut WindowContext) + Send + 'static,
+        handler: impl Fn(&Event, DispatchPhase, &mut WindowContext) + 'static,
     ) {
         let order = self.window.z_index_stack.clone();
         self.window
@@ -942,14 +906,8 @@ impl<'a> WindowContext<'a> {
         self.window.root_view = Some(root_view);
         let scene = self.window.scene_builder.build();
 
-        self.run_on_main(|cx| {
-            cx.window
-                .platform_window
-                .borrow_on_main_thread()
-                .draw(scene);
-            cx.window.dirty = false;
-        })
-        .detach();
+        self.window.platform_window.draw(scene);
+        self.window.dirty = false;
     }
 
     fn start_frame(&mut self) {
@@ -1283,7 +1241,7 @@ impl Context for WindowContext<'_> {
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
     ) -> Model<T>
     where
-        T: 'static + Send,
+        T: 'static,
     {
         let slot = self.app.entities.reserve();
         let model = build_model(&mut ModelContext::new(&mut *self.app, slot.downgrade()));
@@ -1325,7 +1283,7 @@ impl VisualContext for WindowContext<'_> {
         build_view_state: impl FnOnce(&mut Self::ViewContext<'_, V>) -> V,
     ) -> Self::Result<View<V>>
     where
-        V: 'static + Send,
+        V: 'static,
     {
         let slot = self.app.entities.reserve();
         let view = View {
@@ -1501,7 +1459,7 @@ pub trait BorrowWindow: BorrowMut<Window> + BorrowMut<AppContext> {
         f: impl FnOnce(Option<S>, &mut Self) -> (R, S),
     ) -> R
     where
-        S: 'static + Send,
+        S: 'static,
     {
         self.with_element_id(id, |global_id, cx| {
             if let Some(any) = cx
@@ -1539,7 +1497,7 @@ pub trait BorrowWindow: BorrowMut<Window> + BorrowMut<AppContext> {
         f: impl FnOnce(Option<S>, &mut Self) -> (R, S),
     ) -> R
     where
-        S: 'static + Send,
+        S: 'static,
     {
         if let Some(element_id) = element_id {
             self.with_element_state(element_id, f)
@@ -1858,30 +1816,13 @@ impl<'a, V: 'static> ViewContext<'a, V> {
         result
     }
 
-    pub fn run_on_main<R>(
-        &mut self,
-        view: &mut V,
-        f: impl FnOnce(&mut V, &mut MainThread<ViewContext<'_, V>>) -> R + Send + 'static,
-    ) -> Task<Result<R>>
-    where
-        R: Send + 'static,
-    {
-        if self.executor.is_main_thread() {
-            let cx = unsafe { mem::transmute::<&mut Self, &mut MainThread<Self>>(self) };
-            Task::ready(Ok(f(view, cx)))
-        } else {
-            let view = self.view();
-            self.window_cx.run_on_main(move |cx| view.update(cx, f))
-        }
-    }
-
     pub fn spawn<Fut, R>(
         &mut self,
         f: impl FnOnce(WeakView<V>, AsyncWindowContext) -> Fut,
     ) -> Task<R>
     where
-        R: Send + 'static,
-        Fut: Future<Output = R> + Send + 'static,
+        R: 'static,
+        Fut: Future<Output = R> + 'static,
     {
         let view = self.view().downgrade();
         self.window_cx.spawn(move |_, cx| f(view, cx))
@@ -1915,7 +1856,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
 
     pub fn on_mouse_event<Event: 'static>(
         &mut self,
-        handler: impl Fn(&mut V, &Event, DispatchPhase, &mut ViewContext<V>) + Send + 'static,
+        handler: impl Fn(&mut V, &Event, DispatchPhase, &mut ViewContext<V>) + 'static,
     ) {
         let handle = self.view();
         self.window_cx.on_mouse_event(move |event, phase, cx| {
@@ -1940,28 +1881,15 @@ where
     }
 }
 
-impl<V: 'static> MainThread<ViewContext<'_, V>> {
-    fn platform_window(&self) -> &dyn PlatformWindow {
-        self.window.platform_window.borrow_on_main_thread().as_ref()
-    }
-
-    pub fn activate_window(&self) {
-        self.platform_window().activate();
-    }
-}
-
 impl<V> Context for ViewContext<'_, V> {
     type WindowContext<'a> = WindowContext<'a>;
     type ModelContext<'b, U> = ModelContext<'b, U>;
     type Result<U> = U;
 
-    fn build_model<T>(
+    fn build_model<T: 'static>(
         &mut self,
         build_model: impl FnOnce(&mut Self::ModelContext<'_, T>) -> T,
-    ) -> Model<T>
-    where
-        T: 'static + Send,
-    {
+    ) -> Model<T> {
         self.window_cx.build_model(build_model)
     }
 
@@ -1984,7 +1912,7 @@ impl<V> Context for ViewContext<'_, V> {
 impl<V: 'static> VisualContext for ViewContext<'_, V> {
     type ViewContext<'a, W: 'static> = ViewContext<'a, W>;
 
-    fn build_view<W: 'static + Send>(
+    fn build_view<W: 'static>(
         &mut self,
         build_view: impl FnOnce(&mut Self::ViewContext<'_, W>) -> W,
     ) -> Self::Result<View<W>> {

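The reworked `on_next_frame` now drives the display link directly on the current thread. A standalone sketch of the coalescing scheme it implements: queue callbacks per display, start the link when a queue goes from empty to non-empty, and stop it once a tick drains the queue without anything being rescheduled (`FrameScheduler` and the `println!` calls are stand-ins, not gpui2 API):

    use std::collections::HashMap;

    type FrameCallback = Box<dyn FnOnce(&mut FrameScheduler)>;

    #[derive(Default)]
    struct FrameScheduler {
        // Pending next-frame callbacks, keyed by display id.
        callbacks: HashMap<u32, Vec<FrameCallback>>,
    }

    impl FrameScheduler {
        fn on_next_frame(&mut self, display_id: u32, callback: FrameCallback) {
            let callbacks = self.callbacks.entry(display_id).or_default();
            callbacks.push(callback);
            if callbacks.len() == 1 {
                // Queue just became non-empty: start this display's ticker
                // (stand-in for `start_display_link`).
                println!("start display link {display_id}");
            }
        }

        // Invoked on every display-link tick (stand-in for the output callback).
        fn on_tick(&mut self, display_id: u32) {
            let drained: Vec<FrameCallback> = self
                .callbacks
                .get_mut(&display_id)
                .map(|callbacks| callbacks.drain(..).collect())
                .unwrap_or_default();
            for callback in drained {
                callback(self); // a callback may schedule the next frame again
            }
            if self.callbacks.get(&display_id).map_or(true, |cbs| cbs.is_empty()) {
                // Nothing was rescheduled: stop the ticker (stand-in for `stop_display_link`).
                println!("stop display link {display_id}");
            }
        }
    }

    fn main() {
        let mut scheduler = FrameScheduler::default();
        scheduler.on_next_frame(
            1,
            Box::new(|s: &mut FrameScheduler| {
                s.on_next_frame(1, Box::new(|_: &mut FrameScheduler| println!("frame 2")))
            }),
        );
        scheduler.on_tick(1); // runs frame 1's callback, which schedules frame 2
        scheduler.on_tick(1); // runs frame 2's callback, then stops the link
    }
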
crates/gpui2_macros/src/test.rs 🔗

@@ -89,9 +89,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                             inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(_seed),));
                             continue;
                         }
-                        Some("Executor") => {
-                            inner_fn_args.extend(quote!(gpui2::Executor::new(
-                                std::sync::Arc::new(dispatcher.clone())
+                        Some("BackgroundExecutor") => {
+                            inner_fn_args.extend(quote!(gpui2::BackgroundExecutor::new(
+                                std::sync::Arc::new(dispatcher.clone()),
                             ),));
                             continue;
                         }
@@ -134,9 +134,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                     #num_iterations as u64,
                     #max_retries,
                     &mut |dispatcher, _seed| {
-                        let executor = gpui2::Executor::new(std::sync::Arc::new(dispatcher.clone()));
+                        let executor = gpui2::BackgroundExecutor::new(std::sync::Arc::new(dispatcher.clone()));
                         #cx_vars
-                        executor.block(#inner_fn_name(#inner_fn_args));
+                        executor.block_test(#inner_fn_name(#inner_fn_args));
                         #cx_teardowns
                     },
                     #on_failure_fn_name,
@@ -170,7 +170,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                                     let mut #cx_varname = gpui2::TestAppContext::new(
                                        dispatcher.clone()
                                     );
-                                    let mut #cx_varname_lock = #cx_varname.app.lock();
+                                    let mut #cx_varname_lock = #cx_varname.app.borrow_mut();
                                 ));
                                 inner_fn_args.extend(quote!(&mut #cx_varname_lock,));
                                 cx_teardowns.extend(quote!(

crates/install_cli2/src/install_cli2.rs 🔗

@@ -7,9 +7,7 @@ use util::ResultExt;
 // actions!(cli, [Install]);
 
 pub async fn install_cli(cx: &AsyncAppContext) -> Result<()> {
-    let cli_path = cx
-        .run_on_main(|cx| cx.path_for_auxiliary_executable("cli"))?
-        .await?;
+    let cli_path = cx.update(|cx| cx.path_for_auxiliary_executable("cli"))??;
     let link_path = Path::new("/usr/local/bin/zed");
     let bin_dir_path = link_path.parent().unwrap();
 

crates/journal2/src/journal2.rs 🔗

@@ -77,7 +77,7 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
     let now = now.time();
     let _entry_heading = heading_entry(now, &settings.hour_format);
 
-    let _create_entry = cx.executor().spawn(async move {
+    let _create_entry = cx.background_executor().spawn(async move {
         std::fs::create_dir_all(month_dir)?;
         OpenOptions::new()
             .create(true)

crates/language2/src/buffer.rs 🔗

@@ -434,7 +434,7 @@ impl Buffer {
         ));
 
         let text_operations = self.text.operations().clone();
-        cx.spawn(|_| async move {
+        cx.background_executor().spawn(async move {
             let since = since.unwrap_or_default();
             operations.extend(
                 text_operations
@@ -652,7 +652,7 @@ impl Buffer {
 
                     if !self.is_dirty() {
                         let reload = self.reload(cx).log_err().map(drop);
-                        task = cx.executor().spawn(reload);
+                        task = cx.background_executor().spawn(reload);
                     }
                 }
             }
@@ -684,7 +684,7 @@ impl Buffer {
         let snapshot = self.snapshot();
 
         let mut diff = self.git_diff.clone();
-        let diff = cx.executor().spawn(async move {
+        let diff = cx.background_executor().spawn(async move {
             diff.update(&diff_base, &snapshot).await;
             diff
         });
@@ -793,7 +793,7 @@ impl Buffer {
         let mut syntax_snapshot = syntax_map.snapshot();
         drop(syntax_map);
 
-        let parse_task = cx.executor().spawn({
+        let parse_task = cx.background_executor().spawn({
             let language = language.clone();
             let language_registry = language_registry.clone();
             async move {
@@ -803,7 +803,7 @@ impl Buffer {
         });
 
         match cx
-            .executor()
+            .background_executor()
             .block_with_timeout(self.sync_parse_timeout, parse_task)
         {
             Ok(new_syntax_snapshot) => {
@@ -866,9 +866,9 @@ impl Buffer {
 
     fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
         if let Some(indent_sizes) = self.compute_autoindents() {
-            let indent_sizes = cx.executor().spawn(indent_sizes);
+            let indent_sizes = cx.background_executor().spawn(indent_sizes);
             match cx
-                .executor()
+                .background_executor()
                 .block_with_timeout(Duration::from_micros(500), indent_sizes)
             {
                 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
@@ -1117,7 +1117,7 @@ impl Buffer {
     pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
         let old_text = self.as_rope().clone();
         let base_version = self.version();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let old_text = old_text.to_string();
             let line_ending = LineEnding::detect(&new_text);
             LineEnding::normalize(&mut new_text);
@@ -1155,7 +1155,7 @@ impl Buffer {
         let old_text = self.as_rope().clone();
         let line_ending = self.line_ending();
         let base_version = self.version();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let ranges = trailing_whitespace_ranges(&old_text);
             let empty = Arc::<str>::from("");
             Diff {

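The pattern throughout this file: CPU-bound work (diffing, parsing, whitespace scanning) moves onto the background executor and hands back a `Task` the caller can await. A small sketch in the same shape as `Buffer::diff` above (`word_count` is an illustrative stand-in, not part of this crate):

    fn word_count(cx: &gpui2::AppContext, text: String) -> gpui2::Task<usize> {
        cx.background_executor().spawn(async move {
            // Runs on a background thread; it must not touch models or views.
            text.split_whitespace().count()
        })
    }
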
crates/language2/src/buffer_tests.rs 🔗

@@ -559,7 +559,7 @@ async fn test_outline(cx: &mut gpui2::TestAppContext) {
         cx: &'a gpui2::TestAppContext,
     ) -> Vec<(&'a str, Vec<usize>)> {
         let matches = cx
-            .update(|cx| outline.search(query, cx.executor().clone()))
+            .update(|cx| outline.search(query, cx.background_executor().clone()))
             .await;
         matches
             .into_iter()
@@ -1879,7 +1879,7 @@ fn test_serialization(cx: &mut gpui2::AppContext) {
 
     let state = buffer1.read(cx).to_proto();
     let ops = cx
-        .executor()
+        .background_executor()
         .block(buffer1.read(cx).serialize_ops(None, cx));
     let buffer2 = cx.build_model(|cx| {
         let mut buffer = Buffer::from_proto(1, state, None).unwrap();
@@ -1921,7 +1921,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
         let buffer = cx.build_model(|cx| {
             let state = base_buffer.read(cx).to_proto();
             let ops = cx
-                .executor()
+                .background_executor()
                 .block(base_buffer.read(cx).serialize_ops(None, cx));
             let mut buffer = Buffer::from_proto(i as ReplicaId, state, None).unwrap();
             buffer
@@ -1943,6 +1943,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
             .detach();
             buffer
         });
+
         buffers.push(buffer);
         replica_ids.push(i as ReplicaId);
         network.lock().add_peer(i as ReplicaId);
@@ -2025,7 +2026,9 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
             }
             50..=59 if replica_ids.len() < max_peers => {
                 let old_buffer_state = buffer.read(cx).to_proto();
-                let old_buffer_ops = cx.executor().block(buffer.read(cx).serialize_ops(None, cx));
+                let old_buffer_ops = cx
+                    .background_executor()
+                    .block(buffer.read(cx).serialize_ops(None, cx));
                 let new_replica_id = (0..=replica_ids.len() as ReplicaId)
                     .filter(|replica_id| *replica_id != buffer.read(cx).replica_id())
                     .choose(&mut rng)

crates/language2/src/language2.rs 🔗

@@ -17,7 +17,7 @@ use futures::{
     future::{BoxFuture, Shared},
     FutureExt, TryFutureExt as _,
 };
-use gpui2::{AppContext, AsyncAppContext, Executor, Task};
+use gpui2::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
 pub use highlight_map::HighlightMap;
 use lazy_static::lazy_static;
 use lsp2::{CodeActionKind, LanguageServerBinary};
@@ -631,7 +631,7 @@ pub struct LanguageRegistry {
     lsp_binary_paths: Mutex<
         HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
     >,
-    executor: Option<Executor>,
+    executor: Option<BackgroundExecutor>,
     lsp_binary_status_tx: LspBinaryStatusSender,
 }
 
@@ -680,7 +680,7 @@ impl LanguageRegistry {
         Self::new(Task::ready(()))
     }
 
-    pub fn set_executor(&mut self, executor: Executor) {
+    pub fn set_executor(&mut self, executor: BackgroundExecutor) {
         self.executor = Some(executor);
     }
 
@@ -916,7 +916,7 @@ impl LanguageRegistry {
                 }
 
                 let servers_tx = servers_tx.clone();
-                cx.executor()
+                cx.background_executor()
                     .spawn(async move {
                         if fake_server
                             .try_receive_notification::<lsp2::notification::Initialized>()

crates/language2/src/outline.rs 🔗

@@ -1,5 +1,5 @@
 use fuzzy2::{StringMatch, StringMatchCandidate};
-use gpui2::{Executor, HighlightStyle};
+use gpui2::{BackgroundExecutor, HighlightStyle};
 use std::ops::Range;
 
 #[derive(Debug)]
@@ -57,7 +57,7 @@ impl<T> Outline<T> {
         }
     }
 
-    pub async fn search(&self, query: &str, executor: Executor) -> Vec<StringMatch> {
+    pub async fn search(&self, query: &str, executor: BackgroundExecutor) -> Vec<StringMatch> {
         let query = query.trim_start();
         let is_path_query = query.contains(' ');
         let smart_case = query.chars().any(|c| c.is_uppercase());
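
The Executor type is renamed to BackgroundExecutor wherever it appears in signatures, here and in the lsp2, live_kit, and project2 changes below. A sketch of the matching call-site shape, mirroring the buffer_tests and Project::test hunks elsewhere in this diff; outline and query stand in for values from the surrounding test:

    use gpui2::BackgroundExecutor;

    // Clone the background executor out of the test context and thread it into an
    // API that previously took the old Executor type (sketch only).
    let executor: BackgroundExecutor = cx.executor().clone();
    let matches = outline.search(query, executor).await;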

crates/live_kit_client2/examples/test_app.rs 🔗

@@ -42,7 +42,7 @@ fn main() {
         let live_kit_key = std::env::var("LIVE_KIT_KEY").unwrap_or("devkey".into());
         let live_kit_secret = std::env::var("LIVE_KIT_SECRET").unwrap_or("secret".into());
 
-        cx.spawn_on_main(|cx| async move {
+        cx.spawn(|cx| async move {
             let user_a_token = token::create(
                 &live_kit_key,
                 &live_kit_secret,
@@ -104,7 +104,7 @@ fn main() {
             }
 
             println!("Pausing for 5 seconds to test audio, make some noise!");
-            let timer = cx.executor().timer(Duration::from_secs(5));
+            let timer = cx.background_executor().timer(Duration::from_secs(5));
             timer.await;
             let remote_audio_track = room_b
                 .remote_audio_tracks("test-participant-1")
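
In the example binary, cx.spawn_on_main(|cx| async move { ... }) becomes cx.spawn(|cx| async move { ... }), and the five-second pause now comes from a background-executor timer. A sketch of the combined shape; how the example handles the returned task is elided here, as it is in the hunk:

    use std::time::Duration;

    cx.spawn(|cx| async move {
        // Pause off the main thread, as in the audio-test wait above.
        cx.background_executor().timer(Duration::from_secs(5)).await;
        // ...rest of the example...
    });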

crates/live_kit_client2/src/test.rs 🔗

@@ -2,7 +2,7 @@ use anyhow::{anyhow, Context, Result};
 use async_trait::async_trait;
 use collections::{BTreeMap, HashMap};
 use futures::Stream;
-use gpui2::Executor;
+use gpui2::BackgroundExecutor;
 use live_kit_server::token;
 use media::core_video::CVImageBuffer;
 use parking_lot::Mutex;
@@ -16,7 +16,7 @@ pub struct TestServer {
     pub api_key: String,
     pub secret_key: String,
     rooms: Mutex<HashMap<String, TestServerRoom>>,
-    executor: Arc<Executor>,
+    executor: Arc<BackgroundExecutor>,
 }
 
 impl TestServer {
@@ -24,7 +24,7 @@ impl TestServer {
         url: String,
         api_key: String,
         secret_key: String,
-        executor: Arc<Executor>,
+        executor: Arc<BackgroundExecutor>,
     ) -> Result<Arc<TestServer>> {
         let mut servers = SERVERS.lock();
         if servers.contains_key(&url) {

crates/lsp2/src/lsp2.rs 🔗

@@ -5,7 +5,7 @@ pub use lsp_types::*;
 use anyhow::{anyhow, Context, Result};
 use collections::HashMap;
 use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite, FutureExt};
-use gpui2::{AsyncAppContext, Executor, Task};
+use gpui2::{AsyncAppContext, BackgroundExecutor, Task};
 use parking_lot::Mutex;
 use postage::{barrier, prelude::Stream};
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -62,7 +62,7 @@ pub struct LanguageServer {
     notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
     response_handlers: Arc<Mutex<Option<HashMap<usize, ResponseHandler>>>>,
     io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
-    executor: Executor,
+    executor: BackgroundExecutor,
     #[allow(clippy::type_complexity)]
     io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
     output_done_rx: Mutex<Option<barrier::Receiver>>,
@@ -248,7 +248,7 @@ impl LanguageServer {
             let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
             stdout.or(stderr)
         });
-        let output_task = cx.executor().spawn({
+        let output_task = cx.background_executor().spawn({
             Self::handle_output(
                 stdin,
                 outbound_rx,
@@ -269,7 +269,7 @@ impl LanguageServer {
             code_action_kinds,
             next_id: Default::default(),
             outbound_tx,
-            executor: cx.executor().clone(),
+            executor: cx.background_executor().clone(),
             io_tasks: Mutex::new(Some((input_task, output_task))),
             output_done_rx: Mutex::new(Some(output_done_rx)),
             root_path: root_path.to_path_buf(),
@@ -595,8 +595,8 @@ impl LanguageServer {
     where
         T: request::Request,
         T::Params: 'static + Send,
-        F: 'static + Send + FnMut(T::Params, AsyncAppContext) -> Fut,
-        Fut: 'static + Future<Output = Result<T::Result>> + Send,
+        F: 'static + FnMut(T::Params, AsyncAppContext) -> Fut + Send,
+        Fut: 'static + Future<Output = Result<T::Result>>,
     {
         self.on_custom_request(T::METHOD, f)
     }
@@ -629,7 +629,7 @@ impl LanguageServer {
     #[must_use]
     pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
     where
-        F: 'static + Send + FnMut(Params, AsyncAppContext),
+        F: 'static + FnMut(Params, AsyncAppContext) + Send,
         Params: DeserializeOwned,
     {
         let prev_handler = self.notification_handlers.lock().insert(
@@ -657,8 +657,8 @@ impl LanguageServer {
         mut f: F,
     ) -> Subscription
     where
-        F: 'static + Send + FnMut(Params, AsyncAppContext) -> Fut,
-        Fut: 'static + Future<Output = Result<Res>> + Send,
+        F: 'static + FnMut(Params, AsyncAppContext) -> Fut + Send,
+        Fut: 'static + Future<Output = Result<Res>>,
         Params: DeserializeOwned + Send + 'static,
         Res: Serialize,
     {
@@ -670,10 +670,10 @@ impl LanguageServer {
                     match serde_json::from_str(params) {
                         Ok(params) => {
                             let response = f(params, cx.clone());
-                            cx.executor()
-                                .spawn_on_main({
+                            cx.foreground_executor()
+                                .spawn({
                                     let outbound_tx = outbound_tx.clone();
-                                    move || async move {
+                                    async move {
                                         let response = match response.await {
                                             Ok(result) => Response {
                                                 jsonrpc: JSON_RPC_VERSION,
@@ -769,7 +769,7 @@ impl LanguageServer {
         next_id: &AtomicUsize,
         response_handlers: &Mutex<Option<HashMap<usize, ResponseHandler>>>,
         outbound_tx: &channel::Sender<String>,
-        executor: &Executor,
+        executor: &BackgroundExecutor,
         params: T::Params,
     ) -> impl 'static + Future<Output = anyhow::Result<T::Result>>
     where
@@ -1047,8 +1047,9 @@ impl FakeLanguageServer {
             .on_request::<T, _, _>(move |params, cx| {
                 let result = handler(params, cx.clone());
                 let responded_tx = responded_tx.clone();
+                let executor = cx.background_executor().clone();
                 async move {
-                    cx.executor().simulate_random_delay().await;
+                    executor.simulate_random_delay().await;
                     let result = result.await;
                     responded_tx.unbounded_send(()).ok();
                     result
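
Two related adjustments in lsp2: the Fut bounds on request and notification handlers drop Send, and responses are now driven via foreground_executor().spawn(async move { ... }) instead of the old spawn_on_main(move || async move { ... }) closure form. In FakeLanguageServer the background executor is cloned before the async block so the future no longer needs to capture cx. A rough sketch of that last pattern, with the surrounding handler machinery elided:

    // Clone the executor up front so the returned future owns it outright and does
    // not borrow the AsyncAppContext (mirrors the FakeLanguageServer hunk above).
    let executor = cx.background_executor().clone();
    async move {
        executor.simulate_random_delay().await;
        // ...await the real handler result and signal completion...
    }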

crates/multi_buffer2/src/multi_buffer2.rs 🔗

@@ -878,7 +878,7 @@ impl MultiBuffer {
         cx.spawn(move |this, mut cx| async move {
             let mut excerpt_ranges = Vec::new();
             let mut range_counts = Vec::new();
-            cx.executor()
+            cx.background_executor()
                 .scoped(|scope| {
                     scope.spawn(async {
                         let (ranges, counts) =
@@ -4177,7 +4177,7 @@ mod tests {
         let guest_buffer = cx.build_model(|cx| {
             let state = host_buffer.read(cx).to_proto();
             let ops = cx
-                .executor()
+                .background_executor()
                 .block(host_buffer.read(cx).serialize_ops(None, cx));
             let mut buffer = Buffer::from_proto(1, state, None).unwrap();
             buffer
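
The excerpt-range computation runs through the background executor's scoped API. A minimal sketch of that shape, assuming (as the hunk suggests) that scoped awaits every future spawned on the scope before resuming, which is what lets those futures borrow local state:

    let mut ranges = Vec::new();
    cx.background_executor()
        .scoped(|scope| {
            scope.spawn(async {
                // Fill the borrowed Vec in place; the borrow only needs to live
                // until the scope's futures have all completed.
                ranges.push(0..10);
            });
        })
        .await;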

crates/prettier2/src/prettier2.rs 🔗

@@ -143,7 +143,7 @@ impl Prettier {
     ) -> anyhow::Result<Self> {
         use lsp2::LanguageServerBinary;
 
-        let executor = cx.executor().clone();
+        let executor = cx.background_executor().clone();
         anyhow::ensure!(
             prettier_dir.is_dir(),
             "Prettier dir {prettier_dir:?} is not a directory"

crates/project/src/project_tests.rs 🔗

@@ -2604,64 +2604,64 @@ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
     assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
 }
 
-#[gpui::test]
-async fn test_save_as(cx: &mut gpui::TestAppContext) {
-    init_test(cx);
-
-    let fs = FakeFs::new(cx.background());
-    fs.insert_tree("/dir", json!({})).await;
-
-    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-    let languages = project.read_with(cx, |project, _| project.languages().clone());
-    languages.register(
-        "/some/path",
-        LanguageConfig {
-            name: "Rust".into(),
-            path_suffixes: vec!["rs".into()],
-            ..Default::default()
-        },
-        tree_sitter_rust::language(),
-        vec![],
-        |_| Default::default(),
-    );
-
-    let buffer = project.update(cx, |project, cx| {
-        project.create_buffer("", None, cx).unwrap()
-    });
-    buffer.update(cx, |buffer, cx| {
-        buffer.edit([(0..0, "abc")], None, cx);
-        assert!(buffer.is_dirty());
-        assert!(!buffer.has_conflict());
-        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
-    });
-    project
-        .update(cx, |project, cx| {
-            project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
-        })
-        .await
-        .unwrap();
-    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
-
-    cx.foreground().run_until_parked();
-    buffer.read_with(cx, |buffer, cx| {
-        assert_eq!(
-            buffer.file().unwrap().full_path(cx),
-            Path::new("dir/file1.rs")
-        );
-        assert!(!buffer.is_dirty());
-        assert!(!buffer.has_conflict());
-        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
-    });
-
-    let opened_buffer = project
-        .update(cx, |project, cx| {
-            project.open_local_buffer("/dir/file1.rs", cx)
-        })
-        .await
-        .unwrap();
-    assert_eq!(opened_buffer, buffer);
-}
+// #[gpui::test]
+// async fn test_save_as(cx: &mut gpui::TestAppContext) {
+//     init_test(cx);
+
+//     let fs = FakeFs::new(cx.background());
+//     fs.insert_tree("/dir", json!({})).await;
+
+//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+//     let languages = project.read_with(cx, |project, _| project.languages().clone());
+//     languages.register(
+//         "/some/path",
+//         LanguageConfig {
+//             name: "Rust".into(),
+//             path_suffixes: vec!["rs".into()],
+//             ..Default::default()
+//         },
+//         tree_sitter_rust::language(),
+//         vec![],
+//         |_| Default::default(),
+//     );
+
+//     let buffer = project.update(cx, |project, cx| {
+//         project.create_buffer("", None, cx).unwrap()
+//     });
+//     buffer.update(cx, |buffer, cx| {
+//         buffer.edit([(0..0, "abc")], None, cx);
+//         assert!(buffer.is_dirty());
+//         assert!(!buffer.has_conflict());
+//         assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
+//     });
+//     project
+//         .update(cx, |project, cx| {
+//             project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
+//         })
+//         .await
+//         .unwrap();
+//     assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
+
+//     cx.foreground().run_until_parked();
+//     buffer.read_with(cx, |buffer, cx| {
+//         assert_eq!(
+//             buffer.file().unwrap().full_path(cx),
+//             Path::new("dir/file1.rs")
+//         );
+//         assert!(!buffer.is_dirty());
+//         assert!(!buffer.has_conflict());
+//         assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
+//     });
+
+//     let opened_buffer = project
+//         .update(cx, |project, cx| {
+//             project.open_local_buffer("/dir/file1.rs", cx)
+//         })
+//         .await
+//         .unwrap();
+//     assert_eq!(opened_buffer, buffer);
+// }
 
 #[gpui::test(retries = 5)]
 async fn test_rescan_and_remote_updates(

crates/project2/Cargo.toml 🔗

@@ -16,6 +16,7 @@ test-support = [
     "settings2/test-support",
     "text/test-support",
     "prettier2/test-support",
+    "gpui2/test-support",
 ]
 
 [dependencies]

crates/project2/src/lsp_command.rs 🔗

@@ -32,7 +32,7 @@ pub fn lsp_formatting_options(tab_size: u32) -> lsp2::FormattingOptions {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 pub(crate) trait LspCommand: 'static + Sized + Send {
     type Response: 'static + Default + Send;
     type LspRequest: 'static + Send + lsp2::request::Request;
@@ -148,7 +148,7 @@ impl From<lsp2::FormattingOptions> for FormattingOptions {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for PrepareRename {
     type Response = Option<Range<Anchor>>;
     type LspRequest = lsp2::request::PrepareRenameRequest;
@@ -279,7 +279,7 @@ impl LspCommand for PrepareRename {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for PerformRename {
     type Response = ProjectTransaction;
     type LspRequest = lsp2::request::Rename;
@@ -398,7 +398,7 @@ impl LspCommand for PerformRename {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetDefinition {
     type Response = Vec<LocationLink>;
     type LspRequest = lsp2::request::GotoDefinition;
@@ -491,7 +491,7 @@ impl LspCommand for GetDefinition {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetTypeDefinition {
     type Response = Vec<LocationLink>;
     type LspRequest = lsp2::request::GotoTypeDefinition;
@@ -783,7 +783,7 @@ fn location_links_to_proto(
         .collect()
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetReferences {
     type Response = Vec<Location>;
     type LspRequest = lsp2::request::References;
@@ -945,7 +945,7 @@ impl LspCommand for GetReferences {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetDocumentHighlights {
     type Response = Vec<DocumentHighlight>;
     type LspRequest = lsp2::request::DocumentHighlightRequest;
@@ -1096,7 +1096,7 @@ impl LspCommand for GetDocumentHighlights {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetHover {
     type Response = Option<Hover>;
     type LspRequest = lsp2::request::HoverRequest;
@@ -1314,7 +1314,7 @@ impl LspCommand for GetHover {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetCompletions {
     type Response = Vec<Completion>;
     type LspRequest = lsp2::request::Completion;
@@ -1545,7 +1545,7 @@ impl LspCommand for GetCompletions {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for GetCodeActions {
     type Response = Vec<CodeAction>;
     type LspRequest = lsp2::request::CodeActionRequest;
@@ -1684,7 +1684,7 @@ impl LspCommand for GetCodeActions {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for OnTypeFormatting {
     type Response = Option<Transaction>;
     type LspRequest = lsp2::request::OnTypeFormatting;
@@ -2192,7 +2192,7 @@ impl InlayHints {
     }
 }
 
-#[async_trait]
+#[async_trait(?Send)]
 impl LspCommand for InlayHints {
     type Response = Vec<InlayHint>;
     type LspRequest = lsp2::InlayHintRequest;
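
Every LspCommand impl switches from #[async_trait] to #[async_trait(?Send)], so the boxed futures that async_trait generates no longer carry a Send bound and command handlers may hold non-Send GPUI state across await points. A self-contained illustration of what ?Send permits; the trait and types here are invented for the example and are not taken from the crate:

    use async_trait::async_trait;
    use std::rc::Rc;

    #[async_trait(?Send)]
    trait Describe {
        // With ?Send the generated future is Pin<Box<dyn Future<Output = String>>>
        // without + Send, so it may capture non-Send values such as Rc.
        async fn describe(&self, label: Rc<String>) -> String;
    }

    struct Thing;

    #[async_trait(?Send)]
    impl Describe for Thing {
        async fn describe(&self, label: Rc<String>) -> String {
            format!("thing: {label}")
        }
    }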

crates/project2/src/project2.rs 🔗

@@ -26,8 +26,8 @@ use futures::{
 };
 use globset::{Glob, GlobSet, GlobSetBuilder};
 use gpui2::{
-    AnyModel, AppContext, AsyncAppContext, Context, Entity, EventEmitter, Executor, Model,
-    ModelContext, Task, WeakModel,
+    AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
+    Model, ModelContext, Task, WeakModel,
 };
 use itertools::Itertools;
 use language2::{
@@ -207,7 +207,7 @@ impl DelayedDebounced {
 
         let previous_task = self.task.take();
         self.task = Some(cx.spawn(move |project, mut cx| async move {
-            let mut timer = cx.executor().timer(delay).fuse();
+            let mut timer = cx.background_executor().timer(delay).fuse();
             if let Some(previous_task) = previous_task {
                 previous_task.await;
             }
@@ -855,39 +855,39 @@ impl Project {
         }
     }
 
-    // #[cfg(any(test, feature = "test-support"))]
-    // pub async fn test(
-    //     fs: Arc<dyn Fs>,
-    //     root_paths: impl IntoIterator<Item = &Path>,
-    //     cx: &mut gpui::TestAppContext,
-    // ) -> Handle<Project> {
-    //     let mut languages = LanguageRegistry::test();
-    //     languages.set_executor(cx.background());
-    //     let http_client = util::http::FakeHttpClient::with_404_response();
-    //     let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
-    //     let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
-    //     let project = cx.update(|cx| {
-    //         Project::local(
-    //             client,
-    //             node_runtime::FakeNodeRuntime::new(),
-    //             user_store,
-    //             Arc::new(languages),
-    //             fs,
-    //             cx,
-    //         )
-    //     });
-    //     for path in root_paths {
-    //         let (tree, _) = project
-    //             .update(cx, |project, cx| {
-    //                 project.find_or_create_local_worktree(path, true, cx)
-    //             })
-    //             .await
-    //             .unwrap();
-    //         tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
-    //             .await;
-    //     }
-    //     project
-    // }
+    #[cfg(any(test, feature = "test-support"))]
+    pub async fn test(
+        fs: Arc<dyn Fs>,
+        root_paths: impl IntoIterator<Item = &Path>,
+        cx: &mut gpui2::TestAppContext,
+    ) -> Model<Project> {
+        let mut languages = LanguageRegistry::test();
+        languages.set_executor(cx.executor().clone());
+        let http_client = util::http::FakeHttpClient::with_404_response();
+        let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
+        let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
+        let project = cx.update(|cx| {
+            Project::local(
+                client,
+                node_runtime::FakeNodeRuntime::new(),
+                user_store,
+                Arc::new(languages),
+                fs,
+                cx,
+            )
+        });
+        for path in root_paths {
+            let (tree, _) = project
+                .update(cx, |project, cx| {
+                    project.find_or_create_local_worktree(path, true, cx)
+                })
+                .await
+                .unwrap();
+            tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+                .await;
+        }
+        project
+    }
 
     fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
         let mut language_servers_to_start = Vec::new();
@@ -1453,7 +1453,7 @@ impl Project {
                             };
                             if client.send(initial_state).log_err().is_some() {
                                 let client = client.clone();
-                                cx.executor()
+                                cx.background_executor()
                                     .spawn(async move {
                                         let mut chunks = split_operations(operations).peekable();
                                         while let Some(chunk) = chunks.next() {
@@ -1758,7 +1758,7 @@ impl Project {
             }
         };
 
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             wait_for_loading_buffer(loading_watch)
                 .await
                 .map_err(|error| anyhow!("{}", error))
@@ -2436,7 +2436,7 @@ impl Project {
                                 Duration::from_secs(1);
 
                             let task = cx.spawn(move |this, mut cx| async move {
-                                cx.executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
+                                cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
                                 if let Some(this) = this.upgrade() {
                                     this.update(&mut cx, |this, cx| {
                                         this.disk_based_diagnostics_finished(
@@ -3477,7 +3477,7 @@ impl Project {
             });
 
             const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
-            let mut timeout = cx.executor().timer(PROCESS_TIMEOUT).fuse();
+            let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
 
             let mut errored = false;
             if let Some(mut process) = process {
@@ -5593,7 +5593,7 @@ impl Project {
             })
             .collect::<Vec<_>>();
 
-        let background = cx.executor().clone();
+        let background = cx.background_executor().clone();
         let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
         if path_count == 0 {
             let (_, rx) = smol::channel::bounded(1024);
@@ -5616,11 +5616,11 @@ impl Project {
                 }
             })
             .collect();
-        cx.executor()
+        cx.background_executor()
             .spawn(Self::background_search(
                 unnamed_files,
                 opened_buffers,
-                cx.executor().clone(),
+                cx.background_executor().clone(),
                 self.fs.clone(),
                 workers,
                 query.clone(),
@@ -5631,9 +5631,9 @@ impl Project {
             .detach();
 
         let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
-        let background = cx.executor().clone();
+        let background = cx.background_executor().clone();
         let (result_tx, result_rx) = smol::channel::bounded(1024);
-        cx.executor()
+        cx.background_executor()
             .spawn(async move {
                 let Ok(buffers) = buffers.await else {
                     return;
@@ -5741,7 +5741,7 @@ impl Project {
     async fn background_search(
         unnamed_buffers: Vec<Model<Buffer>>,
         opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
-        executor: Executor,
+        executor: BackgroundExecutor,
         fs: Arc<dyn Fs>,
         workers: usize,
         query: SearchQuery,
@@ -5993,7 +5993,7 @@ impl Project {
             Task::ready(Ok((tree, relative_path)))
         } else {
             let worktree = self.create_local_worktree(abs_path, visible, cx);
-            cx.executor()
+            cx.background_executor()
                 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
         }
     }
@@ -6064,7 +6064,7 @@ impl Project {
                 .shared()
             })
             .clone();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             match task.await {
                 Ok(worktree) => Ok(worktree),
                 Err(err) => Err(anyhow!("{}", err)),
@@ -6376,7 +6376,7 @@ impl Project {
             let snapshot =
                 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
             let diff_bases_by_buffer = cx
-                .executor()
+                .background_executor()
                 .spawn(async move {
                     future_buffers
                         .into_iter()
@@ -6519,7 +6519,7 @@ impl Project {
                 })
                 .collect::<Vec<_>>();
 
-            cx.executor()
+            cx.background_executor()
                 .spawn(async move {
                     for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
                         async move {
@@ -7358,7 +7358,7 @@ impl Project {
                         })
                         .log_err();
 
-                    cx.executor()
+                    cx.background_executor()
                         .spawn(
                             async move {
                                 let operations = operations.await;
@@ -7960,7 +7960,7 @@ impl Project {
                         if let Some(buffer) = this.buffer_for_id(buffer_id) {
                             let operations =
                                 buffer.read(cx).serialize_ops(Some(remote_version), cx);
-                            cx.executor().spawn(async move {
+                            cx.background_executor().spawn(async move {
                                 let operations = operations.await;
                                 for chunk in split_operations(operations) {
                                     client
@@ -7983,7 +7983,7 @@ impl Project {
             // Any incomplete buffers have open requests waiting. Request that the host sends
             // creates these buffers for us again to unblock any waiting futures.
             for id in incomplete_buffer_ids {
-                cx.executor()
+                cx.background_executor()
                     .spawn(client.request(proto::OpenBufferById { project_id, id }))
                     .detach();
             }
@@ -8198,7 +8198,7 @@ impl Project {
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
         let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let snapshot = snapshot?;
             let mut lsp_edits = lsp_edits
                 .into_iter()
@@ -8438,7 +8438,7 @@ impl Project {
             let fs = self.fs.clone();
             cx.spawn(move |this, mut cx| async move {
                 let prettier_dir = match cx
-                    .executor()
+                    .background_executor()
                     .spawn(Prettier::locate(
                         worktree_path.zip(buffer_path).map(
                             |(worktree_root_path, starting_path)| LocateStart {
@@ -8649,7 +8649,7 @@ impl Project {
             .get(&(worktree, default_prettier_dir.to_path_buf()))
             .cloned();
         let fs = Arc::clone(&self.fs);
-        cx.spawn_on_main(move |this, mut cx| async move {
+        cx.spawn(move |this, mut cx| async move {
             if let Some(previous_installation_process) = previous_installation_process {
                 previous_installation_process.await;
             }
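
The previously commented-out Project::test constructor comes back in gpui2 form: it returns Model<Project>, builds the user store with cx.build_model, and picks up the executor via cx.executor().clone(). A hedged sketch of a test consuming it, using only calls visible elsewhere in this diff; the FakeFs::new argument is an assumption carried over from the old, commented-out tests:

    // Sketch only: fake filesystem, test project, then open a buffer through it.
    let fs = FakeFs::new(cx.executor().clone()); // assumed to take the background executor
    fs.insert_tree("/dir", serde_json::json!({ "a.rs": "fn a() {}" })).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();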

crates/project2/src/project_tests.rs 🔗

@@ -1,4077 +1,4093 @@
-// use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
-// use fs::{FakeFs, RealFs};
-// use futures::{future, StreamExt};
-// use gpui::{executor::Deterministic, test::subscribe, AppContext};
-// use language2::{
-//     language_settings::{AllLanguageSettings, LanguageSettingsContent},
-//     tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
-//     LineEnding, OffsetRangeExt, Point, ToPoint,
-// };
-// use lsp2::Url;
-// use parking_lot::Mutex;
-// use pretty_assertions::assert_eq;
-// use serde_json::json;
-// use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
-// use unindent::Unindent as _;
-// use util::{assert_set_eq, test::temp_tree};
-
-// #[cfg(test)]
-// #[ctor::ctor]
-// fn init_logger() {
-//     if std::env::var("RUST_LOG").is_ok() {
-//         env_logger::init();
-//     }
-// }
-
-// #[gpui::test]
-// async fn test_symlinks(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-//     cx.foreground().allow_parking();
-
-//     let dir = temp_tree(json!({
-//         "root": {
-//             "apple": "",
-//             "banana": {
-//                 "carrot": {
-//                     "date": "",
-//                     "endive": "",
-//                 }
-//             },
-//             "fennel": {
-//                 "grape": "",
-//             }
-//         }
-//     }));
-
-//     let root_link_path = dir.path().join("root_link");
-//     unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
-//     unix::fs::symlink(
-//         &dir.path().join("root/fennel"),
-//         &dir.path().join("root/finnochio"),
-//     )
-//     .unwrap();
-
-//     let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
-//     project.read_with(cx, |project, cx| {
-//         let tree = project.worktrees(cx).next().unwrap().read(cx);
-//         assert_eq!(tree.file_count(), 5);
-//         assert_eq!(
-//             tree.inode_for_path("fennel/grape"),
-//             tree.inode_for_path("finnochio/grape")
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_managing_project_specific_settings(
-//     deterministic: Arc<Deterministic>,
-//     cx: &mut gpui::TestAppContext,
-// ) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/the-root",
-//         json!({
-//             ".zed": {
-//                 "settings.json": r#"{ "tab_size": 8 }"#
-//             },
-//             "a": {
-//                 "a.rs": "fn a() {\n    A\n}"
-//             },
-//             "b": {
-//                 ".zed": {
-//                     "settings.json": r#"{ "tab_size": 2 }"#
-//                 },
-//                 "b.rs": "fn b() {\n  B\n}"
-//             }
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
-//     let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-
-//     deterministic.run_until_parked();
-//     cx.read(|cx| {
-//         let tree = worktree.read(cx);
-
-//         let settings_a = language_settings(
-//             None,
-//             Some(
-//                 &(File::for_entry(
-//                     tree.entry_for_path("a/a.rs").unwrap().clone(),
-//                     worktree.clone(),
-//                 ) as _),
-//             ),
-//             cx,
-//         );
-//         let settings_b = language_settings(
-//             None,
-//             Some(
-//                 &(File::for_entry(
-//                     tree.entry_for_path("b/b.rs").unwrap().clone(),
-//                     worktree.clone(),
-//                 ) as _),
-//             ),
-//             cx,
-//         );
-
-//         assert_eq!(settings_a.tab_size.get(), 8);
-//         assert_eq!(settings_b.tab_size.get(), 2);
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_managing_language_servers(
-//     deterministic: Arc<Deterministic>,
-//     cx: &mut gpui::TestAppContext,
-// ) {
-//     init_test(cx);
-
-//     let mut rust_language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut json_language = Language::new(
-//         LanguageConfig {
-//             name: "JSON".into(),
-//             path_suffixes: vec!["json".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_rust_servers = rust_language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "the-rust-language-server",
-//             capabilities: lsp::ServerCapabilities {
-//                 completion_provider: Some(lsp::CompletionOptions {
-//                     trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
-//                     ..Default::default()
-//                 }),
-//                 ..Default::default()
-//             },
-//             ..Default::default()
-//         }))
-//         .await;
-//     let mut fake_json_servers = json_language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "the-json-language-server",
-//             capabilities: lsp::ServerCapabilities {
-//                 completion_provider: Some(lsp::CompletionOptions {
-//                     trigger_characters: Some(vec![":".to_string()]),
-//                     ..Default::default()
-//                 }),
-//                 ..Default::default()
-//             },
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/the-root",
-//         json!({
-//             "test.rs": "const A: i32 = 1;",
-//             "test2.rs": "",
-//             "Cargo.toml": "a = 1",
-//             "package.json": "{\"a\": 1}",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
-
-//     // Open a buffer without an associated language server.
-//     let toml_buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/the-root/Cargo.toml", cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     // Open a buffer with an associated language server before the language for it has been loaded.
-//     let rust_buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/the-root/test.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-//     rust_buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.language().map(|l| l.name()), None);
-//     });
-
-//     // Now we add the languages to the project, and ensure they get assigned to all
-//     // the relevant open buffers.
-//     project.update(cx, |project, _| {
-//         project.languages.add(Arc::new(json_language));
-//         project.languages.add(Arc::new(rust_language));
-//     });
-//     deterministic.run_until_parked();
-//     rust_buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
-//     });
-
-//     // A server is started up, and it is notified about Rust files.
-//     let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentItem {
-//             uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
-//             version: 0,
-//             text: "const A: i32 = 1;".to_string(),
-//             language_id: Default::default()
-//         }
-//     );
-
-//     // The buffer is configured based on the language server's capabilities.
-//     rust_buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer.completion_triggers(),
-//             &[".".to_string(), "::".to_string()]
-//         );
-//     });
-//     toml_buffer.read_with(cx, |buffer, _| {
-//         assert!(buffer.completion_triggers().is_empty());
-//     });
-
-//     // Edit a buffer. The changes are reported to the language server.
-//     rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidChangeTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::VersionedTextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
-//             1
-//         )
-//     );
-
-//     // Open a third buffer with a different associated language server.
-//     let json_buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/the-root/package.json", cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     // A json language server is started up and is only notified about the json buffer.
-//     let mut fake_json_server = fake_json_servers.next().await.unwrap();
-//     assert_eq!(
-//         fake_json_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentItem {
-//             uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
-//             version: 0,
-//             text: "{\"a\": 1}".to_string(),
-//             language_id: Default::default()
-//         }
-//     );
-
-//     // This buffer is configured based on the second language server's
-//     // capabilities.
-//     json_buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
-//     });
-
-//     // When opening another buffer whose language server is already running,
-//     // it is also configured based on the existing language server's capabilities.
-//     let rust_buffer2 = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/the-root/test2.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-//     rust_buffer2.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer.completion_triggers(),
-//             &[".".to_string(), "::".to_string()]
-//         );
-//     });
-
-//     // Changes are reported only to servers matching the buffer's language.
-//     toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
-//     rust_buffer2.update(cx, |buffer, cx| {
-//         buffer.edit([(0..0, "let x = 1;")], None, cx)
-//     });
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidChangeTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::VersionedTextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
-//             1
-//         )
-//     );
-
-//     // Save notifications are reported to all servers.
-//     project
-//         .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
-//         .await
-//         .unwrap();
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidSaveTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
-//         )
-//     );
-//     assert_eq!(
-//         fake_json_server
-//             .receive_notification::<lsp2::notification::DidSaveTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
-//         )
-//     );
-
-//     // Renames are reported only to servers matching the buffer's language.
-//     fs.rename(
-//         Path::new("/the-root/test2.rs"),
-//         Path::new("/the-root/test3.rs"),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidCloseTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
-//     );
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentItem {
-//             uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
-//             version: 0,
-//             text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
-//             language_id: Default::default()
-//         },
-//     );
-
-//     rust_buffer2.update(cx, |buffer, cx| {
-//         buffer.update_diagnostics(
-//             LanguageServerId(0),
-//             DiagnosticSet::from_sorted_entries(
-//                 vec![DiagnosticEntry {
-//                     diagnostic: Default::default(),
-//                     range: Anchor::MIN..Anchor::MAX,
-//                 }],
-//                 &buffer.snapshot(),
-//             ),
-//             cx,
-//         );
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
-//                 .count(),
-//             1
-//         );
-//     });
-
-//     // When the rename changes the extension of the file, the buffer gets closed on the old
-//     // language server and gets opened on the new one.
-//     fs.rename(
-//         Path::new("/the-root/test3.rs"),
-//         Path::new("/the-root/test3.json"),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidCloseTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
-//     );
-//     assert_eq!(
-//         fake_json_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentItem {
-//             uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
-//             version: 0,
-//             text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
-//             language_id: Default::default()
-//         },
-//     );
-
-//     // We clear the diagnostics, since the language has changed.
-//     rust_buffer2.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
-//                 .count(),
-//             0
-//         );
-//     });
-
-//     // The renamed file's version resets after changing language server.
-//     rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
-//     assert_eq!(
-//         fake_json_server
-//             .receive_notification::<lsp2::notification::DidChangeTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::VersionedTextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
-//             1
-//         )
-//     );
-
-//     // Restart language servers
-//     project.update(cx, |project, cx| {
-//         project.restart_language_servers_for_buffers(
-//             vec![rust_buffer.clone(), json_buffer.clone()],
-//             cx,
-//         );
-//     });
-
-//     let mut rust_shutdown_requests = fake_rust_server
-//         .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
-//     let mut json_shutdown_requests = fake_json_server
-//         .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
-//     futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
-
-//     let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
-//     let mut fake_json_server = fake_json_servers.next().await.unwrap();
-
-//     // Ensure rust document is reopened in new rust language server
-//     assert_eq!(
-//         fake_rust_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document,
-//         lsp2::TextDocumentItem {
-//             uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
-//             version: 0,
-//             text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
-//             language_id: Default::default()
-//         }
-//     );
-
-//     // Ensure json documents are reopened in new json language server
-//     assert_set_eq!(
-//         [
-//             fake_json_server
-//                 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//                 .await
-//                 .text_document,
-//             fake_json_server
-//                 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//                 .await
-//                 .text_document,
-//         ],
-//         [
-//             lsp2::TextDocumentItem {
-//                 uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
-//                 version: 0,
-//                 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
-//                 language_id: Default::default()
-//             },
-//             lsp2::TextDocumentItem {
-//                 uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
-//                 version: 0,
-//                 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
-//                 language_id: Default::default()
-//             }
-//         ]
-//     );
-
-//     // Close notifications are reported only to servers matching the buffer's language.
-//     cx.update(|_| drop(json_buffer));
-//     let close_message = lsp2::DidCloseTextDocumentParams {
-//         text_document: lsp2::TextDocumentIdentifier::new(
-//             lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
-//         ),
-//     };
-//     assert_eq!(
-//         fake_json_server
-//             .receive_notification::<lsp2::notification::DidCloseTextDocument>()
-//             .await,
-//         close_message,
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "the-language-server",
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/the-root",
-//         json!({
-//             ".gitignore": "target\n",
-//             "src": {
-//                 "a.rs": "",
-//                 "b.rs": "",
-//             },
-//             "target": {
-//                 "x": {
-//                     "out": {
-//                         "x.rs": ""
-//                     }
-//                 },
-//                 "y": {
-//                     "out": {
-//                         "y.rs": "",
-//                     }
-//                 },
-//                 "z": {
-//                     "out": {
-//                         "z.rs": ""
-//                     }
-//                 }
-//             }
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
-//     project.update(cx, |project, _| {
-//         project.languages.add(Arc::new(language));
-//     });
-//     cx.foreground().run_until_parked();
-
-//     // Start the language server by opening a buffer with a compatible file extension.
-//     let _buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/the-root/src/a.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
-//     project.read_with(cx, |project, cx| {
-//         let worktree = project.worktrees(cx).next().unwrap();
-//         assert_eq!(
-//             worktree
-//                 .read(cx)
-//                 .snapshot()
-//                 .entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//                 (Path::new("target"), true),
-//             ]
-//         );
-//     });
-
-//     let prev_read_dir_count = fs.read_dir_call_count();
-
-//     // Keep track of the FS events reported to the language server.
-//     let fake_server = fake_servers.next().await.unwrap();
-//     let file_changes = Arc::new(Mutex::new(Vec::new()));
-//     fake_server
-//         .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
-//             registrations: vec![lsp2::Registration {
-//                 id: Default::default(),
-//                 method: "workspace/didChangeWatchedFiles".to_string(),
-//                 register_options: serde_json::to_value(
-//                     lsp::DidChangeWatchedFilesRegistrationOptions {
-//                         watchers: vec![
-//                             lsp2::FileSystemWatcher {
-//                                 glob_pattern: lsp2::GlobPattern::String(
-//                                     "/the-root/Cargo.toml".to_string(),
-//                                 ),
-//                                 kind: None,
-//                             },
-//                             lsp2::FileSystemWatcher {
-//                                 glob_pattern: lsp2::GlobPattern::String(
-//                                     "/the-root/src/*.{rs,c}".to_string(),
-//                                 ),
-//                                 kind: None,
-//                             },
-//                             lsp2::FileSystemWatcher {
-//                                 glob_pattern: lsp2::GlobPattern::String(
-//                                     "/the-root/target/y/**/*.rs".to_string(),
-//                                 ),
-//                                 kind: None,
-//                             },
-//                         ],
-//                     },
-//                 )
-//                 .ok(),
-//             }],
-//         })
-//         .await
-//         .unwrap();
-//     fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
-//         let file_changes = file_changes.clone();
-//         move |params, _| {
-//             let mut file_changes = file_changes.lock();
-//             file_changes.extend(params.changes);
-//             file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
-//         }
-//     });
-
-//     cx.foreground().run_until_parked();
-//     assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
-//     assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
-
-//     // Now the language server has asked us to watch an ignored directory path,
-//     // so we recursively load it.
-//     project.read_with(cx, |project, cx| {
-//         let worktree = project.worktrees(cx).next().unwrap();
-//         assert_eq!(
-//             worktree
-//                 .read(cx)
-//                 .snapshot()
-//                 .entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//                 (Path::new("target"), true),
-//                 (Path::new("target/x"), true),
-//                 (Path::new("target/y"), true),
-//                 (Path::new("target/y/out"), true),
-//                 (Path::new("target/y/out/y.rs"), true),
-//                 (Path::new("target/z"), true),
-//             ]
-//         );
-//     });
-
-//     // Perform some file system mutations, two of which match the watched patterns,
-//     // and one of which does not.
-//     fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-
-//     // The language server receives events for the FS mutations that match its watch patterns.
-//     cx.foreground().run_until_parked();
-//     assert_eq!(
-//         &*file_changes.lock(),
-//         &[
-//             lsp2::FileEvent {
-//                 uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
-//                 typ: lsp2::FileChangeType::DELETED,
-//             },
-//             lsp2::FileEvent {
-//                 uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
-//                 typ: lsp2::FileChangeType::CREATED,
-//             },
-//             lsp2::FileEvent {
-//                 uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
-//                 typ: lsp2::FileChangeType::CREATED,
-//             },
-//         ]
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": "let a = 1;",
-//             "b.rs": "let b = 2;"
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
-
-//     let buffer_a = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-//     let buffer_b = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
-//         .await
-//         .unwrap();
-
-//     project.update(cx, |project, cx| {
-//         project
-//             .update_diagnostics(
-//                 LanguageServerId(0),
-//                 lsp::PublishDiagnosticsParams {
-//                     uri: Url::from_file_path("/dir/a.rs").unwrap(),
-//                     version: None,
-//                     diagnostics: vec![lsp2::Diagnostic {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 4),
-//                             lsp2::Position::new(0, 5),
-//                         ),
-//                         severity: Some(lsp2::DiagnosticSeverity::ERROR),
-//                         message: "error 1".to_string(),
-//                         ..Default::default()
-//                     }],
-//                 },
-//                 &[],
-//                 cx,
-//             )
-//             .unwrap();
-//         project
-//             .update_diagnostics(
-//                 LanguageServerId(0),
-//                 lsp::PublishDiagnosticsParams {
-//                     uri: Url::from_file_path("/dir/b.rs").unwrap(),
-//                     version: None,
-//                     diagnostics: vec![lsp2::Diagnostic {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 4),
-//                             lsp2::Position::new(0, 5),
-//                         ),
-//                         severity: Some(lsp2::DiagnosticSeverity::WARNING),
-//                         message: "error 2".to_string(),
-//                         ..Default::default()
-//                     }],
-//                 },
-//                 &[],
-//                 cx,
-//             )
-//             .unwrap();
-//     });
-
-//     buffer_a.read_with(cx, |buffer, _| {
-//         let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
-//         assert_eq!(
-//             chunks
-//                 .iter()
-//                 .map(|(s, d)| (s.as_str(), *d))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 ("let ", None),
-//                 ("a", Some(DiagnosticSeverity::ERROR)),
-//                 (" = 1;", None),
-//             ]
-//         );
-//     });
-//     buffer_b.read_with(cx, |buffer, _| {
-//         let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
-//         assert_eq!(
-//             chunks
-//                 .iter()
-//                 .map(|(s, d)| (s.as_str(), *d))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 ("let ", None),
-//                 ("b", Some(DiagnosticSeverity::WARNING)),
-//                 (" = 2;", None),
-//             ]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "dir": {
-//                 "a.rs": "let a = 1;",
-//             },
-//             "other.rs": "let b = c;"
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
-
-//     let (worktree, _) = project
-//         .update(cx, |project, cx| {
-//             project.find_or_create_local_worktree("/root/other.rs", false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
-
-//     project.update(cx, |project, cx| {
-//         project
-//             .update_diagnostics(
-//                 LanguageServerId(0),
-//                 lsp2::PublishDiagnosticsParams {
-//                     uri: Url::from_file_path("/root/other.rs").unwrap(),
-//                     version: None,
-//                     diagnostics: vec![lsp2::Diagnostic {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 8),
-//                             lsp2::Position::new(0, 9),
-//                         ),
-//                         severity: Some(lsp2::DiagnosticSeverity::ERROR),
-//                         message: "unknown variable 'c'".to_string(),
-//                         ..Default::default()
-//                     }],
-//                 },
-//                 &[],
-//                 cx,
-//             )
-//             .unwrap();
-//     });
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
-//         .await
-//         .unwrap();
-//     buffer.read_with(cx, |buffer, _| {
-//         let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
-//         assert_eq!(
-//             chunks
-//                 .iter()
-//                 .map(|(s, d)| (s.as_str(), *d))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 ("let b = ", None),
-//                 ("c", Some(DiagnosticSeverity::ERROR)),
-//                 (";", None),
-//             ]
-//         );
-//     });
-
-//     project.read_with(cx, |project, cx| {
-//         assert_eq!(project.diagnostic_summaries(cx).next(), None);
-//         assert_eq!(project.diagnostic_summary(cx).error_count, 0);
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let progress_token = "the-progress-token";
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             disk_based_diagnostics_progress_token: Some(progress_token.into()),
-//             disk_based_diagnostics_sources: vec!["disk".into()],
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": "fn a() { A }",
-//             "b.rs": "const y: i32 = 1",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
-
-//     // Cause worktree to start the fake language server
-//     let _buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
-//         .await
-//         .unwrap();
-
-//     let mut events = subscribe(&project, cx);
-
-//     let fake_server = fake_servers.next().await.unwrap();
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::LanguageServerAdded(LanguageServerId(0)),
-//     );
-
-//     fake_server
-//         .start_progress(format!("{}/0", progress_token))
-//         .await;
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiskBasedDiagnosticsStarted {
-//             language_server_id: LanguageServerId(0),
-//         }
-//     );
-
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: None,
-//         diagnostics: vec![lsp2::Diagnostic {
-//             range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
-//             severity: Some(lsp2::DiagnosticSeverity::ERROR),
-//             message: "undefined variable 'A'".to_string(),
-//             ..Default::default()
-//         }],
-//     });
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiagnosticsUpdated {
-//             language_server_id: LanguageServerId(0),
-//             path: (worktree_id, Path::new("a.rs")).into()
-//         }
-//     );
-
-//     fake_server.end_progress(format!("{}/0", progress_token));
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiskBasedDiagnosticsFinished {
-//             language_server_id: LanguageServerId(0)
-//         }
-//     );
-
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     buffer.read_with(cx, |buffer, _| {
-//         let snapshot = buffer.snapshot();
-//         let diagnostics = snapshot
-//             .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
-//             .collect::<Vec<_>>();
-//         assert_eq!(
-//             diagnostics,
-//             &[DiagnosticEntry {
-//                 range: Point::new(0, 9)..Point::new(0, 10),
-//                 diagnostic: Diagnostic {
-//                     severity: lsp2::DiagnosticSeverity::ERROR,
-//                     message: "undefined variable 'A'".to_string(),
-//                     group_id: 0,
-//                     is_primary: true,
-//                     ..Default::default()
-//                 }
-//             }]
-//         )
-//     });
-
-//     // Ensure publishing empty diagnostics twice only results in one update event.
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: None,
-//         diagnostics: Default::default(),
-//     });
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiagnosticsUpdated {
-//             language_server_id: LanguageServerId(0),
-//             path: (worktree_id, Path::new("a.rs")).into()
-//         }
-//     );
-
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: None,
-//         diagnostics: Default::default(),
-//     });
-//     cx.foreground().run_until_parked();
-//     assert_eq!(futures::poll!(events.next()), Poll::Pending);
-// }
-
-// #[gpui::test]
-// async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let progress_token = "the-progress-token";
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             disk_based_diagnostics_sources: vec!["disk".into()],
-//             disk_based_diagnostics_progress_token: Some(progress_token.into()),
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     // Simulate diagnostics starting to update.
-//     let fake_server = fake_servers.next().await.unwrap();
-//     fake_server.start_progress(progress_token).await;
-
-//     // Restart the server before the diagnostics finish updating.
-//     project.update(cx, |project, cx| {
-//         project.restart_language_servers_for_buffers([buffer], cx);
-//     });
-//     let mut events = subscribe(&project, cx);
-
-//     // Simulate the newly started server sending more diagnostics.
-//     let fake_server = fake_servers.next().await.unwrap();
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::LanguageServerAdded(LanguageServerId(1))
-//     );
-//     fake_server.start_progress(progress_token).await;
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiskBasedDiagnosticsStarted {
-//             language_server_id: LanguageServerId(1)
-//         }
-//     );
-//     project.read_with(cx, |project, _| {
-//         assert_eq!(
-//             project
-//                 .language_servers_running_disk_based_diagnostics()
-//                 .collect::<Vec<_>>(),
-//             [LanguageServerId(1)]
-//         );
-//     });
-
-//     // All diagnostics are considered done, despite the old server's diagnostic
-//     // task never completing.
-//     fake_server.end_progress(progress_token);
-//     assert_eq!(
-//         events.next().await.unwrap(),
-//         Event::DiskBasedDiagnosticsFinished {
-//             language_server_id: LanguageServerId(1)
-//         }
-//     );
-//     project.read_with(cx, |project, _| {
-//         assert_eq!(
-//             project
-//                 .language_servers_running_disk_based_diagnostics()
-//                 .collect::<Vec<_>>(),
-//             [LanguageServerId(0); 0]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     // Publish diagnostics
-//     let fake_server = fake_servers.next().await.unwrap();
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: None,
-//         diagnostics: vec![lsp2::Diagnostic {
-//             range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
-//             severity: Some(lsp2::DiagnosticSeverity::ERROR),
-//             message: "the message".to_string(),
-//             ..Default::default()
-//         }],
-//     });
-
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, usize>(0..1, false)
-//                 .map(|entry| entry.diagnostic.message.clone())
-//                 .collect::<Vec<_>>(),
-//             ["the message".to_string()]
-//         );
-//     });
-//     project.read_with(cx, |project, cx| {
-//         assert_eq!(
-//             project.diagnostic_summary(cx),
-//             DiagnosticSummary {
-//                 error_count: 1,
-//                 warning_count: 0,
-//             }
-//         );
-//     });
-
-//     project.update(cx, |project, cx| {
-//         project.restart_language_servers_for_buffers([buffer.clone()], cx);
-//     });
-
-//     // The diagnostics are cleared.
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, usize>(0..1, false)
-//                 .map(|entry| entry.diagnostic.message.clone())
-//                 .collect::<Vec<_>>(),
-//             Vec::<String>::new(),
-//         );
-//     });
-//     project.read_with(cx, |project, cx| {
-//         assert_eq!(
-//             project.diagnostic_summary(cx),
-//             DiagnosticSummary {
-//                 error_count: 0,
-//                 warning_count: 0,
-//             }
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "the-lsp",
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     // Before restarting the server, report diagnostics with an unknown buffer version.
-//     let fake_server = fake_servers.next().await.unwrap();
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: Some(10000),
-//         diagnostics: Vec::new(),
-//     });
-//     cx.foreground().run_until_parked();
-
-//     project.update(cx, |project, cx| {
-//         project.restart_language_servers_for_buffers([buffer.clone()], cx);
-//     });
-//     let mut fake_server = fake_servers.next().await.unwrap();
-//     let notification = fake_server
-//         .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//         .await
-//         .text_document;
-//     assert_eq!(notification.version, 0);
-// }
-
-// #[gpui::test]
-// async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut rust = Language::new(
-//         LanguageConfig {
-//             name: Arc::from("Rust"),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_rust_servers = rust
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "rust-lsp",
-//             ..Default::default()
-//         }))
-//         .await;
-//     let mut js = Language::new(
-//         LanguageConfig {
-//             name: Arc::from("JavaScript"),
-//             path_suffixes: vec!["js".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_js_servers = js
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             name: "js-lsp",
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
-//         .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| {
-//         project.languages.add(Arc::new(rust));
-//         project.languages.add(Arc::new(js));
-//     });
-
-//     let _rs_buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-//     let _js_buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
-//         .await
-//         .unwrap();
-
-//     let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
-//     assert_eq!(
-//         fake_rust_server_1
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document
-//             .uri
-//             .as_str(),
-//         "file:///dir/a.rs"
-//     );
-
-//     let mut fake_js_server = fake_js_servers.next().await.unwrap();
-//     assert_eq!(
-//         fake_js_server
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document
-//             .uri
-//             .as_str(),
-//         "file:///dir/b.js"
-//     );
-
-//     // Disable Rust language server, ensuring only that server gets stopped.
-//     cx.update(|cx| {
-//         cx.update_global(|settings: &mut SettingsStore, cx| {
-//             settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
-//                 settings.languages.insert(
-//                     Arc::from("Rust"),
-//                     LanguageSettingsContent {
-//                         enable_language_server: Some(false),
-//                         ..Default::default()
-//                     },
-//                 );
-//             });
-//         })
-//     });
-//     fake_rust_server_1
-//         .receive_notification::<lsp2::notification::Exit>()
-//         .await;
-
-//     // Enable Rust and disable JavaScript language servers, ensuring that the
-//     // former gets started again and that the latter stops.
-//     cx.update(|cx| {
-//         cx.update_global(|settings: &mut SettingsStore, cx| {
-//             settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
-//                 settings.languages.insert(
-//                     Arc::from("Rust"),
-//                     LanguageSettingsContent {
-//                         enable_language_server: Some(true),
-//                         ..Default::default()
-//                     },
-//                 );
-//                 settings.languages.insert(
-//                     Arc::from("JavaScript"),
-//                     LanguageSettingsContent {
-//                         enable_language_server: Some(false),
-//                         ..Default::default()
-//                     },
-//                 );
-//             });
-//         })
-//     });
-//     let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
-//     assert_eq!(
-//         fake_rust_server_2
-//             .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//             .await
-//             .text_document
-//             .uri
-//             .as_str(),
-//         "file:///dir/a.rs"
-//     );
-//     fake_js_server
-//         .receive_notification::<lsp2::notification::Exit>()
-//         .await;
-// }
-
-// #[gpui::test(iterations = 3)]
-// async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             disk_based_diagnostics_sources: vec!["disk".into()],
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let text = "
-//         fn a() { A }
-//         fn b() { BB }
-//         fn c() { CCC }
-//     "
-//     .unindent();
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": text })).await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     let mut fake_server = fake_servers.next().await.unwrap();
-//     let open_notification = fake_server
-//         .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//         .await;
-
-//     // Edit the buffer, moving the content down
-//     buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
-//     let change_notification_1 = fake_server
-//         .receive_notification::<lsp2::notification::DidChangeTextDocument>()
-//         .await;
-//     assert!(change_notification_1.text_document.version > open_notification.text_document.version);
-
-//     // Report some diagnostics for the initial version of the buffer
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: Some(open_notification.text_document.version),
-//         diagnostics: vec![
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 message: "undefined variable 'A'".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 message: "undefined variable 'BB'".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 source: Some("disk".to_string()),
-//                 message: "undefined variable 'CCC'".to_string(),
-//                 ..Default::default()
-//             },
-//         ],
-//     });
-
-//     // The diagnostics have moved down since they were created.
-//     buffer.next_notification(cx).await;
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 DiagnosticEntry {
-//                     range: Point::new(3, 9)..Point::new(3, 11),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::ERROR,
-//                         message: "undefined variable 'BB'".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 1,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     },
-//                 },
-//                 DiagnosticEntry {
-//                     range: Point::new(4, 9)..Point::new(4, 12),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::ERROR,
-//                         message: "undefined variable 'CCC'".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 2,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     }
-//                 }
-//             ]
-//         );
-//         assert_eq!(
-//             chunks_with_diagnostics(buffer, 0..buffer.len()),
-//             [
-//                 ("\n\nfn a() { ".to_string(), None),
-//                 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
-//                 (" }\nfn b() { ".to_string(), None),
-//                 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
-//                 (" }\nfn c() { ".to_string(), None),
-//                 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
-//                 (" }\n".to_string(), None),
-//             ]
-//         );
-//         assert_eq!(
-//             chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
-//             [
-//                 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
-//                 (" }\nfn c() { ".to_string(), None),
-//                 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
-//             ]
-//         );
-//     });
-
-//     // Ensure overlapping diagnostics are highlighted correctly.
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: Some(open_notification.text_document.version),
-//         diagnostics: vec![
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 message: "undefined variable 'A'".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
-//                 severity: Some(DiagnosticSeverity::WARNING),
-//                 message: "unreachable statement".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//         ],
-//     });
-
-//     buffer.next_notification(cx).await;
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 DiagnosticEntry {
-//                     range: Point::new(2, 9)..Point::new(2, 12),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::WARNING,
-//                         message: "unreachable statement".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 4,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     }
-//                 },
-//                 DiagnosticEntry {
-//                     range: Point::new(2, 9)..Point::new(2, 10),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::ERROR,
-//                         message: "undefined variable 'A'".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 3,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     },
-//                 }
-//             ]
-//         );
-//         assert_eq!(
-//             chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
-//             [
-//                 ("fn a() { ".to_string(), None),
-//                 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
-//                 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
-//                 ("\n".to_string(), None),
-//             ]
-//         );
-//         assert_eq!(
-//             chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
-//             [
-//                 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
-//                 ("\n".to_string(), None),
-//             ]
-//         );
-//     });
-
-//     // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
-//     // changes since the last save.
-//     buffer.update(cx, |buffer, cx| {
-//         buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
-//         buffer.edit(
-//             [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
-//             None,
-//             cx,
-//         );
-//         buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
-//     });
-//     let change_notification_2 = fake_server
-//         .receive_notification::<lsp2::notification::DidChangeTextDocument>()
-//         .await;
-//     assert!(
-//         change_notification_2.text_document.version > change_notification_1.text_document.version
-//     );
-
-//     // Handle out-of-order diagnostics
-//     fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
-//         uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
-//         version: Some(change_notification_2.text_document.version),
-//         diagnostics: vec![
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 message: "undefined variable 'BB'".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
-//                 severity: Some(DiagnosticSeverity::WARNING),
-//                 message: "undefined variable 'A'".to_string(),
-//                 source: Some("disk".to_string()),
-//                 ..Default::default()
-//             },
-//         ],
-//     });
-
-//     buffer.next_notification(cx).await;
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert_eq!(
-//             buffer
-//                 .snapshot()
-//                 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 DiagnosticEntry {
-//                     range: Point::new(2, 21)..Point::new(2, 22),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::WARNING,
-//                         message: "undefined variable 'A'".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 6,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     }
-//                 },
-//                 DiagnosticEntry {
-//                     range: Point::new(3, 9)..Point::new(3, 14),
-//                     diagnostic: Diagnostic {
-//                         source: Some("disk".into()),
-//                         severity: DiagnosticSeverity::ERROR,
-//                         message: "undefined variable 'BB'".to_string(),
-//                         is_disk_based: true,
-//                         group_id: 5,
-//                         is_primary: true,
-//                         ..Default::default()
-//                     },
-//                 }
-//             ]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let text = concat!(
-//         "let one = ;\n", //
-//         "let two = \n",
-//         "let three = 3;\n",
-//     );
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": text })).await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     project.update(cx, |project, cx| {
-//         project
-//             .update_buffer_diagnostics(
-//                 &buffer,
-//                 LanguageServerId(0),
-//                 None,
-//                 vec![
-//                     DiagnosticEntry {
-//                         range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
-//                         diagnostic: Diagnostic {
-//                             severity: DiagnosticSeverity::ERROR,
-//                             message: "syntax error 1".to_string(),
-//                             ..Default::default()
-//                         },
-//                     },
-//                     DiagnosticEntry {
-//                         range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
-//                         diagnostic: Diagnostic {
-//                             severity: DiagnosticSeverity::ERROR,
-//                             message: "syntax error 2".to_string(),
-//                             ..Default::default()
-//                         },
-//                     },
-//                 ],
-//                 cx,
-//             )
-//             .unwrap();
-//     });
-
-//     // An empty range is extended forward to include the following character.
-//     // At the end of a line, an empty range is extended backward to include
-//     // the preceding character.
-//     buffer.read_with(cx, |buffer, _| {
-//         let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
-//         assert_eq!(
-//             chunks
-//                 .iter()
-//                 .map(|(s, d)| (s.as_str(), *d))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 ("let one = ", None),
-//                 (";", Some(DiagnosticSeverity::ERROR)),
-//                 ("\nlet two =", None),
-//                 (" ", Some(DiagnosticSeverity::ERROR)),
-//                 ("\nlet three = 3;\n", None)
-//             ]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
-//         .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-
-//     project.update(cx, |project, cx| {
-//         project
-//             .update_diagnostic_entries(
-//                 LanguageServerId(0),
-//                 Path::new("/dir/a.rs").to_owned(),
-//                 None,
-//                 vec![DiagnosticEntry {
-//                     range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
-//                     diagnostic: Diagnostic {
-//                         severity: DiagnosticSeverity::ERROR,
-//                         is_primary: true,
-//                         message: "syntax error a1".to_string(),
-//                         ..Default::default()
-//                     },
-//                 }],
-//                 cx,
-//             )
-//             .unwrap();
-//         project
-//             .update_diagnostic_entries(
-//                 LanguageServerId(1),
-//                 Path::new("/dir/a.rs").to_owned(),
-//                 None,
-//                 vec![DiagnosticEntry {
-//                     range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
-//                     diagnostic: Diagnostic {
-//                         severity: DiagnosticSeverity::ERROR,
-//                         is_primary: true,
-//                         message: "syntax error b1".to_string(),
-//                         ..Default::default()
-//                     },
-//                 }],
-//                 cx,
-//             )
-//             .unwrap();
-
-//         assert_eq!(
-//             project.diagnostic_summary(cx),
-//             DiagnosticSummary {
-//                 error_count: 2,
-//                 warning_count: 0,
-//             }
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
-
-//     let text = "
-//         fn a() {
-//             f1();
-//         }
-//         fn b() {
-//             f2();
-//         }
-//         fn c() {
-//             f3();
-//         }
-//     "
-//     .unindent();
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": text.clone(),
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     let mut fake_server = fake_servers.next().await.unwrap();
-//     let lsp_document_version = fake_server
-//         .receive_notification::<lsp2::notification::DidOpenTextDocument>()
-//         .await
-//         .text_document
-//         .version;
-
-//     // Simulate editing the buffer after the language server computes some edits.
-//     buffer.update(cx, |buffer, cx| {
-//         buffer.edit(
-//             [(
-//                 Point::new(0, 0)..Point::new(0, 0),
-//                 "// above first function\n",
-//             )],
-//             None,
-//             cx,
-//         );
-//         buffer.edit(
-//             [(
-//                 Point::new(2, 0)..Point::new(2, 0),
-//                 "    // inside first function\n",
-//             )],
-//             None,
-//             cx,
-//         );
-//         buffer.edit(
-//             [(
-//                 Point::new(6, 4)..Point::new(6, 4),
-//                 "// inside second function ",
-//             )],
-//             None,
-//             cx,
-//         );
-
-//         assert_eq!(
-//             buffer.text(),
-//             "
-//                 // above first function
-//                 fn a() {
-//                     // inside first function
-//                     f1();
-//                 }
-//                 fn b() {
-//                     // inside second function f2();
-//                 }
-//                 fn c() {
-//                     f3();
-//                 }
-//             "
-//             .unindent()
-//         );
-//     });
-
-//     let edits = project
-//         .update(cx, |project, cx| {
-//             project.edits_from_lsp(
-//                 &buffer,
-//                 vec![
-//                     // replace body of first function
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 0),
-//                             lsp2::Position::new(3, 0),
-//                         ),
-//                         new_text: "
-//                             fn a() {
-//                                 f10();
-//                             }
-//                             "
-//                         .unindent(),
-//                     },
-//                     // edit inside second function
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(4, 6),
-//                             lsp2::Position::new(4, 6),
-//                         ),
-//                         new_text: "00".into(),
-//                     },
-//                     // edit inside third function via two distinct edits
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(7, 5),
-//                             lsp2::Position::new(7, 5),
-//                         ),
-//                         new_text: "4000".into(),
-//                     },
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(7, 5),
-//                             lsp2::Position::new(7, 6),
-//                         ),
-//                         new_text: "".into(),
-//                     },
-//                 ],
-//                 LanguageServerId(0),
-//                 Some(lsp_document_version),
-//                 cx,
-//             )
-//         })
-//         .await
-//         .unwrap();
-
-//     buffer.update(cx, |buffer, cx| {
-//         for (range, new_text) in edits {
-//             buffer.edit([(range, new_text)], None, cx);
-//         }
-//         assert_eq!(
-//             buffer.text(),
-//             "
-//                 // above first function
-//                 fn a() {
-//                     // inside first function
-//                     f10();
-//                 }
-//                 fn b() {
-//                     // inside second function f200();
-//                 }
-//                 fn c() {
-//                     f4000();
-//                 }
-//                 "
-//             .unindent()
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let text = "
-//         use a::b;
-//         use a::c;
-
-//         fn f() {
-//             b();
-//             c();
-//         }
-//     "
-//     .unindent();
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": text.clone(),
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     // Simulate the language server sending us a small edit in the form of a very large diff.
-//     // Rust-analyzer does this when performing a merge-imports code action.
-//     let edits = project
-//         .update(cx, |project, cx| {
-//             project.edits_from_lsp(
-//                 &buffer,
-//                 [
-//                     // Replace the first use statement without editing the semicolon.
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 4),
-//                             lsp2::Position::new(0, 8),
-//                         ),
-//                         new_text: "a::{b, c}".into(),
-//                     },
-//                     // Reinsert the remainder of the file between the semicolon and the final
-//                     // newline of the file.
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 9),
-//                             lsp2::Position::new(0, 9),
-//                         ),
-//                         new_text: "\n\n".into(),
-//                     },
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 9),
-//                             lsp2::Position::new(0, 9),
-//                         ),
-//                         new_text: "
-//                             fn f() {
-//                                 b();
-//                                 c();
-//                             }"
-//                         .unindent(),
-//                     },
-//                     // Delete everything after the first newline of the file.
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(1, 0),
-//                             lsp2::Position::new(7, 0),
-//                         ),
-//                         new_text: "".into(),
-//                     },
-//                 ],
-//                 LanguageServerId(0),
-//                 None,
-//                 cx,
-//             )
-//         })
-//         .await
-//         .unwrap();
-
-//     buffer.update(cx, |buffer, cx| {
-//         let edits = edits
-//             .into_iter()
-//             .map(|(range, text)| {
-//                 (
-//                     range.start.to_point(buffer)..range.end.to_point(buffer),
-//                     text,
-//                 )
-//             })
-//             .collect::<Vec<_>>();
-
-//         assert_eq!(
-//             edits,
-//             [
-//                 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
-//                 (Point::new(1, 0)..Point::new(2, 0), "".into())
-//             ]
-//         );
-
-//         for (range, new_text) in edits {
-//             buffer.edit([(range, new_text)], None, cx);
-//         }
-//         assert_eq!(
-//             buffer.text(),
-//             "
-//                 use a::{b, c};
-
-//                 fn f() {
-//                     b();
-//                     c();
-//                 }
-//             "
-//             .unindent()
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let text = "
-//         use a::b;
-//         use a::c;
-
-//         fn f() {
-//             b();
-//             c();
-//         }
-//     "
-//     .unindent();
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": text.clone(),
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     // Simulate the language server sending us edits in a non-ordered fashion,
-//     // with ranges sometimes being inverted or pointing to invalid locations.
-//     let edits = project
-//         .update(cx, |project, cx| {
-//             project.edits_from_lsp(
-//                 &buffer,
-//                 [
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 9),
-//                             lsp2::Position::new(0, 9),
-//                         ),
-//                         new_text: "\n\n".into(),
-//                     },
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 8),
-//                             lsp2::Position::new(0, 4),
-//                         ),
-//                         new_text: "a::{b, c}".into(),
-//                     },
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(1, 0),
-//                             lsp2::Position::new(99, 0),
-//                         ),
-//                         new_text: "".into(),
-//                     },
-//                     lsp2::TextEdit {
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(0, 9),
-//                             lsp2::Position::new(0, 9),
-//                         ),
-//                         new_text: "
-//                             fn f() {
-//                                 b();
-//                                 c();
-//                             }"
-//                         .unindent(),
-//                     },
-//                 ],
-//                 LanguageServerId(0),
-//                 None,
-//                 cx,
-//             )
-//         })
-//         .await
-//         .unwrap();
-
-//     buffer.update(cx, |buffer, cx| {
-//         let edits = edits
-//             .into_iter()
-//             .map(|(range, text)| {
-//                 (
-//                     range.start.to_point(buffer)..range.end.to_point(buffer),
-//                     text,
-//                 )
-//             })
-//             .collect::<Vec<_>>();
-
-//         assert_eq!(
-//             edits,
-//             [
-//                 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
-//                 (Point::new(1, 0)..Point::new(2, 0), "".into())
-//             ]
-//         );
-
-//         for (range, new_text) in edits {
-//             buffer.edit([(range, new_text)], None, cx);
-//         }
-//         assert_eq!(
-//             buffer.text(),
-//             "
-//                 use a::{b, c};
-
-//                 fn f() {
-//                     b();
-//                     c();
-//                 }
-//             "
-//             .unindent()
-//         );
-//     });
-// }
-
-// fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
-//     buffer: &Buffer,
-//     range: Range<T>,
-// ) -> Vec<(String, Option<DiagnosticSeverity>)> {
-//     let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
-//     for chunk in buffer.snapshot().chunks(range, true) {
-//         if chunks.last().map_or(false, |prev_chunk| {
-//             prev_chunk.1 == chunk.diagnostic_severity
-//         }) {
-//             chunks.last_mut().unwrap().0.push_str(chunk.text);
-//         } else {
-//             chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
-//         }
-//     }
-//     chunks
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_definition(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.rs": "const fn a() { A }",
-//             "b.rs": "const y: i32 = crate::a()",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-
-//     let buffer = project
-//         .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
-//         .await
-//         .unwrap();
-
-//     let fake_server = fake_servers.next().await.unwrap();
-//     fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
-//         let params = params.text_document_position_params;
-//         assert_eq!(
-//             params.text_document.uri.to_file_path().unwrap(),
-//             Path::new("/dir/b.rs"),
-//         );
-//         assert_eq!(params.position, lsp2::Position::new(0, 22));
-
-//         Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
-//             lsp2::Location::new(
-//                 lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
-//                 lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
-//             ),
-//         )))
-//     });
-
-//     let mut definitions = project
-//         .update(cx, |project, cx| project.definition(&buffer, 22, cx))
-//         .await
-//         .unwrap();
-
-//     // Assert that no new language server was started
-//     cx.foreground().run_until_parked();
-//     assert!(fake_servers.try_next().is_err());
-
-//     assert_eq!(definitions.len(), 1);
-//     let definition = definitions.pop().unwrap();
-//     cx.update(|cx| {
-//         let target_buffer = definition.target.buffer.read(cx);
-//         assert_eq!(
-//             target_buffer
-//                 .file()
-//                 .unwrap()
-//                 .as_local()
-//                 .unwrap()
-//                 .abs_path(cx),
-//             Path::new("/dir/a.rs"),
-//         );
-//         assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
-//         assert_eq!(
-//             list_worktrees(&project, cx),
-//             [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
-//         );
-
-//         drop(definition);
-//     });
-//     cx.read(|cx| {
-//         assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
-//     });
-
-//     fn list_worktrees<'a>(
-//         project: &'a ModelHandle<Project>,
-//         cx: &'a AppContext,
-//     ) -> Vec<(&'a Path, bool)> {
-//         project
-//             .read(cx)
-//             .worktrees(cx)
-//             .map(|worktree| {
-//                 let worktree = worktree.read(cx);
-//                 (
-//                     worktree.as_local().unwrap().abs_path().as_ref(),
-//                     worktree.is_visible(),
-//                 )
-//             })
-//             .collect::<Vec<_>>()
-//     }
-// }
-
-// #[gpui::test]
-// async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "TypeScript".into(),
-//             path_suffixes: vec!["ts".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_typescript::language_typescript()),
-//     );
-//     let mut fake_language_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             capabilities: lsp2::ServerCapabilities {
-//                 completion_provider: Some(lsp2::CompletionOptions {
-//                     trigger_characters: Some(vec![":".to_string()]),
-//                     ..Default::default()
-//                 }),
-//                 ..Default::default()
-//             },
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.ts": "",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
-//         .await
-//         .unwrap();
-
-//     let fake_server = fake_language_servers.next().await.unwrap();
-
-//     let text = "let a = b.fqn";
-//     buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
-//     let completions = project.update(cx, |project, cx| {
-//         project.completions(&buffer, text.len(), cx)
-//     });
-
-//     fake_server
-//         .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
-//             Ok(Some(lsp2::CompletionResponse::Array(vec![
-//                 lsp2::CompletionItem {
-//                     label: "fullyQualifiedName?".into(),
-//                     insert_text: Some("fullyQualifiedName".into()),
-//                     ..Default::default()
-//                 },
-//             ])))
-//         })
-//         .next()
-//         .await;
-//     let completions = completions.await.unwrap();
-//     let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-//     assert_eq!(completions.len(), 1);
-//     assert_eq!(completions[0].new_text, "fullyQualifiedName");
-//     assert_eq!(
-//         completions[0].old_range.to_offset(&snapshot),
-//         text.len() - 3..text.len()
-//     );
-
-//     let text = "let a = \"atoms/cmp\"";
-//     buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
-//     let completions = project.update(cx, |project, cx| {
-//         project.completions(&buffer, text.len() - 1, cx)
-//     });
-
-//     fake_server
-//         .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
-//             Ok(Some(lsp2::CompletionResponse::Array(vec![
-//                 lsp2::CompletionItem {
-//                     label: "component".into(),
-//                     ..Default::default()
-//                 },
-//             ])))
-//         })
-//         .next()
-//         .await;
-//     let completions = completions.await.unwrap();
-//     let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-//     assert_eq!(completions.len(), 1);
-//     assert_eq!(completions[0].new_text, "component");
-//     assert_eq!(
-//         completions[0].old_range.to_offset(&snapshot),
-//         text.len() - 4..text.len() - 1
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "TypeScript".into(),
-//             path_suffixes: vec!["ts".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_typescript::language_typescript()),
-//     );
-//     let mut fake_language_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             capabilities: lsp2::ServerCapabilities {
-//                 completion_provider: Some(lsp2::CompletionOptions {
-//                     trigger_characters: Some(vec![":".to_string()]),
-//                     ..Default::default()
-//                 }),
-//                 ..Default::default()
-//             },
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.ts": "",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
-//         .await
-//         .unwrap();
-
-//     let fake_server = fake_language_servers.next().await.unwrap();
-
-//     let text = "let a = b.fqn";
-//     buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
-//     let completions = project.update(cx, |project, cx| {
-//         project.completions(&buffer, text.len(), cx)
-//     });
-
-//     fake_server
-//         .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
-//             Ok(Some(lsp2::CompletionResponse::Array(vec![
-//                 lsp2::CompletionItem {
-//                     label: "fullyQualifiedName?".into(),
-//                     insert_text: Some("fully\rQualified\r\nName".into()),
-//                     ..Default::default()
-//                 },
-//             ])))
-//         })
-//         .next()
-//         .await;
-//     let completions = completions.await.unwrap();
-//     assert_eq!(completions.len(), 1);
-//     assert_eq!(completions[0].new_text, "fully\nQualified\nName");
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "TypeScript".into(),
-//             path_suffixes: vec!["ts".to_string()],
-//             ..Default::default()
-//         },
-//         None,
-//     );
-//     let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.ts": "a",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
-//         .await
-//         .unwrap();
-
-//     let fake_server = fake_language_servers.next().await.unwrap();
-
-//     // The language server returns code actions that contain commands, not edits.
-//     let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
-//     fake_server
-//         .handle_request::<lsp2::request::CodeActionRequest, _, _>(|_, _| async move {
-//             Ok(Some(vec![
-//                 lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
-//                     title: "The code action".into(),
-//                     command: Some(lsp2::Command {
-//                         title: "The command".into(),
-//                         command: "_the/command".into(),
-//                         arguments: Some(vec![json!("the-argument")]),
-//                     }),
-//                     ..Default::default()
-//                 }),
-//                 lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
-//                     title: "two".into(),
-//                     ..Default::default()
-//                 }),
-//             ]))
-//         })
-//         .next()
-//         .await;
-
-//     let action = actions.await.unwrap()[0].clone();
-//     let apply = project.update(cx, |project, cx| {
-//         project.apply_code_action(buffer.clone(), action, true, cx)
-//     });
-
-//     // Resolving the code action does not populate its edits. In the absence
-//     // of edits, we must execute the given command.
-//     fake_server.handle_request::<lsp2::request::CodeActionResolveRequest, _, _>(
-//         |action, _| async move { Ok(action) },
-//     );
-
-//     // While executing the command, the language server sends the editor
-//     // a `workspaceEdit` request.
-//     fake_server
-//         .handle_request::<lsp2::request::ExecuteCommand, _, _>({
-//             let fake = fake_server.clone();
-//             move |params, _| {
-//                 assert_eq!(params.command, "_the/command");
-//                 let fake = fake.clone();
-//                 async move {
-//                     fake.server
-//                         .request::<lsp2::request::ApplyWorkspaceEdit>(
-//                             lsp2::ApplyWorkspaceEditParams {
-//                                 label: None,
-//                                 edit: lsp2::WorkspaceEdit {
-//                                     changes: Some(
-//                                         [(
-//                                             lsp2::Url::from_file_path("/dir/a.ts").unwrap(),
-//                                             vec![lsp2::TextEdit {
-//                                                 range: lsp2::Range::new(
-//                                                     lsp2::Position::new(0, 0),
-//                                                     lsp2::Position::new(0, 0),
-//                                                 ),
-//                                                 new_text: "X".into(),
-//                                             }],
-//                                         )]
-//                                         .into_iter()
-//                                         .collect(),
-//                                     ),
-//                                     ..Default::default()
-//                                 },
-//                             },
-//                         )
-//                         .await
-//                         .unwrap();
-//                     Ok(Some(json!(null)))
-//                 }
-//             }
-//         })
-//         .next()
-//         .await;
-
-//     // Applying the code action returns a project transaction containing the edits
-//     // sent by the language server in its `workspaceEdit` request.
-//     let transaction = apply.await.unwrap();
-//     assert!(transaction.0.contains_key(&buffer));
-//     buffer.update(cx, |buffer, cx| {
-//         assert_eq!(buffer.text(), "Xa");
-//         buffer.undo(cx);
-//         assert_eq!(buffer.text(), "a");
-//     });
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_save_file(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "file1": "the old contents",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
-//         .await
-//         .unwrap();
-//     buffer.update(cx, |buffer, cx| {
-//         assert_eq!(buffer.text(), "the old contents");
-//         buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
-//     });
-
-//     project
-//         .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
-//         .await
-//         .unwrap();
-
-//     let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
-//     assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
-// }
-
-// #[gpui::test]
-// async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "file1": "the old contents",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
-//         .await
-//         .unwrap();
-//     buffer.update(cx, |buffer, cx| {
-//         buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
-//     });
-
-//     project
-//         .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
-//         .await
-//         .unwrap();
-
-//     let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
-//     assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
-// }
-
-// #[gpui::test]
-// async fn test_save_as(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree("/dir", json!({})).await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     let languages = project.read_with(cx, |project, _| project.languages().clone());
-//     languages.register(
-//         "/some/path",
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".into()],
-//             ..Default::default()
-//         },
-//         tree_sitter_rust::language(),
-//         vec![],
-//         |_| Default::default(),
-//     );
-
-//     let buffer = project.update(cx, |project, cx| {
-//         project.create_buffer("", None, cx).unwrap()
-//     });
-//     buffer.update(cx, |buffer, cx| {
-//         buffer.edit([(0..0, "abc")], None, cx);
-//         assert!(buffer.is_dirty());
-//         assert!(!buffer.has_conflict());
-//         assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
-//     });
-//     project
-//         .update(cx, |project, cx| {
-//             project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
-
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, cx| {
-//         assert_eq!(
-//             buffer.file().unwrap().full_path(cx),
-//             Path::new("dir/file1.rs")
-//         );
-//         assert!(!buffer.is_dirty());
-//         assert!(!buffer.has_conflict());
-//         assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
-//     });
-
-//     let opened_buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/dir/file1.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert_eq!(opened_buffer, buffer);
-// }
-
-// #[gpui::test(retries = 5)]
-// async fn test_rescan_and_remote_updates(
-//     deterministic: Arc<Deterministic>,
-//     cx: &mut gpui::TestAppContext,
-// ) {
-//     init_test(cx);
-//     cx.foreground().allow_parking();
-
-//     let dir = temp_tree(json!({
-//         "a": {
-//             "file1": "",
-//             "file2": "",
-//             "file3": "",
-//         },
-//         "b": {
-//             "c": {
-//                 "file4": "",
-//                 "file5": "",
-//             }
-//         }
-//     }));
-
-//     let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
-//     let rpc = project.read_with(cx, |p, _| p.client.clone());
-
-//     let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
-//         let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
-//         async move { buffer.await.unwrap() }
-//     };
-//     let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
-//         project.read_with(cx, |project, cx| {
-//             let tree = project.worktrees(cx).next().unwrap();
-//             tree.read(cx)
-//                 .entry_for_path(path)
-//                 .unwrap_or_else(|| panic!("no entry for path {}", path))
-//                 .id
-//         })
-//     };
-
-//     let buffer2 = buffer_for_path("a/file2", cx).await;
-//     let buffer3 = buffer_for_path("a/file3", cx).await;
-//     let buffer4 = buffer_for_path("b/c/file4", cx).await;
-//     let buffer5 = buffer_for_path("b/c/file5", cx).await;
-
-//     let file2_id = id_for_path("a/file2", cx);
-//     let file3_id = id_for_path("a/file3", cx);
-//     let file4_id = id_for_path("b/c/file4", cx);
-
-//     // Create a remote copy of this worktree.
-//     let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-
-//     let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
-
-//     let updates = Arc::new(Mutex::new(Vec::new()));
-//     tree.update(cx, |tree, cx| {
-//         let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-//             let updates = updates.clone();
-//             move |update| {
-//                 updates.lock().push(update);
-//                 async { true }
-//             }
-//         });
-//     });
-
-//     let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
-//     deterministic.run_until_parked();
-
-//     cx.read(|cx| {
-//         assert!(!buffer2.read(cx).is_dirty());
-//         assert!(!buffer3.read(cx).is_dirty());
-//         assert!(!buffer4.read(cx).is_dirty());
-//         assert!(!buffer5.read(cx).is_dirty());
-//     });
-
-//     // Rename and delete files and directories.
-//     tree.flush_fs_events(cx).await;
-//     std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
-//     std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
-//     std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
-//     std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
-//     tree.flush_fs_events(cx).await;
-
-//     let expected_paths = vec![
-//         "a",
-//         "a/file1",
-//         "a/file2.new",
-//         "b",
-//         "d",
-//         "d/file3",
-//         "d/file4",
-//     ];
-
-//     cx.read(|app| {
-//         assert_eq!(
-//             tree.read(app)
-//                 .paths()
-//                 .map(|p| p.to_str().unwrap())
-//                 .collect::<Vec<_>>(),
-//             expected_paths
-//         );
-
-//         assert_eq!(id_for_path("a/file2.new", cx), file2_id);
-//         assert_eq!(id_for_path("d/file3", cx), file3_id);
-//         assert_eq!(id_for_path("d/file4", cx), file4_id);
-
-//         assert_eq!(
-//             buffer2.read(app).file().unwrap().path().as_ref(),
-//             Path::new("a/file2.new")
-//         );
-//         assert_eq!(
-//             buffer3.read(app).file().unwrap().path().as_ref(),
-//             Path::new("d/file3")
-//         );
-//         assert_eq!(
-//             buffer4.read(app).file().unwrap().path().as_ref(),
-//             Path::new("d/file4")
-//         );
-//         assert_eq!(
-//             buffer5.read(app).file().unwrap().path().as_ref(),
-//             Path::new("b/c/file5")
-//         );
-
-//         assert!(!buffer2.read(app).file().unwrap().is_deleted());
-//         assert!(!buffer3.read(app).file().unwrap().is_deleted());
-//         assert!(!buffer4.read(app).file().unwrap().is_deleted());
-//         assert!(buffer5.read(app).file().unwrap().is_deleted());
-//     });
-
-//     // Update the remote worktree. Check that it becomes consistent with the
-//     // local worktree.
-//     deterministic.run_until_parked();
-//     remote.update(cx, |remote, _| {
-//         for update in updates.lock().drain(..) {
-//             remote.as_remote_mut().unwrap().update_from_remote(update);
-//         }
-//     });
-//     deterministic.run_until_parked();
-//     remote.read_with(cx, |remote, _| {
-//         assert_eq!(
-//             remote
-//                 .paths()
-//                 .map(|p| p.to_str().unwrap())
-//                 .collect::<Vec<_>>(),
-//             expected_paths
-//         );
-//     });
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_buffer_identity_across_renames(
-//     deterministic: Arc<Deterministic>,
-//     cx: &mut gpui::TestAppContext,
-// ) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a": {
-//                 "file1": "",
-//             }
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs, [Path::new("/dir")], cx).await;
-//     let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-//     let tree_id = tree.read_with(cx, |tree, _| tree.id());
-
-//     let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
-//         project.read_with(cx, |project, cx| {
-//             let tree = project.worktrees(cx).next().unwrap();
-//             tree.read(cx)
-//                 .entry_for_path(path)
-//                 .unwrap_or_else(|| panic!("no entry for path {}", path))
-//                 .id
-//         })
-//     };
-
-//     let dir_id = id_for_path("a", cx);
-//     let file_id = id_for_path("a/file1", cx);
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
-//         .await
-//         .unwrap();
-//     buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
-
-//     project
-//         .update(cx, |project, cx| {
-//             project.rename_entry(dir_id, Path::new("b"), cx)
-//         })
-//         .unwrap()
-//         .await
-//         .unwrap();
-//     deterministic.run_until_parked();
-//     assert_eq!(id_for_path("b", cx), dir_id);
-//     assert_eq!(id_for_path("b/file1", cx), file_id);
-//     buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
-// }
-
-// #[gpui2::test]
-// async fn test_buffer_deduping(cx: &mut gpui2::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "a.txt": "a-contents",
-//             "b.txt": "b-contents",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     // Spawn multiple tasks to open paths, repeating some paths.
-//     let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
-//         (
-//             p.open_local_buffer("/dir/a.txt", cx),
-//             p.open_local_buffer("/dir/b.txt", cx),
-//             p.open_local_buffer("/dir/a.txt", cx),
-//         )
-//     });
-
-//     let buffer_a_1 = buffer_a_1.await.unwrap();
-//     let buffer_a_2 = buffer_a_2.await.unwrap();
-//     let buffer_b = buffer_b.await.unwrap();
-//     assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
-//     assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
-
-//     // There is only one buffer per path.
-//     let buffer_a_id = buffer_a_1.id();
-//     assert_eq!(buffer_a_2.id(), buffer_a_id);
-
-//     // Open the same path again while it is still open.
-//     drop(buffer_a_1);
-//     let buffer_a_3 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
-//         .await
-//         .unwrap();
-
-//     // There's still only one buffer per path.
-//     assert_eq!(buffer_a_3.id(), buffer_a_id);
-// }
-
-// #[gpui2::test]
-// async fn test_buffer_is_dirty(cx: &mut gpui2::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "file1": "abc",
-//             "file2": "def",
-//             "file3": "ghi",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     let buffer1 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
-//         .await
-//         .unwrap();
-//     let events = Rc::new(RefCell::new(Vec::new()));
-
-//     // Initially, the buffer isn't dirty.
-//     buffer1.update(cx, |buffer, cx| {
-//         cx.subscribe(&buffer1, {
-//             let events = events.clone();
-//             move |_, _, event, _| match event {
-//                 language2::Event::Operation(_) => {}
-//                 _ => events.borrow_mut().push(event.clone()),
-//             }
-//         })
-//         .detach();
-
-//         assert!(!buffer.is_dirty());
-//         assert!(events.borrow().is_empty());
-
-//         buffer.edit([(1..2, "")], None, cx);
-//     });
-
-//     // After the first edit, the buffer is dirty and emits a DirtyChanged event.
-//     buffer1.update(cx, |buffer, cx| {
-//         assert!(buffer.text() == "ac");
-//         assert!(buffer.is_dirty());
-//         assert_eq!(
-//             *events.borrow(),
-//             &[language2::Event::Edited, language2::Event::DirtyChanged]
-//         );
-//         events.borrow_mut().clear();
-//         buffer.did_save(
-//             buffer.version(),
-//             buffer.as_rope().fingerprint(),
-//             buffer.file().unwrap().mtime(),
-//             cx,
-//         );
-//     });
-
-//     // After saving, the buffer is not dirty and emits a Saved event.
-//     buffer1.update(cx, |buffer, cx| {
-//         assert!(!buffer.is_dirty());
-//         assert_eq!(*events.borrow(), &[language2::Event::Saved]);
-//         events.borrow_mut().clear();
-
-//         buffer.edit([(1..1, "B")], None, cx);
-//         buffer.edit([(2..2, "D")], None, cx);
-//     });
-
-//     // After editing again, the buffer is dirty and emits another DirtyChanged event.
-//     buffer1.update(cx, |buffer, cx| {
-//         assert!(buffer.text() == "aBDc");
-//         assert!(buffer.is_dirty());
-//         assert_eq!(
-//             *events.borrow(),
-//             &[
-//                 language2::Event::Edited,
-//                 language2::Event::DirtyChanged,
-//                 language2::Event::Edited,
-//             ],
-//         );
-//         events.borrow_mut().clear();
-
-//         // After restoring the buffer to its previously-saved state,
-//         // the buffer is not considered dirty anymore.
-//         buffer.edit([(1..3, "")], None, cx);
-//         assert!(buffer.text() == "ac");
-//         assert!(!buffer.is_dirty());
-//     });
-
-//     assert_eq!(
-//         *events.borrow(),
-//         &[language2::Event::Edited, language2::Event::DirtyChanged]
-//     );
-
-//     // When a file is deleted, the buffer is considered dirty.
-//     let events = Rc::new(RefCell::new(Vec::new()));
-//     let buffer2 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
-//         .await
-//         .unwrap();
-//     buffer2.update(cx, |_, cx| {
-//         cx.subscribe(&buffer2, {
-//             let events = events.clone();
-//             move |_, _, event, _| events.borrow_mut().push(event.clone())
-//         })
-//         .detach();
-//     });
-
-//     fs.remove_file("/dir/file2".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-//     buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
-//     assert_eq!(
-//         *events.borrow(),
-//         &[
-//             language2::Event::DirtyChanged,
-//             language2::Event::FileHandleChanged
-//         ]
-//     );
-
-//     // If a file is already dirty when it's deleted, we don't emit another DirtyChanged event.
-//     let events = Rc::new(RefCell::new(Vec::new()));
-//     let buffer3 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
-//         .await
-//         .unwrap();
-//     buffer3.update(cx, |_, cx| {
-//         cx.subscribe(&buffer3, {
-//             let events = events.clone();
-//             move |_, _, event, _| events.borrow_mut().push(event.clone())
-//         })
-//         .detach();
-//     });
-
-//     buffer3.update(cx, |buffer, cx| {
-//         buffer.edit([(0..0, "x")], None, cx);
-//     });
-//     events.borrow_mut().clear();
-//     fs.remove_file("/dir/file3".as_ref(), Default::default())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-//     assert_eq!(*events.borrow(), &[language2::Event::FileHandleChanged]);
-//     cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
-// }
-
-// #[gpui::test]
-// async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let initial_contents = "aaa\nbbbbb\nc\n";
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "the-file": initial_contents,
-//         }),
-//     )
-//     .await;
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
-//         .await
-//         .unwrap();
-
-//     let anchors = (0..3)
-//         .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
-//         .collect::<Vec<_>>();
-
-//     // Change the file on disk, adding two new lines of text, and removing
-//     // one line.
-//     buffer.read_with(cx, |buffer, _| {
-//         assert!(!buffer.is_dirty());
-//         assert!(!buffer.has_conflict());
-//     });
-//     let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
-//     fs.save(
-//         "/dir/the-file".as_ref(),
-//         &new_contents.into(),
-//         LineEnding::Unix,
-//     )
-//     .await
-//     .unwrap();
-
-//     // Because the buffer was not modified, it is reloaded from disk. Its
-//     // contents are edited according to the diff between the old and new
-//     // file contents.
-//     cx.foreground().run_until_parked();
-//     buffer.update(cx, |buffer, _| {
-//         assert_eq!(buffer.text(), new_contents);
-//         assert!(!buffer.is_dirty());
-//         assert!(!buffer.has_conflict());
-
-//         let anchor_positions = anchors
-//             .iter()
-//             .map(|anchor| anchor.to_point(&*buffer))
-//             .collect::<Vec<_>>();
-//         assert_eq!(
-//             anchor_positions,
-//             [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
-//         );
-//     });
-
-//     // Modify the buffer
-//     buffer.update(cx, |buffer, cx| {
-//         buffer.edit([(0..0, " ")], None, cx);
-//         assert!(buffer.is_dirty());
-//         assert!(!buffer.has_conflict());
-//     });
-
-//     // Change the file on disk again, adding blank lines to the beginning.
-//     fs.save(
-//         "/dir/the-file".as_ref(),
-//         &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
-//         LineEnding::Unix,
-//     )
-//     .await
-//     .unwrap();
-
-//     // Because the buffer is modified, it doesn't reload from disk, but is
-//     // marked as having a conflict.
-//     cx.foreground().run_until_parked();
-//     buffer.read_with(cx, |buffer, _| {
-//         assert!(buffer.has_conflict());
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "file1": "a\nb\nc\n",
-//             "file2": "one\r\ntwo\r\nthree\r\n",
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-//     let buffer1 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
-//         .await
-//         .unwrap();
-//     let buffer2 = project
-//         .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
-//         .await
-//         .unwrap();
-
-//     buffer1.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.text(), "a\nb\nc\n");
-//         assert_eq!(buffer.line_ending(), LineEnding::Unix);
-//     });
-//     buffer2.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.text(), "one\ntwo\nthree\n");
-//         assert_eq!(buffer.line_ending(), LineEnding::Windows);
-//     });
-
-//     // Change a file's line endings on disk from Unix to Windows. The buffer's
-//     // state updates correctly.
-//     fs.save(
-//         "/dir/file1".as_ref(),
-//         &"aaa\nb\nc\n".into(),
-//         LineEnding::Windows,
-//     )
-//     .await
-//     .unwrap();
-//     cx.foreground().run_until_parked();
-//     buffer1.read_with(cx, |buffer, _| {
-//         assert_eq!(buffer.text(), "aaa\nb\nc\n");
-//         assert_eq!(buffer.line_ending(), LineEnding::Windows);
-//     });
-
-//     // Save a file with Windows line endings. The file is written correctly.
-//     buffer2.update(cx, |buffer, cx| {
-//         buffer.set_text("one\ntwo\nthree\nfour\n", cx);
-//     });
-//     project
-//         .update(cx, |project, cx| project.save_buffer(buffer2, cx))
-//         .await
-//         .unwrap();
-//     assert_eq!(
-//         fs.load("/dir/file2".as_ref()).await.unwrap(),
-//         "one\r\ntwo\r\nthree\r\nfour\r\n",
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/the-dir",
-//         json!({
-//             "a.rs": "
-//                 fn foo(mut v: Vec<usize>) {
-//                     for x in &v {
-//                         v.push(1);
-//                     }
-//                 }
-//             "
-//             .unindent(),
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
-//     let buffer = project
-//         .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
-//         .await
-//         .unwrap();
-
-//     let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
-//     let message = lsp2::PublishDiagnosticsParams {
-//         uri: buffer_uri.clone(),
-//         diagnostics: vec![
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
-//                 severity: Some(DiagnosticSeverity::WARNING),
-//                 message: "error 1".to_string(),
-//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
-//                     location: lsp2::Location {
-//                         uri: buffer_uri.clone(),
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(1, 8),
-//                             lsp2::Position::new(1, 9),
-//                         ),
-//                     },
-//                     message: "error 1 hint 1".to_string(),
-//                 }]),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
-//                 severity: Some(DiagnosticSeverity::HINT),
-//                 message: "error 1 hint 1".to_string(),
-//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
-//                     location: lsp2::Location {
-//                         uri: buffer_uri.clone(),
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(1, 8),
-//                             lsp2::Position::new(1, 9),
-//                         ),
-//                     },
-//                     message: "original diagnostic".to_string(),
-//                 }]),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(2, 8), lsp2::Position::new(2, 17)),
-//                 severity: Some(DiagnosticSeverity::ERROR),
-//                 message: "error 2".to_string(),
-//                 related_information: Some(vec![
-//                     lsp2::DiagnosticRelatedInformation {
-//                         location: lsp2::Location {
-//                             uri: buffer_uri.clone(),
-//                             range: lsp2::Range::new(
-//                                 lsp2::Position::new(1, 13),
-//                                 lsp2::Position::new(1, 15),
-//                             ),
-//                         },
-//                         message: "error 2 hint 1".to_string(),
-//                     },
-//                     lsp2::DiagnosticRelatedInformation {
-//                         location: lsp2::Location {
-//                             uri: buffer_uri.clone(),
-//                             range: lsp2::Range::new(
-//                                 lsp2::Position::new(1, 13),
-//                                 lsp2::Position::new(1, 15),
-//                             ),
-//                         },
-//                         message: "error 2 hint 2".to_string(),
-//                     },
-//                 ]),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
-//                 severity: Some(DiagnosticSeverity::HINT),
-//                 message: "error 2 hint 1".to_string(),
-//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
-//                     location: lsp2::Location {
-//                         uri: buffer_uri.clone(),
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(2, 8),
-//                             lsp2::Position::new(2, 17),
-//                         ),
-//                     },
-//                     message: "original diagnostic".to_string(),
-//                 }]),
-//                 ..Default::default()
-//             },
-//             lsp2::Diagnostic {
-//                 range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
-//                 severity: Some(DiagnosticSeverity::HINT),
-//                 message: "error 2 hint 2".to_string(),
-//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
-//                     location: lsp2::Location {
-//                         uri: buffer_uri,
-//                         range: lsp2::Range::new(
-//                             lsp2::Position::new(2, 8),
-//                             lsp2::Position::new(2, 17),
-//                         ),
-//                     },
-//                     message: "original diagnostic".to_string(),
-//                 }]),
-//                 ..Default::default()
-//             },
-//         ],
-//         version: None,
-//     };
-
-//     project
-//         .update(cx, |p, cx| {
-//             p.update_diagnostics(LanguageServerId(0), message, &[], cx)
-//         })
-//         .unwrap();
-//     let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-
-//     assert_eq!(
-//         buffer
-//             .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
-//             .collect::<Vec<_>>(),
-//         &[
-//             DiagnosticEntry {
-//                 range: Point::new(1, 8)..Point::new(1, 9),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::WARNING,
-//                     message: "error 1".to_string(),
-//                     group_id: 1,
-//                     is_primary: true,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(1, 8)..Point::new(1, 9),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 1 hint 1".to_string(),
-//                     group_id: 1,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(1, 13)..Point::new(1, 15),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 2 hint 1".to_string(),
-//                     group_id: 0,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(1, 13)..Point::new(1, 15),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 2 hint 2".to_string(),
-//                     group_id: 0,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(2, 8)..Point::new(2, 17),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::ERROR,
-//                     message: "error 2".to_string(),
-//                     group_id: 0,
-//                     is_primary: true,
-//                     ..Default::default()
-//                 }
-//             }
-//         ]
-//     );
-
-//     assert_eq!(
-//         buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
-//         &[
-//             DiagnosticEntry {
-//                 range: Point::new(1, 13)..Point::new(1, 15),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 2 hint 1".to_string(),
-//                     group_id: 0,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(1, 13)..Point::new(1, 15),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 2 hint 2".to_string(),
-//                     group_id: 0,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(2, 8)..Point::new(2, 17),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::ERROR,
-//                     message: "error 2".to_string(),
-//                     group_id: 0,
-//                     is_primary: true,
-//                     ..Default::default()
-//                 }
-//             }
-//         ]
-//     );
-
-//     assert_eq!(
-//         buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
-//         &[
-//             DiagnosticEntry {
-//                 range: Point::new(1, 8)..Point::new(1, 9),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::WARNING,
-//                     message: "error 1".to_string(),
-//                     group_id: 1,
-//                     is_primary: true,
-//                     ..Default::default()
-//                 }
-//             },
-//             DiagnosticEntry {
-//                 range: Point::new(1, 8)..Point::new(1, 9),
-//                 diagnostic: Diagnostic {
-//                     severity: DiagnosticSeverity::HINT,
-//                     message: "error 1 hint 1".to_string(),
-//                     group_id: 1,
-//                     is_primary: false,
-//                     ..Default::default()
-//                 }
-//             },
-//         ]
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_rename(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let mut language = Language::new(
-//         LanguageConfig {
-//             name: "Rust".into(),
-//             path_suffixes: vec!["rs".to_string()],
-//             ..Default::default()
-//         },
-//         Some(tree_sitter_rust::language()),
-//     );
-//     let mut fake_servers = language
-//         .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-//             capabilities: lsp2::ServerCapabilities {
-//                 rename_provider: Some(lsp2::OneOf::Right(lsp2::RenameOptions {
-//                     prepare_provider: Some(true),
-//                     work_done_progress_options: Default::default(),
-//                 })),
-//                 ..Default::default()
-//             },
-//             ..Default::default()
-//         }))
-//         .await;
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "one.rs": "const ONE: usize = 1;",
-//             "two.rs": "const TWO: usize = one::ONE + one::ONE;"
-//         }),
-//     )
-//     .await;
-
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-//     project.update(cx, |project, _| project.languages.add(Arc::new(language)));
-//     let buffer = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/dir/one.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     let fake_server = fake_servers.next().await.unwrap();
-
-//     let response = project.update(cx, |project, cx| {
-//         project.prepare_rename(buffer.clone(), 7, cx)
-//     });
-//     fake_server
-//         .handle_request::<lsp2::request::PrepareRenameRequest, _, _>(|params, _| async move {
-//             assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
-//             assert_eq!(params.position, lsp2::Position::new(0, 7));
-//             Ok(Some(lsp2::PrepareRenameResponse::Range(lsp2::Range::new(
-//                 lsp2::Position::new(0, 6),
-//                 lsp2::Position::new(0, 9),
-//             ))))
-//         })
-//         .next()
-//         .await
-//         .unwrap();
-//     let range = response.await.unwrap().unwrap();
-//     let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
-//     assert_eq!(range, 6..9);
-
-//     let response = project.update(cx, |project, cx| {
-//         project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
-//     });
-//     fake_server
-//         .handle_request::<lsp2::request::Rename, _, _>(|params, _| async move {
-//             assert_eq!(
-//                 params.text_document_position.text_document.uri.as_str(),
-//                 "file:///dir/one.rs"
-//             );
-//             assert_eq!(
-//                 params.text_document_position.position,
-//                 lsp2::Position::new(0, 7)
-//             );
-//             assert_eq!(params.new_name, "THREE");
-//             Ok(Some(lsp2::WorkspaceEdit {
-//                 changes: Some(
-//                     [
-//                         (
-//                             lsp2::Url::from_file_path("/dir/one.rs").unwrap(),
-//                             vec![lsp2::TextEdit::new(
-//                                 lsp2::Range::new(
-//                                     lsp2::Position::new(0, 6),
-//                                     lsp2::Position::new(0, 9),
-//                                 ),
-//                                 "THREE".to_string(),
-//                             )],
-//                         ),
-//                         (
-//                             lsp2::Url::from_file_path("/dir/two.rs").unwrap(),
-//                             vec![
-//                                 lsp2::TextEdit::new(
-//                                     lsp2::Range::new(
-//                                         lsp2::Position::new(0, 24),
-//                                         lsp2::Position::new(0, 27),
-//                                     ),
-//                                     "THREE".to_string(),
-//                                 ),
-//                                 lsp2::TextEdit::new(
-//                                     lsp2::Range::new(
-//                                         lsp2::Position::new(0, 35),
-//                                         lsp2::Position::new(0, 38),
-//                                     ),
-//                                     "THREE".to_string(),
-//                                 ),
-//                             ],
-//                         ),
-//                     ]
-//                     .into_iter()
-//                     .collect(),
-//                 ),
-//                 ..Default::default()
-//             }))
-//         })
-//         .next()
-//         .await
-//         .unwrap();
-//     let mut transaction = response.await.unwrap().0;
-//     assert_eq!(transaction.len(), 2);
-//     assert_eq!(
-//         transaction
-//             .remove_entry(&buffer)
-//             .unwrap()
-//             .0
-//             .read_with(cx, |buffer, _| buffer.text()),
-//         "const THREE: usize = 1;"
-//     );
-//     assert_eq!(
-//         transaction
-//             .into_keys()
-//             .next()
-//             .unwrap()
-//             .read_with(cx, |buffer, _| buffer.text()),
-//         "const TWO: usize = one::THREE + one::THREE;"
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_search(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "one.rs": "const ONE: usize = 1;",
-//             "two.rs": "const TWO: usize = one::ONE + one::ONE;",
-//             "three.rs": "const THREE: usize = one::ONE + two::TWO;",
-//             "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
-//         }),
-//     )
-//     .await;
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("two.rs".to_string(), vec![6..9]),
-//             ("three.rs".to_string(), vec![37..40])
-//         ])
-//     );
-
-//     let buffer_4 = project
-//         .update(cx, |project, cx| {
-//             project.open_local_buffer("/dir/four.rs", cx)
-//         })
-//         .await
-//         .unwrap();
-//     buffer_4.update(cx, |buffer, cx| {
-//         let text = "two::TWO";
-//         buffer.edit([(20..28, text), (31..43, text)], None, cx);
-//     });
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("two.rs".to_string(), vec![6..9]),
-//             ("three.rs".to_string(), vec![37..40]),
-//             ("four.rs".to_string(), vec![25..28, 36..39])
-//         ])
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let search_query = "file";
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "one.rs": r#"// Rust file one"#,
-//             "one.ts": r#"// TypeScript file one"#,
-//             "two.rs": r#"// Rust file two"#,
-//             "two.ts": r#"// TypeScript file two"#,
-//         }),
-//     )
-//     .await;
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     assert!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![PathMatcher::new("*.odd").unwrap()],
-//                 Vec::new()
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap()
-//         .is_empty(),
-//         "If no inclusions match, no files should be returned"
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![PathMatcher::new("*.rs").unwrap()],
-//                 Vec::new()
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.rs".to_string(), vec![8..12]),
-//             ("two.rs".to_string(), vec![8..12]),
-//         ]),
-//         "Rust-only search should give only Rust files"
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap(),
-//                 ],
-//                 Vec::new()
-//             ).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.ts".to_string(), vec![14..18]),
-//             ("two.ts".to_string(), vec![14..18]),
-//         ]),
-//         "TypeScript-only search should give only TypeScript files, even if other inclusions don't match anything"
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![
-//                     PathMatcher::new("*.rs").unwrap(),
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap(),
-//                 ],
-//                 Vec::new()
-//             ).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.rs".to_string(), vec![8..12]),
-//             ("one.ts".to_string(), vec![14..18]),
-//             ("two.rs".to_string(), vec![8..12]),
-//             ("two.ts".to_string(), vec![14..18]),
-//         ]),
-//         "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let search_query = "file";
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "one.rs": r#"// Rust file one"#,
-//             "one.ts": r#"// TypeScript file one"#,
-//             "two.rs": r#"// Rust file two"#,
-//             "two.ts": r#"// TypeScript file two"#,
-//         }),
-//     )
-//     .await;
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 Vec::new(),
-//                 vec![PathMatcher::new("*.odd").unwrap()],
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.rs".to_string(), vec![8..12]),
-//             ("one.ts".to_string(), vec![14..18]),
-//             ("two.rs".to_string(), vec![8..12]),
-//             ("two.ts".to_string(), vec![14..18]),
-//         ]),
-//         "If no exclusions match, all files should be returned"
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 Vec::new(),
-//                 vec![PathMatcher::new("*.rs").unwrap()],
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.ts".to_string(), vec![14..18]),
-//             ("two.ts".to_string(), vec![14..18]),
-//         ]),
-//         "Rust exclusion search should give only TypeScript files"
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 Vec::new(),
-//                 vec![
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap(),
-//                 ],
-//             ).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.rs".to_string(), vec![8..12]),
-//             ("two.rs".to_string(), vec![8..12]),
-//         ]),
-//         "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
-//     );
-
-//     assert!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 Vec::new(),
-//                 vec![
-//                     PathMatcher::new("*.rs").unwrap(),
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap(),
-//                 ],
-//             ).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap().is_empty(),
-//         "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
-//     init_test(cx);
-
-//     let search_query = "file";
-
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/dir",
-//         json!({
-//             "one.rs": r#"// Rust file one"#,
-//             "one.ts": r#"// TypeScript file one"#,
-//             "two.rs": r#"// Rust file two"#,
-//             "two.ts": r#"// TypeScript file two"#,
-//         }),
-//     )
-//     .await;
-//     let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
-//     assert!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![PathMatcher::new("*.odd").unwrap()],
-//                 vec![PathMatcher::new("*.odd").unwrap()],
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap()
-//         .is_empty(),
-//         "If both no exclusions and inclusions match, exclusions should win and return nothing"
-//     );
-
-//     assert!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![PathMatcher::new("*.ts").unwrap()],
-//                 vec![PathMatcher::new("*.ts").unwrap()],
-//             ).unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap()
-//         .is_empty(),
-//         "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
-//     );
-
-//     assert!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap()
-//                 ],
-//                 vec![
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap()
-//                 ],
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap()
-//         .is_empty(),
-//         "Non-matching inclusions and exclusions should not change that."
-//     );
-
-//     assert_eq!(
-//         search(
-//             &project,
-//             SearchQuery::text(
-//                 search_query,
-//                 false,
-//                 true,
-//                 vec![
-//                     PathMatcher::new("*.ts").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap()
-//                 ],
-//                 vec![
-//                     PathMatcher::new("*.rs").unwrap(),
-//                     PathMatcher::new("*.odd").unwrap()
-//                 ],
-//             )
-//             .unwrap(),
-//             cx
-//         )
-//         .await
-//         .unwrap(),
-//         HashMap::from_iter([
-//             ("one.ts".to_string(), vec![14..18]),
-//             ("two.ts".to_string(), vec![14..18]),
-//         ]),
-//         "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
-//     );
-// }
-
-// #[test]
-// fn test_glob_literal_prefix() {
-//     assert_eq!(glob_literal_prefix("**/*.js"), "");
-//     assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
-//     assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
-//     assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
-// }
-
-// async fn search(
-//     project: &ModelHandle<Project>,
-//     query: SearchQuery,
-//     cx: &mut gpui::TestAppContext,
-// ) -> Result<HashMap<String, Vec<Range<usize>>>> {
-//     let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
-//     let mut result = HashMap::default();
-//     while let Some((buffer, range)) = search_rx.next().await {
-//         result.entry(buffer).or_insert(range);
-//     }
-//     Ok(result
-//         .into_iter()
-//         .map(|(buffer, ranges)| {
-//             buffer.read_with(cx, |buffer, _| {
-//                 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
-//                 let ranges = ranges
-//                     .into_iter()
-//                     .map(|range| range.to_offset(buffer))
-//                     .collect::<Vec<_>>();
-//                 (path, ranges)
-//             })
-//         })
-//         .collect())
-// }
-
-// fn init_test(cx: &mut gpui::TestAppContext) {
-//     cx.foreground().forbid_parking();
-
-//     cx.update(|cx| {
-//         cx.set_global(SettingsStore::test(cx));
-//         language2::init(cx);
-//         Project::init_settings(cx);
-//     });
-// }
+use crate::{search::PathMatcher, Event, *};
+use fs2::FakeFs;
+use futures::{future, StreamExt};
+use gpui2::AppContext;
+use language2::{
+    language_settings::{AllLanguageSettings, LanguageSettingsContent},
+    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
+    LineEnding, OffsetRangeExt, Point, ToPoint,
+};
+use lsp2::Url;
+use parking_lot::Mutex;
+use pretty_assertions::assert_eq;
+use serde_json::json;
+use std::{mem, os, task::Poll};
+use unindent::Unindent as _;
+use util::{assert_set_eq, test::temp_tree};
+
+#[gpui2::test]
+async fn test_block_via_channel(cx: &mut gpui2::TestAppContext) {
+    cx.executor().allow_parking();
+
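+    // Spawn a real OS thread that performs blocking filesystem work and a sleep,
+    // then signals completion over the channel; the async test blocks awaiting that signal.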
+    let (tx, mut rx) = futures::channel::mpsc::unbounded();
+    let _thread = std::thread::spawn(move || {
+        std::fs::metadata("/Users").unwrap();
+        std::thread::sleep(Duration::from_millis(1000));
+        tx.unbounded_send(1).unwrap();
+    });
+    rx.next().await.unwrap();
+}
+
+#[gpui2::test]
+async fn test_block_via_smol(cx: &mut gpui2::TestAppContext) {
+    cx.executor().allow_parking();
+
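+    // smol::unblock runs the blocking closure on a separate thread pool; a task spawned
+    // on the foreground executor then awaits its result.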
+    let io_task = smol::unblock(move || {
+        println!("sleeping on thread {:?}", std::thread::current().id());
+        std::thread::sleep(Duration::from_millis(10));
+        1
+    });
+
+    let task = cx.foreground_executor().spawn(async move {
+        io_task.await;
+    });
+
+    task.await;
+}
+
+#[gpui2::test]
+async fn test_symlinks(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+
+    let dir = temp_tree(json!({
+        "root": {
+            "apple": "",
+            "banana": {
+                "carrot": {
+                    "date": "",
+                    "endive": "",
+                }
+            },
+            "fennel": {
+                "grape": "",
+            }
+        }
+    }));
+
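+    // Create a symlink to the root directory ("root_link" -> "root") and a nested
+    // symlink inside it ("finnochio" -> "fennel").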
+    let root_link_path = dir.path().join("root_link");
+    os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
+    os::unix::fs::symlink(
+        &dir.path().join("root/fennel"),
+        &dir.path().join("root/finnochio"),
+    )
+    .unwrap();
+
+    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
+
+    project.update(cx, |project, cx| {
+        let tree = project.worktrees().next().unwrap().read(cx);
+        assert_eq!(tree.file_count(), 5);
+        assert_eq!(
+            tree.inode_for_path("fennel/grape"),
+            tree.inode_for_path("finnochio/grape")
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_managing_project_specific_settings(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/the-root",
+        json!({
+            ".zed": {
+                "settings.json": r#"{ "tab_size": 8 }"#
+            },
+            "a": {
+                "a.rs": "fn a() {\n    A\n}"
+            },
+            "b": {
+                ".zed": {
+                    "settings.json": r#"{ "tab_size": 2 }"#
+                },
+                "b.rs": "fn b() {\n  B\n}"
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+
+    cx.executor().run_until_parked();
+    cx.update(|cx| {
+        let tree = worktree.read(cx);
+
+        let settings_a = language_settings(
+            None,
+            Some(
+                &(File::for_entry(
+                    tree.entry_for_path("a/a.rs").unwrap().clone(),
+                    worktree.clone(),
+                ) as _),
+            ),
+            cx,
+        );
+        let settings_b = language_settings(
+            None,
+            Some(
+                &(File::for_entry(
+                    tree.entry_for_path("b/b.rs").unwrap().clone(),
+                    worktree.clone(),
+                ) as _),
+            ),
+            cx,
+        );
+
+        assert_eq!(settings_a.tab_size.get(), 8);
+        assert_eq!(settings_b.tab_size.get(), 2);
+    });
+}
+
+#[gpui2::test]
+async fn test_managing_language_servers(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut rust_language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut json_language = Language::new(
+        LanguageConfig {
+            name: "JSON".into(),
+            path_suffixes: vec!["json".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_rust_servers = rust_language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-rust-language-server",
+            capabilities: lsp2::ServerCapabilities {
+                completion_provider: Some(lsp2::CompletionOptions {
+                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
+                    ..Default::default()
+                }),
+                ..Default::default()
+            },
+            ..Default::default()
+        }))
+        .await;
+    let mut fake_json_servers = json_language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-json-language-server",
+            capabilities: lsp2::ServerCapabilities {
+                completion_provider: Some(lsp2::CompletionOptions {
+                    trigger_characters: Some(vec![":".to_string()]),
+                    ..Default::default()
+                }),
+                ..Default::default()
+            },
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/the-root",
+        json!({
+            "test.rs": "const A: i32 = 1;",
+            "test2.rs": "",
+            "Cargo.toml": "a = 1",
+            "package.json": "{\"a\": 1}",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+
+    // Open a buffer without an associated language server.
+    let toml_buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/Cargo.toml", cx)
+        })
+        .await
+        .unwrap();
+
+    // Open a buffer with an associated language server before the language for it has been loaded.
+    let rust_buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/test.rs", cx)
+        })
+        .await
+        .unwrap();
+    rust_buffer.update(cx, |buffer, _| {
+        assert_eq!(buffer.language().map(|l| l.name()), None);
+    });
+
+    // Now we add the languages to the project, and ensure they get assigned to all
+    // the relevant open buffers.
+    project.update(cx, |project, _| {
+        project.languages.add(Arc::new(json_language));
+        project.languages.add(Arc::new(rust_language));
+    });
+    cx.executor().run_until_parked();
+    rust_buffer.update(cx, |buffer, _| {
+        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
+    });
+
+    // A server is started up, and it is notified about Rust files.
+    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentItem {
+            uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+            version: 0,
+            text: "const A: i32 = 1;".to_string(),
+            language_id: Default::default()
+        }
+    );
+
+    // The buffer is configured based on the language server's capabilities.
+    rust_buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer.completion_triggers(),
+            &[".".to_string(), "::".to_string()]
+        );
+    });
+    toml_buffer.update(cx, |buffer, _| {
+        assert!(buffer.completion_triggers().is_empty());
+    });
+
+    // Edit a buffer. The changes are reported to the language server.
+    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+            .await
+            .text_document,
+        lsp2::VersionedTextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+            1
+        )
+    );
+
+    // Open a third buffer with a different associated language server.
+    let json_buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/package.json", cx)
+        })
+        .await
+        .unwrap();
+
+    // A JSON language server is started and is notified only about the JSON buffer.
+    let mut fake_json_server = fake_json_servers.next().await.unwrap();
+    assert_eq!(
+        fake_json_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentItem {
+            uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+            version: 0,
+            text: "{\"a\": 1}".to_string(),
+            language_id: Default::default()
+        }
+    );
+
+    // This buffer is configured based on the second language server's
+    // capabilities.
+    json_buffer.update(cx, |buffer, _| {
+        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
+    });
+
+    // When opening another buffer whose language server is already running,
+    // it is also configured based on the existing language server's capabilities.
+    let rust_buffer2 = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/test2.rs", cx)
+        })
+        .await
+        .unwrap();
+    rust_buffer2.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer.completion_triggers(),
+            &[".".to_string(), "::".to_string()]
+        );
+    });
+
+    // Changes are reported only to servers matching the buffer's language.
+    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
+    rust_buffer2.update(cx, |buffer, cx| {
+        buffer.edit([(0..0, "let x = 1;")], None, cx)
+    });
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+            .await
+            .text_document,
+        lsp2::VersionedTextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
+            1
+        )
+    );
+
+    // Save notifications are reported to all servers.
+    project
+        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
+        .await
+        .unwrap();
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+        )
+    );
+    assert_eq!(
+        fake_json_server
+            .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+        )
+    );
+
+    // Renames are reported only to servers matching the buffer's language.
+    fs.rename(
+        Path::new("/the-root/test2.rs"),
+        Path::new("/the-root/test3.rs"),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
+    );
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentItem {
+            uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
+            version: 0,
+            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+            language_id: Default::default()
+        },
+    );
+
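+    // Attach a diagnostic to the renamed buffer so we can verify below that it is
+    // cleared when the file's language changes.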
+    rust_buffer2.update(cx, |buffer, cx| {
+        buffer.update_diagnostics(
+            LanguageServerId(0),
+            DiagnosticSet::from_sorted_entries(
+                vec![DiagnosticEntry {
+                    diagnostic: Default::default(),
+                    range: Anchor::MIN..Anchor::MAX,
+                }],
+                &buffer.snapshot(),
+            ),
+            cx,
+        );
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+                .count(),
+            1
+        );
+    });
+
+    // When the rename changes the extension of the file, the buffer gets closed on the old
+    // language server and gets opened on the new one.
+    fs.rename(
+        Path::new("/the-root/test3.rs"),
+        Path::new("/the-root/test3.json"),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
+    );
+    assert_eq!(
+        fake_json_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentItem {
+            uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+            version: 0,
+            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+            language_id: Default::default()
+        },
+    );
+
+    // We clear the diagnostics, since the language has changed.
+    rust_buffer2.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+                .count(),
+            0
+        );
+    });
+
+    // The renamed file's version resets after changing the language server.
+    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
+    assert_eq!(
+        fake_json_server
+            .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+            .await
+            .text_document,
+        lsp2::VersionedTextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+            1
+        )
+    );
+
+    // Restart language servers
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers(
+            vec![rust_buffer.clone(), json_buffer.clone()],
+            cx,
+        );
+    });
+
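+    // Respond to both servers' shutdown requests so the restart can complete.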
+    let mut rust_shutdown_requests = fake_rust_server
+        .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+    let mut json_shutdown_requests = fake_json_server
+        .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
+
+    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+    let mut fake_json_server = fake_json_servers.next().await.unwrap();
+
+    // Ensure the Rust document is reopened in the new Rust language server.
+    assert_eq!(
+        fake_rust_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document,
+        lsp2::TextDocumentItem {
+            uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+            version: 0,
+            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
+            language_id: Default::default()
+        }
+    );
+
+    // Ensure the JSON documents are reopened in the new JSON language server.
+    assert_set_eq!(
+        [
+            fake_json_server
+                .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+                .await
+                .text_document,
+            fake_json_server
+                .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+                .await
+                .text_document,
+        ],
+        [
+            lsp2::TextDocumentItem {
+                uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+                version: 0,
+                text: json_buffer.update(cx, |buffer, _| buffer.text()),
+                language_id: Default::default()
+            },
+            lsp2::TextDocumentItem {
+                uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+                version: 0,
+                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+                language_id: Default::default()
+            }
+        ]
+    );
+
+    // Close notifications are reported only to servers matching the buffer's language.
+    cx.update(|_| drop(json_buffer));
+    let close_message = lsp2::DidCloseTextDocumentParams {
+        text_document: lsp2::TextDocumentIdentifier::new(
+            lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+        ),
+    };
+    assert_eq!(
+        fake_json_server
+            .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+            .await,
+        close_message,
+    );
+}
+
+#[gpui2::test]
+async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-language-server",
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/the-root",
+        json!({
+            ".gitignore": "target\n",
+            "src": {
+                "a.rs": "",
+                "b.rs": "",
+            },
+            "target": {
+                "x": {
+                    "out": {
+                        "x.rs": ""
+                    }
+                },
+                "y": {
+                    "out": {
+                        "y.rs": "",
+                    }
+                },
+                "z": {
+                    "out": {
+                        "z.rs": ""
+                    }
+                }
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+    project.update(cx, |project, _| {
+        project.languages.add(Arc::new(language));
+    });
+    cx.executor().run_until_parked();
+
+    // Start the language server by opening a buffer with a compatible file extension.
+    let _buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/src/a.rs", cx)
+        })
+        .await
+        .unwrap();
+
+    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
+    project.update(cx, |project, cx| {
+        let worktree = project.worktrees().next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+            ]
+        );
+    });
+
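+    // Record how many directory scans have happened so far; watching an ignored
+    // directory below should trigger additional scans.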
+    let prev_read_dir_count = fs.read_dir_call_count();
+
+    // Keep track of the FS events reported to the language server.
+    let fake_server = fake_servers.next().await.unwrap();
+    let file_changes = Arc::new(Mutex::new(Vec::new()));
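+    // The fake server registers watchers for Cargo.toml, Rust/C sources under src,
+    // and Rust files under the ignored target/y directory.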
+    fake_server
+        .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
+            registrations: vec![lsp2::Registration {
+                id: Default::default(),
+                method: "workspace/didChangeWatchedFiles".to_string(),
+                register_options: serde_json::to_value(
+                    lsp2::DidChangeWatchedFilesRegistrationOptions {
+                        watchers: vec![
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/Cargo.toml".to_string(),
+                                ),
+                                kind: None,
+                            },
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/src/*.{rs,c}".to_string(),
+                                ),
+                                kind: None,
+                            },
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/target/y/**/*.rs".to_string(),
+                                ),
+                                kind: None,
+                            },
+                        ],
+                    },
+                )
+                .ok(),
+            }],
+        })
+        .await
+        .unwrap();
+    fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
+        let file_changes = file_changes.clone();
+        move |params, _| {
+            let mut file_changes = file_changes.lock();
+            file_changes.extend(params.changes);
+            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
+        }
+    });
+
+    cx.executor().run_until_parked();
+    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
+    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
+
+    // Now the language server has asked us to watch an ignored directory path,
+    // so we recursively load it.
+    project.update(cx, |project, cx| {
+        let worktree = project.worktrees().next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+                (Path::new("target/x"), true),
+                (Path::new("target/y"), true),
+                (Path::new("target/y/out"), true),
+                (Path::new("target/y/out/y.rs"), true),
+                (Path::new("target/z"), true),
+            ]
+        );
+    });
+
+    // Perform some file system mutations, three of which match the watched patterns
+    // and two of which do not.
+    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+
+    // The language server receives events for the FS mutations that match its watch patterns.
+    cx.executor().run_until_parked();
+    assert_eq!(
+        &*file_changes.lock(),
+        &[
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
+                typ: lsp2::FileChangeType::DELETED,
+            },
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
+                typ: lsp2::FileChangeType::CREATED,
+            },
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
+                typ: lsp2::FileChangeType::CREATED,
+            },
+        ]
+    );
+}
+
+#[gpui2::test]
+async fn test_single_file_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": "let a = 1;",
+            "b.rs": "let b = 2;"
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
+
+    let buffer_a = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+    let buffer_b = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+        .await
+        .unwrap();
+
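+    // Publish an error for a.rs and a warning for b.rs; each single-file worktree
+    // should see only its own diagnostic.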
+    project.update(cx, |project, cx| {
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 4),
+                            lsp2::Position::new(0, 5),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::ERROR),
+                        message: "error 1".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 4),
+                            lsp2::Position::new(0, 5),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::WARNING),
+                        message: "error 2".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+    });
+
+    buffer_a.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let ", None),
+                ("a", Some(DiagnosticSeverity::ERROR)),
+                (" = 1;", None),
+            ]
+        );
+    });
+    buffer_b.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let ", None),
+                ("b", Some(DiagnosticSeverity::WARNING)),
+                (" = 2;", None),
+            ]
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_hidden_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "dir": {
+                "a.rs": "let a = 1;",
+            },
+            "other.rs": "let b = c;"
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
+
+    let (worktree, _) = project
+        .update(cx, |project, cx| {
+            project.find_or_create_local_worktree("/root/other.rs", false, cx)
+        })
+        .await
+        .unwrap();
+    let worktree_id = worktree.update(cx, |tree, _| tree.id());
+
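+    // Publish a diagnostic for the file backing the hidden worktree.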
+    project.update(cx, |project, cx| {
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/root/other.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 8),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::ERROR),
+                        message: "unknown variable 'c'".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+    });
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
+        .await
+        .unwrap();
+    buffer.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let b = ", None),
+                ("c", Some(DiagnosticSeverity::ERROR)),
+                (";", None),
+            ]
+        );
+    });
+
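+    // Diagnostics from hidden worktrees should not contribute to the project-wide summaries.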
+    project.update(cx, |project, cx| {
+        assert_eq!(project.diagnostic_summaries(cx).next(), None);
+        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
+    });
+}
+
+#[gpui2::test]
+async fn test_disk_based_diagnostics_progress(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let progress_token = "the-progress-token";
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_progress_token: Some(progress_token.into()),
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": "fn a() { A }",
+            "b.rs": "const y: i32 = 1",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
+
+    // Cause the worktree to start the fake language server.
+    let _buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+        .await
+        .unwrap();
+
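+    // Subscribe to project events so the diagnostics lifecycle can be observed below.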
+    let mut events = cx.events(&project);
+
+    let fake_server = fake_servers.next().await.unwrap();
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::LanguageServerAdded(LanguageServerId(0)),
+    );
+
+    fake_server
+        .start_progress(format!("{}/0", progress_token))
+        .await;
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsStarted {
+            language_server_id: LanguageServerId(0),
+        }
+    );
+
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: vec![lsp2::Diagnostic {
+            range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+            severity: Some(lsp2::DiagnosticSeverity::ERROR),
+            message: "undefined variable 'A'".to_string(),
+            ..Default::default()
+        }],
+    });
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiagnosticsUpdated {
+            language_server_id: LanguageServerId(0),
+            path: (worktree_id, Path::new("a.rs")).into()
+        }
+    );
+
+    fake_server.end_progress(format!("{}/0", progress_token));
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsFinished {
+            language_server_id: LanguageServerId(0)
+        }
+    );
+
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    buffer.update(cx, |buffer, _| {
+        let snapshot = buffer.snapshot();
+        let diagnostics = snapshot
+            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+            .collect::<Vec<_>>();
+        assert_eq!(
+            diagnostics,
+            &[DiagnosticEntry {
+                range: Point::new(0, 9)..Point::new(0, 10),
+                diagnostic: Diagnostic {
+                    severity: lsp2::DiagnosticSeverity::ERROR,
+                    message: "undefined variable 'A'".to_string(),
+                    group_id: 0,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            }]
+        )
+    });
+
+    // Ensure publishing empty diagnostics twice only results in one update event.
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: Default::default(),
+    });
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiagnosticsUpdated {
+            language_server_id: LanguageServerId(0),
+            path: (worktree_id, Path::new("a.rs")).into()
+        }
+    );
+
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: Default::default(),
+    });
+    cx.executor().run_until_parked();
+    assert_eq!(futures::poll!(events.next()), Poll::Pending);
+}
+
+#[gpui2::test]
+async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let progress_token = "the-progress-token";
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            disk_based_diagnostics_progress_token: Some(progress_token.into()),
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Simulate diagnostics starting to update.
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.start_progress(progress_token).await;
+
+    // Restart the server before the diagnostics finish updating.
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer], cx);
+    });
+    let mut events = cx.events(&project);
+
+    // Simulate the newly started server sending more diagnostics.
+    let fake_server = fake_servers.next().await.unwrap();
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::LanguageServerAdded(LanguageServerId(1))
+    );
+    fake_server.start_progress(progress_token).await;
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsStarted {
+            language_server_id: LanguageServerId(1)
+        }
+    );
+    project.update(cx, |project, _| {
+        assert_eq!(
+            project
+                .language_servers_running_disk_based_diagnostics()
+                .collect::<Vec<_>>(),
+            [LanguageServerId(1)]
+        );
+    });
+
+    // All diagnostics are considered done, despite the old server's diagnostic
+    // task never completing.
+    fake_server.end_progress(progress_token);
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsFinished {
+            language_server_id: LanguageServerId(1)
+        }
+    );
+    project.update(cx, |project, _| {
+        assert_eq!(
+            project
+                .language_servers_running_disk_based_diagnostics()
+                .collect::<Vec<_>>(),
+            [LanguageServerId(0); 0]
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Publish diagnostics
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: vec![lsp2::Diagnostic {
+            range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
+            severity: Some(lsp2::DiagnosticSeverity::ERROR),
+            message: "the message".to_string(),
+            ..Default::default()
+        }],
+    });
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..1, false)
+                .map(|entry| entry.diagnostic.message.clone())
+                .collect::<Vec<_>>(),
+            ["the message".to_string()]
+        );
+    });
+    project.update(cx, |project, cx| {
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 1,
+                warning_count: 0,
+            }
+        );
+    });
+
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer.clone()], cx);
+    });
+
+    // The diagnostics are cleared.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..1, false)
+                .map(|entry| entry.diagnostic.message.clone())
+                .collect::<Vec<_>>(),
+            Vec::<String>::new(),
+        );
+    });
+    project.update(cx, |project, cx| {
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 0,
+                warning_count: 0,
+            }
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-lsp",
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Before restarting the server, report diagnostics with an unknown buffer version.
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(10000),
+        diagnostics: Vec::new(),
+    });
+    cx.executor().run_until_parked();
+
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer.clone()], cx);
+    });
+    let mut fake_server = fake_servers.next().await.unwrap();
+    let notification = fake_server
+        .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+        .await
+        .text_document;
+    assert_eq!(notification.version, 0);
+}
+
+#[gpui2::test]
+async fn test_toggling_enable_language_server(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut rust = Language::new(
+        LanguageConfig {
+            name: Arc::from("Rust"),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_rust_servers = rust
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "rust-lsp",
+            ..Default::default()
+        }))
+        .await;
+    let mut js = Language::new(
+        LanguageConfig {
+            name: Arc::from("JavaScript"),
+            path_suffixes: vec!["js".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_js_servers = js
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "js-lsp",
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
+        .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| {
+        project.languages.add(Arc::new(rust));
+        project.languages.add(Arc::new(js));
+    });
+
+    let _rs_buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+    let _js_buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
+        .await
+        .unwrap();
+
+    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
+    assert_eq!(
+        fake_rust_server_1
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/a.rs"
+    );
+
+    let mut fake_js_server = fake_js_servers.next().await.unwrap();
+    assert_eq!(
+        fake_js_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/b.js"
+    );
+
+    // Disable the Rust language server, ensuring that only that server gets stopped.
+    cx.update(|cx| {
+        cx.update_global(|settings: &mut SettingsStore, cx| {
+            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+                settings.languages.insert(
+                    Arc::from("Rust"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(false),
+                        ..Default::default()
+                    },
+                );
+            });
+        })
+    });
+    fake_rust_server_1
+        .receive_notification::<lsp2::notification::Exit>()
+        .await;
+
+    // Enable Rust and disable JavaScript language servers, ensuring that the
+    // former gets started again and that the latter stops.
+    cx.update(|cx| {
+        cx.update_global(|settings: &mut SettingsStore, cx| {
+            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+                settings.languages.insert(
+                    Arc::from("Rust"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(true),
+                        ..Default::default()
+                    },
+                );
+                settings.languages.insert(
+                    Arc::from("JavaScript"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(false),
+                        ..Default::default()
+                    },
+                );
+            });
+        })
+    });
+    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
+    assert_eq!(
+        fake_rust_server_2
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/a.rs"
+    );
+    fake_js_server
+        .receive_notification::<lsp2::notification::Exit>()
+        .await;
+}
+
+#[gpui2::test(iterations = 3)]
+async fn test_transforming_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            ..Default::default()
+        }))
+        .await;
+
+    let text = "
+        fn a() { A }
+        fn b() { BB }
+        fn c() { CCC }
+    "
+    .unindent();
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    let mut fake_server = fake_servers.next().await.unwrap();
+    let open_notification = fake_server
+        .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+        .await;
+
+    // Edit the buffer, moving the content down
+    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
+    let change_notification_1 = fake_server
+        .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+        .await;
+    assert!(change_notification_1.text_document.version > open_notification.text_document.version);
+
+    // Report some diagnostics for the initial version of the buffer
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(open_notification.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'BB'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                source: Some("disk".to_string()),
+                message: "undefined variable 'CCC'".to_string(),
+                ..Default::default()
+            },
+        ],
+    });
+
+    // The diagnostics have moved down since they were created.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(3, 9)..Point::new(3, 11),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string(),
+                        is_disk_based: true,
+                        group_id: 1,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                },
+                DiagnosticEntry {
+                    range: Point::new(4, 9)..Point::new(4, 12),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'CCC'".to_string(),
+                        is_disk_based: true,
+                        group_id: 2,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                }
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, 0..buffer.len()),
+            [
+                ("\n\nfn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn b() { ".to_string(), None),
+                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\n".to_string(), None),
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
+            [
+                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
+            ]
+        );
+    });
+
+    // Ensure overlapping diagnostics are highlighted correctly.
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(open_notification.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
+                severity: Some(DiagnosticSeverity::WARNING),
+                message: "unreachable statement".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+        ],
+    });
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(2, 9)..Point::new(2, 12),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::WARNING,
+                        message: "unreachable statement".to_string(),
+                        is_disk_based: true,
+                        group_id: 4,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                },
+                DiagnosticEntry {
+                    range: Point::new(2, 9)..Point::new(2, 10),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'A'".to_string(),
+                        is_disk_based: true,
+                        group_id: 3,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                }
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
+            [
+                ("fn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
+            [
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+    });
+
+    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
+    // changes since the last save.
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
+        buffer.edit(
+            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
+            None,
+            cx,
+        );
+        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
+    });
+    let change_notification_2 = fake_server
+        .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+        .await;
+    assert!(
+        change_notification_2.text_document.version > change_notification_1.text_document.version
+    );
+
+    // Handle out-of-order diagnostics
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(change_notification_2.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'BB'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::WARNING),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+        ],
+    });
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(2, 21)..Point::new(2, 22),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::WARNING,
+                        message: "undefined variable 'A'".to_string(),
+                        is_disk_based: true,
+                        group_id: 6,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                },
+                DiagnosticEntry {
+                    range: Point::new(3, 9)..Point::new(3, 14),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string(),
+                        is_disk_based: true,
+                        group_id: 5,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                }
+            ]
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_empty_diagnostic_ranges(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let text = concat!(
+        "let one = ;\n", //
+        "let two = \n",
+        "let three = 3;\n",
+    );
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    project.update(cx, |project, cx| {
+        project
+            .update_buffer_diagnostics(
+                &buffer,
+                LanguageServerId(0),
+                None,
+                vec![
+                    DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 1".to_string(),
+                            ..Default::default()
+                        },
+                    },
+                    DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 2".to_string(),
+                            ..Default::default()
+                        },
+                    },
+                ],
+                cx,
+            )
+            .unwrap();
+    });
+
+    // An empty range is extended forward to include the following character.
+    // At the end of a line, an empty range is extended backward to include
+    // the preceding character.
+    buffer.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let one = ", None),
+                (";", Some(DiagnosticSeverity::ERROR)),
+                ("\nlet two =", None),
+                (" ", Some(DiagnosticSeverity::ERROR)),
+                ("\nlet three = 3;\n", None)
+            ]
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
+        .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+
+    project.update(cx, |project, cx| {
+        project
+            .update_diagnostic_entries(
+                LanguageServerId(0),
+                Path::new("/dir/a.rs").to_owned(),
+                None,
+                vec![DiagnosticEntry {
+                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+                    diagnostic: Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        is_primary: true,
+                        message: "syntax error a1".to_string(),
+                        ..Default::default()
+                    },
+                }],
+                cx,
+            )
+            .unwrap();
+        project
+            .update_diagnostic_entries(
+                LanguageServerId(1),
+                Path::new("/dir/a.rs").to_owned(),
+                None,
+                vec![DiagnosticEntry {
+                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+                    diagnostic: Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        is_primary: true,
+                        message: "syntax error b1".to_string(),
+                        ..Default::default()
+                    },
+                }],
+                cx,
+            )
+            .unwrap();
+
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 2,
+                warning_count: 0,
+            }
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+    let text = "
+        fn a() {
+            f1();
+        }
+        fn b() {
+            f2();
+        }
+        fn c() {
+            f3();
+        }
+    "
+    .unindent();
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": text.clone(),
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    let mut fake_server = fake_servers.next().await.unwrap();
+    let lsp_document_version = fake_server
+        .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+        .await
+        .text_document
+        .version;
+
+    // Simulate editing the buffer after the language server computes some edits.
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit(
+            [(
+                Point::new(0, 0)..Point::new(0, 0),
+                "// above first function\n",
+            )],
+            None,
+            cx,
+        );
+        buffer.edit(
+            [(
+                Point::new(2, 0)..Point::new(2, 0),
+                "    // inside first function\n",
+            )],
+            None,
+            cx,
+        );
+        buffer.edit(
+            [(
+                Point::new(6, 4)..Point::new(6, 4),
+                "// inside second function ",
+            )],
+            None,
+            cx,
+        );
+
+        assert_eq!(
+            buffer.text(),
+            "
+                // above first function
+                fn a() {
+                    // inside first function
+                    f1();
+                }
+                fn b() {
+                    // inside second function f2();
+                }
+                fn c() {
+                    f3();
+                }
+            "
+            .unindent()
+        );
+    });
+
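+    // Interpret the LSP edits against the document version captured when the buffer was opened,
+    // remapping them across the buffer's subsequent edits.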
+    let edits = project
+        .update(cx, |project, cx| {
+            project.edits_from_lsp(
+                &buffer,
+                vec![
+                    // replace body of first function
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 0),
+                            lsp2::Position::new(3, 0),
+                        ),
+                        new_text: "
+                            fn a() {
+                                f10();
+                            }
+                            "
+                        .unindent(),
+                    },
+                    // edit inside second function
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(4, 6),
+                            lsp2::Position::new(4, 6),
+                        ),
+                        new_text: "00".into(),
+                    },
+                    // edit inside third function via two distinct edits
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(7, 5),
+                            lsp2::Position::new(7, 5),
+                        ),
+                        new_text: "4000".into(),
+                    },
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(7, 5),
+                            lsp2::Position::new(7, 6),
+                        ),
+                        new_text: "".into(),
+                    },
+                ],
+                LanguageServerId(0),
+                Some(lsp_document_version),
+                cx,
+            )
+        })
+        .await
+        .unwrap();
+
+    buffer.update(cx, |buffer, cx| {
+        for (range, new_text) in edits {
+            buffer.edit([(range, new_text)], None, cx);
+        }
+        assert_eq!(
+            buffer.text(),
+            "
+                // above first function
+                fn a() {
+                    // inside first function
+                    f10();
+                }
+                fn b() {
+                    // inside second function f200();
+                }
+                fn c() {
+                    f4000();
+                }
+                "
+            .unindent()
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let text = "
+        use a::b;
+        use a::c;
+
+        fn f() {
+            b();
+            c();
+        }
+    "
+    .unindent();
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": text.clone(),
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Simulate the language server sending us a small edit in the form of a very large diff.
+    // Rust-analyzer does this when performing a merge-imports code action.
+    let edits = project
+        .update(cx, |project, cx| {
+            project.edits_from_lsp(
+                &buffer,
+                [
+                    // Replace the first use statement without editing the semicolon.
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 4),
+                            lsp2::Position::new(0, 8),
+                        ),
+                        new_text: "a::{b, c}".into(),
+                    },
+                    // Reinsert the remainder of the file between the semicolon and the final
+                    // newline of the file.
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 9),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        new_text: "\n\n".into(),
+                    },
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 9),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        new_text: "
+                            fn f() {
+                                b();
+                                c();
+                            }"
+                        .unindent(),
+                    },
+                    // Delete everything after the first newline of the file.
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(1, 0),
+                            lsp2::Position::new(7, 0),
+                        ),
+                        new_text: "".into(),
+                    },
+                ],
+                LanguageServerId(0),
+                None,
+                cx,
+            )
+        })
+        .await
+        .unwrap();
+
+    buffer.update(cx, |buffer, cx| {
+        let edits = edits
+            .into_iter()
+            .map(|(range, text)| {
+                (
+                    range.start.to_point(buffer)..range.end.to_point(buffer),
+                    text,
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            edits,
+            [
+                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+                (Point::new(1, 0)..Point::new(2, 0), "".into())
+            ]
+        );
+
+        for (range, new_text) in edits {
+            buffer.edit([(range, new_text)], None, cx);
+        }
+        assert_eq!(
+            buffer.text(),
+            "
+                use a::{b, c};
+
+                fn f() {
+                    b();
+                    c();
+                }
+            "
+            .unindent()
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_invalid_edits_from_lsp2(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let text = "
+        use a::b;
+        use a::c;
+
+        fn f() {
+            b();
+            c();
+        }
+    "
+    .unindent();
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": text.clone(),
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Simulate the language server sending edits out of order, with ranges that are
+    // sometimes inverted or point to invalid locations.
+    let edits = project
+        .update(cx, |project, cx| {
+            project.edits_from_lsp(
+                &buffer,
+                [
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 9),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        new_text: "\n\n".into(),
+                    },
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 8),
+                            lsp2::Position::new(0, 4),
+                        ),
+                        new_text: "a::{b, c}".into(),
+                    },
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(1, 0),
+                            lsp2::Position::new(99, 0),
+                        ),
+                        new_text: "".into(),
+                    },
+                    lsp2::TextEdit {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 9),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        new_text: "
+                            fn f() {
+                                b();
+                                c();
+                            }"
+                        .unindent(),
+                    },
+                ],
+                LanguageServerId(0),
+                None,
+                cx,
+            )
+        })
+        .await
+        .unwrap();
+
+    buffer.update(cx, |buffer, cx| {
+        let edits = edits
+            .into_iter()
+            .map(|(range, text)| {
+                (
+                    range.start.to_point(buffer)..range.end.to_point(buffer),
+                    text,
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            edits,
+            [
+                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+                (Point::new(1, 0)..Point::new(2, 0), "".into())
+            ]
+        );
+
+        for (range, new_text) in edits {
+            buffer.edit([(range, new_text)], None, cx);
+        }
+        assert_eq!(
+            buffer.text(),
+            "
+                use a::{b, c};
+
+                fn f() {
+                    b();
+                    c();
+                }
+            "
+            .unindent()
+        );
+    });
+}
+
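+// Collects the text of `range` as (text, severity) pairs, merging adjacent chunks
+// that share the same diagnostic severity.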
+fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
+    buffer: &Buffer,
+    range: Range<T>,
+) -> Vec<(String, Option<DiagnosticSeverity>)> {
+    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
+    for chunk in buffer.snapshot().chunks(range, true) {
+        if chunks.last().map_or(false, |prev_chunk| {
+            prev_chunk.1 == chunk.diagnostic_severity
+        }) {
+            chunks.last_mut().unwrap().0.push_str(chunk.text);
+        } else {
+            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
+        }
+    }
+    chunks
+}
+
+#[gpui2::test(iterations = 10)]
+async fn test_definition(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": "const fn a() { A }",
+            "b.rs": "const y: i32 = crate::a()",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+        .await
+        .unwrap();
+
+    let fake_server = fake_servers.next().await.unwrap();
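+    // The fake server resolves the definition to a file outside the project's root,
+    // which should be opened in a new, invisible worktree.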
+    fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
+        let params = params.text_document_position_params;
+        assert_eq!(
+            params.text_document.uri.to_file_path().unwrap(),
+            Path::new("/dir/b.rs"),
+        );
+        assert_eq!(params.position, lsp2::Position::new(0, 22));
+
+        Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
+            lsp2::Location::new(
+                lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+                lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+            ),
+        )))
+    });
+
+    let mut definitions = project
+        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
+        .await
+        .unwrap();
+
+    // Assert that no new language server was started.
+    cx.executor().run_until_parked();
+    assert!(fake_servers.try_next().is_err());
+
+    assert_eq!(definitions.len(), 1);
+    let definition = definitions.pop().unwrap();
+    cx.update(|cx| {
+        let target_buffer = definition.target.buffer.read(cx);
+        assert_eq!(
+            target_buffer
+                .file()
+                .unwrap()
+                .as_local()
+                .unwrap()
+                .abs_path(cx),
+            Path::new("/dir/a.rs"),
+        );
+        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
+        assert_eq!(
+            list_worktrees(&project, cx),
+            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
+        );
+
+        drop(definition);
+    });
+    cx.update(|cx| {
+        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
+    });
+
+    fn list_worktrees<'a>(
+        project: &'a Model<Project>,
+        cx: &'a AppContext,
+    ) -> Vec<(&'a Path, bool)> {
+        project
+            .read(cx)
+            .worktrees()
+            .map(|worktree| {
+                let worktree = worktree.read(cx);
+                (
+                    worktree.as_local().unwrap().abs_path().as_ref(),
+                    worktree.is_visible(),
+                )
+            })
+            .collect::<Vec<_>>()
+    }
+}
+
+#[gpui2::test]
+async fn test_completions_without_edit_ranges(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "TypeScript".into(),
+            path_suffixes: vec!["ts".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_typescript::language_typescript()),
+    );
+    let mut fake_language_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            capabilities: lsp2::ServerCapabilities {
+                completion_provider: Some(lsp2::CompletionOptions {
+                    trigger_characters: Some(vec![":".to_string()]),
+                    ..Default::default()
+                }),
+                ..Default::default()
+            },
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.ts": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+        .await
+        .unwrap();
+
+    let fake_server = fake_language_servers.next().await.unwrap();
+
+    let text = "let a = b.fqn";
+    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+    let completions = project.update(cx, |project, cx| {
+        project.completions(&buffer, text.len(), cx)
+    });
+
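+    // Respond with a completion item that has no edit range; its insert text should be
+    // used as the new text, replacing the word before the cursor.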
+    fake_server
+        .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+            Ok(Some(lsp2::CompletionResponse::Array(vec![
+                lsp2::CompletionItem {
+                    label: "fullyQualifiedName?".into(),
+                    insert_text: Some("fullyQualifiedName".into()),
+                    ..Default::default()
+                },
+            ])))
+        })
+        .next()
+        .await;
+    let completions = completions.await.unwrap();
+    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
+    assert_eq!(completions.len(), 1);
+    assert_eq!(completions[0].new_text, "fullyQualifiedName");
+    assert_eq!(
+        completions[0].old_range.to_offset(&snapshot),
+        text.len() - 3..text.len()
+    );
+
+    let text = "let a = \"atoms/cmp\"";
+    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+    let completions = project.update(cx, |project, cx| {
+        project.completions(&buffer, text.len() - 1, cx)
+    });
+
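+    // Respond with a completion item that has neither an insert text nor an edit range;
+    // the label is inserted in place of the word fragment preceding the cursor.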
+    fake_server
+        .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+            Ok(Some(lsp2::CompletionResponse::Array(vec![
+                lsp2::CompletionItem {
+                    label: "component".into(),
+                    ..Default::default()
+                },
+            ])))
+        })
+        .next()
+        .await;
+    let completions = completions.await.unwrap();
+    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
+    assert_eq!(completions.len(), 1);
+    assert_eq!(completions[0].new_text, "component");
+    assert_eq!(
+        completions[0].old_range.to_offset(&snapshot),
+        text.len() - 4..text.len() - 1
+    );
+}
+
+#[gpui2::test]
+async fn test_completions_with_carriage_returns(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "TypeScript".into(),
+            path_suffixes: vec!["ts".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_typescript::language_typescript()),
+    );
+    let mut fake_language_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            capabilities: lsp2::ServerCapabilities {
+                completion_provider: Some(lsp2::CompletionOptions {
+                    trigger_characters: Some(vec![":".to_string()]),
+                    ..Default::default()
+                }),
+                ..Default::default()
+            },
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.ts": "",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+        .await
+        .unwrap();
+
+    let fake_server = fake_language_servers.next().await.unwrap();
+
+    let text = "let a = b.fqn";
+    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+    let completions = project.update(cx, |project, cx| {
+        project.completions(&buffer, text.len(), cx)
+    });
+
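+    // Respond with an insert text containing carriage returns; they should be
+    // normalized to newlines in the resulting completion.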
+    fake_server
+        .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+            Ok(Some(lsp2::CompletionResponse::Array(vec![
+                lsp2::CompletionItem {
+                    label: "fullyQualifiedName?".into(),
+                    insert_text: Some("fully\rQualified\r\nName".into()),
+                    ..Default::default()
+                },
+            ])))
+        })
+        .next()
+        .await;
+    let completions = completions.await.unwrap();
+    assert_eq!(completions.len(), 1);
+    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
+}
+
+#[gpui2::test(iterations = 10)]
+async fn test_apply_code_actions_with_commands(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "TypeScript".into(),
+            path_suffixes: vec!["ts".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.ts": "a",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+        .await
+        .unwrap();
+
+    let fake_server = fake_language_servers.next().await.unwrap();
+
+    // The language server returns code actions that contain commands but no edits.
+    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
+    fake_server
+        .handle_request::<lsp2::request::CodeActionRequest, _, _>(|_, _| async move {
+            Ok(Some(vec![
+                lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
+                    title: "The code action".into(),
+                    command: Some(lsp2::Command {
+                        title: "The command".into(),
+                        command: "_the/command".into(),
+                        arguments: Some(vec![json!("the-argument")]),
+                    }),
+                    ..Default::default()
+                }),
+                lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
+                    title: "two".into(),
+                    ..Default::default()
+                }),
+            ]))
+        })
+        .next()
+        .await;
+
+    let action = actions.await.unwrap()[0].clone();
+    let apply = project.update(cx, |project, cx| {
+        project.apply_code_action(buffer.clone(), action, true, cx)
+    });
+
+    // Resolving the code action does not populate its edits. In the absence of
+    // edits, we must execute the given command.
+    fake_server.handle_request::<lsp2::request::CodeActionResolveRequest, _, _>(
+        |action, _| async move { Ok(action) },
+    );
+
+    // While executing the command, the language server sends the editor
+    // a `workspace/applyEdit` request.
+    fake_server
+        .handle_request::<lsp2::request::ExecuteCommand, _, _>({
+            let fake = fake_server.clone();
+            move |params, _| {
+                assert_eq!(params.command, "_the/command");
+                let fake = fake.clone();
+                async move {
+                    fake.server
+                        .request::<lsp2::request::ApplyWorkspaceEdit>(
+                            lsp2::ApplyWorkspaceEditParams {
+                                label: None,
+                                edit: lsp2::WorkspaceEdit {
+                                    changes: Some(
+                                        [(
+                                            lsp2::Url::from_file_path("/dir/a.ts").unwrap(),
+                                            vec![lsp2::TextEdit {
+                                                range: lsp2::Range::new(
+                                                    lsp2::Position::new(0, 0),
+                                                    lsp2::Position::new(0, 0),
+                                                ),
+                                                new_text: "X".into(),
+                                            }],
+                                        )]
+                                        .into_iter()
+                                        .collect(),
+                                    ),
+                                    ..Default::default()
+                                },
+                            },
+                        )
+                        .await
+                        .unwrap();
+                    Ok(Some(json!(null)))
+                }
+            }
+        })
+        .next()
+        .await;
+
+    // Applying the code action returns a project transaction containing the edits
+    // sent by the language server in its `workspace/applyEdit` request.
+    let transaction = apply.await.unwrap();
+    assert!(transaction.0.contains_key(&buffer));
+    buffer.update(cx, |buffer, cx| {
+        assert_eq!(buffer.text(), "Xa");
+        buffer.undo(cx);
+        assert_eq!(buffer.text(), "a");
+    });
+}
+
+#[gpui2::test(iterations = 10)]
+async fn test_save_file(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "file1": "the old contents",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+        .await
+        .unwrap();
+    buffer.update(cx, |buffer, cx| {
+        assert_eq!(buffer.text(), "the old contents");
+        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
+    });
+
+    project
+        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+        .await
+        .unwrap();
+
+    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
+    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
+}
+
+#[gpui2::test]
+async fn test_save_in_single_file_worktree(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "file1": "the old contents",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+        .await
+        .unwrap();
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
+    });
+
+    project
+        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+        .await
+        .unwrap();
+
+    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
+    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
+}
+
+#[gpui2::test]
+async fn test_save_as(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({})).await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    let languages = project.update(cx, |project, _| project.languages().clone());
+    languages.register(
+        "/some/path",
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".into()],
+            ..Default::default()
+        },
+        tree_sitter_rust::language(),
+        vec![],
+        |_| Default::default(),
+    );
+
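+    // Create an untitled buffer; with no associated file, it starts out as Plain Text.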
+    let buffer = project.update(cx, |project, cx| {
+        project.create_buffer("", None, cx).unwrap()
+    });
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit([(0..0, "abc")], None, cx);
+        assert!(buffer.is_dirty());
+        assert!(!buffer.has_conflict());
+        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
+    });
+    project
+        .update(cx, |project, cx| {
+            project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
+        })
+        .await
+        .unwrap();
+    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, cx| {
+        assert_eq!(
+            buffer.file().unwrap().full_path(cx),
+            Path::new("dir/file1.rs")
+        );
+        assert!(!buffer.is_dirty());
+        assert!(!buffer.has_conflict());
+        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
+    });
+
+    let opened_buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/dir/file1.rs", cx)
+        })
+        .await
+        .unwrap();
+    assert_eq!(opened_buffer, buffer);
+}
+
+#[gpui2::test(retries = 5)]
+async fn test_rescan_and_remote_updates(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+
+    let dir = temp_tree(json!({
+        "a": {
+            "file1": "",
+            "file2": "",
+            "file3": "",
+        },
+        "b": {
+            "c": {
+                "file4": "",
+                "file5": "",
+            }
+        }
+    }));
+
+    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
+    let rpc = project.update(cx, |p, _| p.client.clone());
+
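+    // Helpers for opening a buffer at a path and looking up a worktree entry id by path.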
+    let buffer_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
+        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
+        async move { buffer.await.unwrap() }
+    };
+    let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
+        project.update(cx, |project, cx| {
+            let tree = project.worktrees().next().unwrap();
+            tree.read(cx)
+                .entry_for_path(path)
+                .unwrap_or_else(|| panic!("no entry for path {}", path))
+                .id
+        })
+    };
+
+    let buffer2 = buffer_for_path("a/file2", cx).await;
+    let buffer3 = buffer_for_path("a/file3", cx).await;
+    let buffer4 = buffer_for_path("b/c/file4", cx).await;
+    let buffer5 = buffer_for_path("b/c/file5", cx).await;
+
+    let file2_id = id_for_path("a/file2", cx);
+    let file3_id = id_for_path("a/file3", cx);
+    let file4_id = id_for_path("b/c/file4", cx);
+
+    // Create a remote copy of this worktree.
+    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+
+    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
+
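+    // Record the updates emitted by the local worktree so they can be replayed
+    // on the remote worktree below.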
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    tree.update(cx, |tree, cx| {
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
+
+    cx.executor().run_until_parked();
+
+    cx.update(|cx| {
+        assert!(!buffer2.read(cx).is_dirty());
+        assert!(!buffer3.read(cx).is_dirty());
+        assert!(!buffer4.read(cx).is_dirty());
+        assert!(!buffer5.read(cx).is_dirty());
+    });
+
+    // Rename and delete files and directories.
+    tree.flush_fs_events(cx).await;
+    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
+    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
+    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
+    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    let expected_paths = vec![
+        "a",
+        "a/file1",
+        "a/file2.new",
+        "b",
+        "d",
+        "d/file3",
+        "d/file4",
+    ];
+
+    cx.update(|app| {
+        assert_eq!(
+            tree.read(app)
+                .paths()
+                .map(|p| p.to_str().unwrap())
+                .collect::<Vec<_>>(),
+            expected_paths
+        );
+    });
+
+    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
+    assert_eq!(id_for_path("d/file3", cx), file3_id);
+    assert_eq!(id_for_path("d/file4", cx), file4_id);
+
+    cx.update(|cx| {
+        assert_eq!(
+            buffer2.read(cx).file().unwrap().path().as_ref(),
+            Path::new("a/file2.new")
+        );
+        assert_eq!(
+            buffer3.read(cx).file().unwrap().path().as_ref(),
+            Path::new("d/file3")
+        );
+        assert_eq!(
+            buffer4.read(cx).file().unwrap().path().as_ref(),
+            Path::new("d/file4")
+        );
+        assert_eq!(
+            buffer5.read(cx).file().unwrap().path().as_ref(),
+            Path::new("b/c/file5")
+        );
+
+        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
+        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
+        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
+        assert!(buffer5.read(cx).file().unwrap().is_deleted());
+    });
+
+    // Update the remote worktree. Check that it becomes consistent with the
+    // local worktree.
+    cx.executor().run_until_parked();
+
+    remote.update(cx, |remote, _| {
+        for update in updates.lock().drain(..) {
+            remote.as_remote_mut().unwrap().update_from_remote(update);
+        }
+    });
+    cx.executor().run_until_parked();
+    remote.update(cx, |remote, _| {
+        assert_eq!(
+            remote
+                .paths()
+                .map(|p| p.to_str().unwrap())
+                .collect::<Vec<_>>(),
+            expected_paths
+        );
+    });
+}
+
+#[gpui2::test(iterations = 10)]
+async fn test_buffer_identity_across_renames(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a": {
+                "file1": "",
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, [Path::new("/dir")], cx).await;
+    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+    let tree_id = tree.update(cx, |tree, _| tree.id());
+
+    let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
+        project.update(cx, |project, cx| {
+            let tree = project.worktrees().next().unwrap();
+            tree.read(cx)
+                .entry_for_path(path)
+                .unwrap_or_else(|| panic!("no entry for path {}", path))
+                .id
+        })
+    };
+
+    let dir_id = id_for_path("a", cx);
+    let file_id = id_for_path("a/file1", cx);
+    let buffer = project
+        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
+        .await
+        .unwrap();
+    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
+
+    project
+        .update(cx, |project, cx| {
+            project.rename_entry(dir_id, Path::new("b"), cx)
+        })
+        .unwrap()
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+
+    assert_eq!(id_for_path("b", cx), dir_id);
+    assert_eq!(id_for_path("b/file1", cx), file_id);
+    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
+}
+
+#[gpui2::test]
+async fn test_buffer_deduping(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.txt": "a-contents",
+            "b.txt": "b-contents",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    // Spawn multiple tasks to open paths, repeating some paths.
+    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
+        (
+            p.open_local_buffer("/dir/a.txt", cx),
+            p.open_local_buffer("/dir/b.txt", cx),
+            p.open_local_buffer("/dir/a.txt", cx),
+        )
+    });
+
+    let buffer_a_1 = buffer_a_1.await.unwrap();
+    let buffer_a_2 = buffer_a_2.await.unwrap();
+    let buffer_b = buffer_b.await.unwrap();
+    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
+    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
+
+    // There is only one buffer per path.
+    let buffer_a_id = buffer_a_1.entity_id();
+    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
+
+    // Open the same path again while it is still open.
+    drop(buffer_a_1);
+    let buffer_a_3 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
+        .await
+        .unwrap();
+
+    // There's still only one buffer per path.
+    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
+}
+
+#[gpui2::test]
+async fn test_buffer_is_dirty(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "file1": "abc",
+            "file2": "def",
+            "file3": "ghi",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    let buffer1 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+        .await
+        .unwrap();
+    let events = Arc::new(Mutex::new(Vec::new()));
+
+    // Initially, the buffer isn't dirty.
+    buffer1.update(cx, |buffer, cx| {
+        cx.subscribe(&buffer1, {
+            let events = events.clone();
+            move |_, _, event, _| match event {
+                BufferEvent::Operation(_) => {}
+                _ => events.lock().push(event.clone()),
+            }
+        })
+        .detach();
+
+        assert!(!buffer.is_dirty());
+        assert!(events.lock().is_empty());
+
+        buffer.edit([(1..2, "")], None, cx);
+    });
+
+    // After the first edit, the buffer is dirty and emits a dirtied event.
+    buffer1.update(cx, |buffer, cx| {
+        assert!(buffer.text() == "ac");
+        assert!(buffer.is_dirty());
+        assert_eq!(
+            *events.lock(),
+            &[language2::Event::Edited, language2::Event::DirtyChanged]
+        );
+        events.lock().clear();
+        buffer.did_save(
+            buffer.version(),
+            buffer.as_rope().fingerprint(),
+            buffer.file().unwrap().mtime(),
+            cx,
+        );
+    });
+
+    // After saving, the buffer is not dirty and emits a saved event.
+    buffer1.update(cx, |buffer, cx| {
+        assert!(!buffer.is_dirty());
+        assert_eq!(*events.lock(), &[language2::Event::Saved]);
+        events.lock().clear();
+
+        buffer.edit([(1..1, "B")], None, cx);
+        buffer.edit([(2..2, "D")], None, cx);
+    });
+
+    // After editing again, the buffer is dirty and emits another dirty event.
+    buffer1.update(cx, |buffer, cx| {
+        assert!(buffer.text() == "aBDc");
+        assert!(buffer.is_dirty());
+        assert_eq!(
+            *events.lock(),
+            &[
+                language2::Event::Edited,
+                language2::Event::DirtyChanged,
+                language2::Event::Edited,
+            ],
+        );
+        events.lock().clear();
+
+        // After restoring the buffer to its previously-saved state,
+        // the buffer is not considered dirty anymore.
+        buffer.edit([(1..3, "")], None, cx);
+        assert!(buffer.text() == "ac");
+        assert!(!buffer.is_dirty());
+    });
+
+    assert_eq!(
+        *events.lock(),
+        &[language2::Event::Edited, language2::Event::DirtyChanged]
+    );
+
+    // When a file is deleted, the buffer is considered dirty.
+    let events = Arc::new(Mutex::new(Vec::new()));
+    let buffer2 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
+        .await
+        .unwrap();
+    buffer2.update(cx, |_, cx| {
+        cx.subscribe(&buffer2, {
+            let events = events.clone();
+            move |_, _, event, _| events.lock().push(event.clone())
+        })
+        .detach();
+    });
+
+    fs.remove_file("/dir/file2".as_ref(), Default::default())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
+    assert_eq!(
+        *events.lock(),
+        &[
+            language2::Event::DirtyChanged,
+            language2::Event::FileHandleChanged
+        ]
+    );
+
+    // When a file is already dirty when deleted, we don't emit a Dirtied event.
+    let events = Arc::new(Mutex::new(Vec::new()));
+    let buffer3 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
+        .await
+        .unwrap();
+    buffer3.update(cx, |_, cx| {
+        cx.subscribe(&buffer3, {
+            let events = events.clone();
+            move |_, _, event, _| events.lock().push(event.clone())
+        })
+        .detach();
+    });
+
+    buffer3.update(cx, |buffer, cx| {
+        buffer.edit([(0..0, "x")], None, cx);
+    });
+    events.lock().clear();
+    fs.remove_file("/dir/file3".as_ref(), Default::default())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+    assert_eq!(*events.lock(), &[language2::Event::FileHandleChanged]);
+    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
+}
+
+#[gpui2::test]
+async fn test_buffer_file_changes_on_disk(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let initial_contents = "aaa\nbbbbb\nc\n";
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "the-file": initial_contents,
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
+        .await
+        .unwrap();
+
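+    // Create anchors within the first three lines so their positions can be checked
+    // after the file is reloaded from disk.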
+    let anchors = (0..3)
+        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
+        .collect::<Vec<_>>();
+
+    // Change the file on disk, adding two new lines of text, and removing
+    // one line.
+    buffer.update(cx, |buffer, _| {
+        assert!(!buffer.is_dirty());
+        assert!(!buffer.has_conflict());
+    });
+    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
+    fs.save(
+        "/dir/the-file".as_ref(),
+        &new_contents.into(),
+        LineEnding::Unix,
+    )
+    .await
+    .unwrap();
+
+    // Because the buffer was not modified, it is reloaded from disk. Its
+    // contents are edited according to the diff between the old and new
+    // file contents.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(buffer.text(), new_contents);
+        assert!(!buffer.is_dirty());
+        assert!(!buffer.has_conflict());
+
+        let anchor_positions = anchors
+            .iter()
+            .map(|anchor| anchor.to_point(&*buffer))
+            .collect::<Vec<_>>();
+        assert_eq!(
+            anchor_positions,
+            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
+        );
+    });
+
+    // Modify the buffer
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit([(0..0, " ")], None, cx);
+        assert!(buffer.is_dirty());
+        assert!(!buffer.has_conflict());
+    });
+
+    // Change the file on disk again, adding blank lines to the beginning.
+    fs.save(
+        "/dir/the-file".as_ref(),
+        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
+        LineEnding::Unix,
+    )
+    .await
+    .unwrap();
+
+    // Because the buffer is modified, it doesn't reload from disk, but is
+    // marked as having a conflict.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert!(buffer.has_conflict());
+    });
+}
+
+#[gpui2::test]
+async fn test_buffer_line_endings(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "file1": "a\nb\nc\n",
+            "file2": "one\r\ntwo\r\nthree\r\n",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+    let buffer1 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+        .await
+        .unwrap();
+    let buffer2 = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
+        .await
+        .unwrap();
+
+    buffer1.update(cx, |buffer, _| {
+        assert_eq!(buffer.text(), "a\nb\nc\n");
+        assert_eq!(buffer.line_ending(), LineEnding::Unix);
+    });
+    buffer2.update(cx, |buffer, _| {
+        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
+        assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    });
+
+    // Change a file's line endings on disk from unix to windows. The buffer's
+    // state updates correctly.
+    fs.save(
+        "/dir/file1".as_ref(),
+        &"aaa\nb\nc\n".into(),
+        LineEnding::Windows,
+    )
+    .await
+    .unwrap();
+    cx.executor().run_until_parked();
+    buffer1.update(cx, |buffer, _| {
+        assert_eq!(buffer.text(), "aaa\nb\nc\n");
+        assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    });
+
+    // Save a file with windows line endings. The file is written correctly.
+    buffer2.update(cx, |buffer, cx| {
+        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
+    });
+    project
+        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
+        .await
+        .unwrap();
+    assert_eq!(
+        fs.load("/dir/file2".as_ref()).await.unwrap(),
+        "one\r\ntwo\r\nthree\r\nfour\r\n",
+    );
+}
+
+#[gpui2::test]
+async fn test_grouped_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/the-dir",
+        json!({
+            "a.rs": "
+                fn foo(mut v: Vec<usize>) {
+                    for x in &v {
+                        v.push(1);
+                    }
+                }
+            "
+            .unindent(),
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
+    let message = lsp2::PublishDiagnosticsParams {
+        uri: buffer_uri.clone(),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
+                severity: Some(DiagnosticSeverity::WARNING),
+                message: "error 1".to_string(),
+                related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+                    location: lsp2::Location {
+                        uri: buffer_uri.clone(),
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(1, 8),
+                            lsp2::Position::new(1, 9),
+                        ),
+                    },
+                    message: "error 1 hint 1".to_string(),
+                }]),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
+                severity: Some(DiagnosticSeverity::HINT),
+                message: "error 1 hint 1".to_string(),
+                related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+                    location: lsp2::Location {
+                        uri: buffer_uri.clone(),
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(1, 8),
+                            lsp2::Position::new(1, 9),
+                        ),
+                    },
+                    message: "original diagnostic".to_string(),
+                }]),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(2, 8), lsp2::Position::new(2, 17)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "error 2".to_string(),
+                related_information: Some(vec![
+                    lsp2::DiagnosticRelatedInformation {
+                        location: lsp2::Location {
+                            uri: buffer_uri.clone(),
+                            range: lsp2::Range::new(
+                                lsp2::Position::new(1, 13),
+                                lsp2::Position::new(1, 15),
+                            ),
+                        },
+                        message: "error 2 hint 1".to_string(),
+                    },
+                    lsp2::DiagnosticRelatedInformation {
+                        location: lsp2::Location {
+                            uri: buffer_uri.clone(),
+                            range: lsp2::Range::new(
+                                lsp2::Position::new(1, 13),
+                                lsp2::Position::new(1, 15),
+                            ),
+                        },
+                        message: "error 2 hint 2".to_string(),
+                    },
+                ]),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
+                severity: Some(DiagnosticSeverity::HINT),
+                message: "error 2 hint 1".to_string(),
+                related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+                    location: lsp2::Location {
+                        uri: buffer_uri.clone(),
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(2, 8),
+                            lsp2::Position::new(2, 17),
+                        ),
+                    },
+                    message: "original diagnostic".to_string(),
+                }]),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
+                severity: Some(DiagnosticSeverity::HINT),
+                message: "error 2 hint 2".to_string(),
+                related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+                    location: lsp2::Location {
+                        uri: buffer_uri,
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(2, 8),
+                            lsp2::Position::new(2, 17),
+                        ),
+                    },
+                    message: "original diagnostic".to_string(),
+                }]),
+                ..Default::default()
+            },
+        ],
+        version: None,
+    };
+
+    project
+        .update(cx, |p, cx| {
+            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
+        })
+        .unwrap();
+    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
+
+    assert_eq!(
+        buffer
+            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+            .collect::<Vec<_>>(),
+        &[
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::WARNING,
+                    message: "error 1".to_string(),
+                    group_id: 1,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 1 hint 1".to_string(),
+                    group_id: 1,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 2 hint 1".to_string(),
+                    group_id: 0,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 2 hint 2".to_string(),
+                    group_id: 0,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(2, 8)..Point::new(2, 17),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::ERROR,
+                    message: "error 2".to_string(),
+                    group_id: 0,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            }
+        ]
+    );
+
+    assert_eq!(
+        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
+        &[
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 2 hint 1".to_string(),
+                    group_id: 0,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 13)..Point::new(1, 15),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 2 hint 2".to_string(),
+                    group_id: 0,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(2, 8)..Point::new(2, 17),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::ERROR,
+                    message: "error 2".to_string(),
+                    group_id: 0,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            }
+        ]
+    );
+
+    assert_eq!(
+        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
+        &[
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::WARNING,
+                    message: "error 1".to_string(),
+                    group_id: 1,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            },
+            DiagnosticEntry {
+                range: Point::new(1, 8)..Point::new(1, 9),
+                diagnostic: Diagnostic {
+                    severity: DiagnosticSeverity::HINT,
+                    message: "error 1 hint 1".to_string(),
+                    group_id: 1,
+                    is_primary: false,
+                    ..Default::default()
+                }
+            },
+        ]
+    );
+}
+
+#[gpui2::test]
+async fn test_rename(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            capabilities: lsp2::ServerCapabilities {
+                rename_provider: Some(lsp2::OneOf::Right(lsp2::RenameOptions {
+                    prepare_provider: Some(true),
+                    work_done_progress_options: Default::default(),
+                })),
+                ..Default::default()
+            },
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "one.rs": "const ONE: usize = 1;",
+            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/dir/one.rs", cx)
+        })
+        .await
+        .unwrap();
+
+    let fake_server = fake_servers.next().await.unwrap();
+
+    let response = project.update(cx, |project, cx| {
+        project.prepare_rename(buffer.clone(), 7, cx)
+    });
+    fake_server
+        .handle_request::<lsp2::request::PrepareRenameRequest, _, _>(|params, _| async move {
+            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
+            assert_eq!(params.position, lsp2::Position::new(0, 7));
+            Ok(Some(lsp2::PrepareRenameResponse::Range(lsp2::Range::new(
+                lsp2::Position::new(0, 6),
+                lsp2::Position::new(0, 9),
+            ))))
+        })
+        .next()
+        .await
+        .unwrap();
+    let range = response.await.unwrap().unwrap();
+    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
+    assert_eq!(range, 6..9);
+
+    let response = project.update(cx, |project, cx| {
+        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
+    });
+    fake_server
+        .handle_request::<lsp2::request::Rename, _, _>(|params, _| async move {
+            assert_eq!(
+                params.text_document_position.text_document.uri.as_str(),
+                "file:///dir/one.rs"
+            );
+            assert_eq!(
+                params.text_document_position.position,
+                lsp2::Position::new(0, 7)
+            );
+            assert_eq!(params.new_name, "THREE");
+            Ok(Some(lsp2::WorkspaceEdit {
+                changes: Some(
+                    [
+                        (
+                            lsp2::Url::from_file_path("/dir/one.rs").unwrap(),
+                            vec![lsp2::TextEdit::new(
+                                lsp2::Range::new(
+                                    lsp2::Position::new(0, 6),
+                                    lsp2::Position::new(0, 9),
+                                ),
+                                "THREE".to_string(),
+                            )],
+                        ),
+                        (
+                            lsp2::Url::from_file_path("/dir/two.rs").unwrap(),
+                            vec![
+                                lsp2::TextEdit::new(
+                                    lsp2::Range::new(
+                                        lsp2::Position::new(0, 24),
+                                        lsp2::Position::new(0, 27),
+                                    ),
+                                    "THREE".to_string(),
+                                ),
+                                lsp2::TextEdit::new(
+                                    lsp2::Range::new(
+                                        lsp2::Position::new(0, 35),
+                                        lsp2::Position::new(0, 38),
+                                    ),
+                                    "THREE".to_string(),
+                                ),
+                            ],
+                        ),
+                    ]
+                    .into_iter()
+                    .collect(),
+                ),
+                ..Default::default()
+            }))
+        })
+        .next()
+        .await
+        .unwrap();
+    let mut transaction = response.await.unwrap().0;
+    assert_eq!(transaction.len(), 2);
+    assert_eq!(
+        transaction
+            .remove_entry(&buffer)
+            .unwrap()
+            .0
+            .update(cx, |buffer, _| buffer.text()),
+        "const THREE: usize = 1;"
+    );
+    assert_eq!(
+        transaction
+            .into_keys()
+            .next()
+            .unwrap()
+            .update(cx, |buffer, _| buffer.text()),
+        "const TWO: usize = one::THREE + one::THREE;"
+    );
+}
+
+#[gpui2::test]
+async fn test_search(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "one.rs": "const ONE: usize = 1;",
+            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
+            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
+            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("two.rs".to_string(), vec![6..9]),
+            ("three.rs".to_string(), vec![37..40])
+        ])
+    );
+
+    let buffer_4 = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/dir/four.rs", cx)
+        })
+        .await
+        .unwrap();
+    buffer_4.update(cx, |buffer, cx| {
+        let text = "two::TWO";
+        buffer.edit([(20..28, text), (31..43, text)], None, cx);
+    });
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("two.rs".to_string(), vec![6..9]),
+            ("three.rs".to_string(), vec![37..40]),
+            ("four.rs".to_string(), vec![25..28, 36..39])
+        ])
+    );
+}
+
+#[gpui2::test]
+async fn test_search_with_inclusions(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let search_query = "file";
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "one.rs": r#"// Rust file one"#,
+            "one.ts": r#"// TypeScript file one"#,
+            "two.rs": r#"// Rust file two"#,
+            "two.ts": r#"// TypeScript file two"#,
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    assert!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![PathMatcher::new("*.odd").unwrap()],
+                Vec::new()
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap()
+        .is_empty(),
+        "If no inclusions match, no files should be returned"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![PathMatcher::new("*.rs").unwrap()],
+                Vec::new()
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.rs".to_string(), vec![8..12]),
+            ("two.rs".to_string(), vec![8..12]),
+        ]),
+        "Rust only search should give only Rust files"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap(),
+                ],
+                Vec::new()
+            ).unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.ts".to_string(), vec![14..18]),
+            ("two.ts".to_string(), vec![14..18]),
+        ]),
+        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![
+                    PathMatcher::new("*.rs").unwrap(),
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap(),
+                ],
+                Vec::new()
+            ).unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.rs".to_string(), vec![8..12]),
+            ("one.ts".to_string(), vec![14..18]),
+            ("two.rs".to_string(), vec![8..12]),
+            ("two.ts".to_string(), vec![14..18]),
+        ]),
+        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
+    );
+}
+
+#[gpui2::test]
+async fn test_search_with_exclusions(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let search_query = "file";
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "one.rs": r#"// Rust file one"#,
+            "one.ts": r#"// TypeScript file one"#,
+            "two.rs": r#"// Rust file two"#,
+            "two.ts": r#"// TypeScript file two"#,
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                Vec::new(),
+                vec![PathMatcher::new("*.odd").unwrap()],
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.rs".to_string(), vec![8..12]),
+            ("one.ts".to_string(), vec![14..18]),
+            ("two.rs".to_string(), vec![8..12]),
+            ("two.ts".to_string(), vec![14..18]),
+        ]),
+        "If no exclusions match, all files should be returned"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                Vec::new(),
+                vec![PathMatcher::new("*.rs").unwrap()],
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.ts".to_string(), vec![14..18]),
+            ("two.ts".to_string(), vec![14..18]),
+        ]),
+        "Rust exclusion search should give only TypeScript files"
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                Vec::new(),
+                vec![
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap(),
+                ],
+            ).unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.rs".to_string(), vec![8..12]),
+            ("two.rs".to_string(), vec![8..12]),
+        ]),
+        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
+    );
+
+    assert!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                Vec::new(),
+                vec![
+                    PathMatcher::new("*.rs").unwrap(),
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap(),
+                ],
+            ).unwrap(),
+            cx
+        )
+        .await
+        .unwrap().is_empty(),
+        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
+    );
+}
+
+#[gpui2::test]
+async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let search_query = "file";
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "one.rs": r#"// Rust file one"#,
+            "one.ts": r#"// TypeScript file one"#,
+            "two.rs": r#"// Rust file two"#,
+            "two.ts": r#"// TypeScript file two"#,
+        }),
+    )
+    .await;
+    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+    assert!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![PathMatcher::new("*.odd").unwrap()],
+                vec![PathMatcher::new("*.odd").unwrap()],
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap()
+        .is_empty(),
+        "If both no exclusions and inclusions match, exclusions should win and return nothing"
+    );
+
+    assert!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![PathMatcher::new("*.ts").unwrap()],
+                vec![PathMatcher::new("*.ts").unwrap()],
+            ).unwrap(),
+            cx
+        )
+        .await
+        .unwrap()
+        .is_empty(),
+        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
+    );
+
+    assert!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap()
+                ],
+                vec![
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap()
+                ],
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap()
+        .is_empty(),
+        "Non-matching inclusions and exclusions should not change that."
+    );
+
+    assert_eq!(
+        search(
+            &project,
+            SearchQuery::text(
+                search_query,
+                false,
+                true,
+                vec![
+                    PathMatcher::new("*.ts").unwrap(),
+                    PathMatcher::new("*.odd").unwrap()
+                ],
+                vec![
+                    PathMatcher::new("*.rs").unwrap(),
+                    PathMatcher::new("*.odd").unwrap()
+                ],
+            )
+            .unwrap(),
+            cx
+        )
+        .await
+        .unwrap(),
+        HashMap::from_iter([
+            ("one.ts".to_string(), vec![14..18]),
+            ("two.ts".to_string(), vec![14..18]),
+        ]),
+        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
+    );
+}
+
+#[test]
+fn test_glob_literal_prefix() {
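+    // `glob_literal_prefix` returns the literal leading path components of a glob, i.e.
+    // everything before the first component that contains glob syntax.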
+    assert_eq!(glob_literal_prefix("**/*.js"), "");
+    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
+    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
+    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
+}
+
+async fn search(
+    project: &Model<Project>,
+    query: SearchQuery,
+    cx: &mut gpui2::TestAppContext,
+) -> Result<HashMap<String, Vec<Range<usize>>>> {
+    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
+    let mut result = HashMap::default();
+    while let Some((buffer, range)) = search_rx.next().await {
+        result.entry(buffer).or_insert(range);
+    }
+    Ok(result
+        .into_iter()
+        .map(|(buffer, ranges)| {
+            buffer.update(cx, |buffer, _| {
+                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
+                let ranges = ranges
+                    .into_iter()
+                    .map(|range| range.to_offset(buffer))
+                    .collect::<Vec<_>>();
+                (path, ranges)
+            })
+        })
+        .collect())
+}
+
+fn init_test(cx: &mut gpui2::TestAppContext) {
+    if std::env::var("RUST_LOG").is_ok() {
+        env_logger::init();
+    }
+
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        language2::init(cx);
+        Project::init_settings(cx);
+    });
+}

crates/project2/src/worktree.rs 🔗

@@ -17,12 +17,13 @@ use futures::{
     },
     select_biased,
     task::Poll,
-    FutureExt, Stream, StreamExt,
+    FutureExt as _, Stream, StreamExt,
 };
 use fuzzy2::CharBag;
 use git::{DOT_GIT, GITIGNORE};
 use gpui2::{
-    AppContext, AsyncAppContext, Context, EventEmitter, Executor, Model, ModelContext, Task,
+    AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
+    Task,
 };
 use language2::{
     proto::{
@@ -296,6 +297,7 @@ impl Worktree {
         // After determining whether the root entry is a file or a directory, populate the
         // snapshot's "root name", which will be used for the purpose of fuzzy matching.
         let abs_path = path.into();
+
         let metadata = fs
             .metadata(&abs_path)
             .await
@@ -364,10 +366,10 @@ impl Worktree {
             })
             .detach();
 
-            let background_scanner_task = cx.executor().spawn({
+            let background_scanner_task = cx.background_executor().spawn({
                 let fs = fs.clone();
                 let snapshot = snapshot.clone();
-                let background = cx.executor().clone();
+                let background = cx.background_executor().clone();
                 async move {
                     let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
                     BackgroundScanner::new(
@@ -428,7 +430,7 @@ impl Worktree {
             let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
             let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
 
-            cx.executor()
+            cx.background_executor()
                 .spawn({
                     let background_snapshot = background_snapshot.clone();
                     async move {
@@ -600,7 +602,7 @@ impl LocalWorktree {
                 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
                 .await?;
             let text_buffer = cx
-                .executor()
+                .background_executor()
                 .spawn(async move { text::Buffer::new(0, id, contents) })
                 .await;
             cx.build_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))
@@ -888,7 +890,7 @@ impl LocalWorktree {
                 if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
                     let repo = repo.repo_ptr.clone();
                     index_task = Some(
-                        cx.executor()
+                        cx.background_executor()
                             .spawn(async move { repo.lock().load_index_text(&repo_path) }),
                     );
                 }
@@ -1007,7 +1009,7 @@ impl LocalWorktree {
         let lowest_ancestor = self.lowest_ancestor(&path);
         let abs_path = self.absolutize(&path);
         let fs = self.fs.clone();
-        let write = cx.executor().spawn(async move {
+        let write = cx.background_executor().spawn(async move {
             if is_dir {
                 fs.create_dir(&abs_path).await
             } else {
@@ -1057,7 +1059,7 @@ impl LocalWorktree {
         let abs_path = self.absolutize(&path);
         let fs = self.fs.clone();
         let write = cx
-            .executor()
+            .background_executor()
             .spawn(async move { fs.save(&abs_path, &text, line_ending).await });
 
         cx.spawn(|this, mut cx| async move {
@@ -1078,7 +1080,7 @@ impl LocalWorktree {
         let abs_path = self.absolutize(&entry.path);
         let fs = self.fs.clone();
 
-        let delete = cx.executor().spawn(async move {
+        let delete = cx.background_executor().spawn(async move {
             if entry.is_file() {
                 fs.remove_file(&abs_path, Default::default()).await?;
             } else {
@@ -1118,7 +1120,7 @@ impl LocalWorktree {
         let abs_old_path = self.absolutize(&old_path);
         let abs_new_path = self.absolutize(&new_path);
         let fs = self.fs.clone();
-        let rename = cx.executor().spawn(async move {
+        let rename = cx.background_executor().spawn(async move {
             fs.rename(&abs_old_path, &abs_new_path, Default::default())
                 .await
         });
@@ -1145,7 +1147,7 @@ impl LocalWorktree {
         let abs_old_path = self.absolutize(&old_path);
         let abs_new_path = self.absolutize(&new_path);
         let fs = self.fs.clone();
-        let copy = cx.executor().spawn(async move {
+        let copy = cx.background_executor().spawn(async move {
             copy_recursive(
                 fs.as_ref(),
                 &abs_old_path,
@@ -1173,7 +1175,7 @@ impl LocalWorktree {
     ) -> Option<Task<Result<()>>> {
         let path = self.entry_for_id(entry_id)?.path.clone();
         let mut refresh = self.refresh_entries_for_paths(vec![path]);
-        Some(cx.executor().spawn(async move {
+        Some(cx.background_executor().spawn(async move {
             refresh.next().await;
             Ok(())
         }))
@@ -1247,7 +1249,7 @@ impl LocalWorktree {
             .ok();
 
         let worktree_id = cx.entity_id().as_u64();
-        let _maintain_remote_snapshot = cx.executor().spawn(async move {
+        let _maintain_remote_snapshot = cx.background_executor().spawn(async move {
             let mut is_first = true;
             while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
                 let update;
@@ -1305,7 +1307,7 @@ impl LocalWorktree {
         let rx = self.observe_updates(project_id, cx, move |update| {
             client.request(update).map(|result| result.is_ok())
         });
-        cx.executor()
+        cx.background_executor()
             .spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
     }
 
@@ -2671,7 +2673,8 @@ impl language2::LocalFile for File {
         let worktree = self.worktree.read(cx).as_local().unwrap();
         let abs_path = worktree.absolutize(&self.path);
         let fs = worktree.fs.clone();
-        cx.executor().spawn(async move { fs.load(&abs_path).await })
+        cx.background_executor()
+            .spawn(async move { fs.load(&abs_path).await })
     }
 
     fn buffer_reloaded(
@@ -3012,7 +3015,7 @@ struct BackgroundScanner {
     state: Mutex<BackgroundScannerState>,
     fs: Arc<dyn Fs>,
     status_updates_tx: UnboundedSender<ScanState>,
-    executor: Executor,
+    executor: BackgroundExecutor,
     scan_requests_rx: channel::Receiver<ScanRequest>,
     path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     next_entry_id: Arc<AtomicUsize>,
@@ -3032,7 +3035,7 @@ impl BackgroundScanner {
         next_entry_id: Arc<AtomicUsize>,
         fs: Arc<dyn Fs>,
         status_updates_tx: UnboundedSender<ScanState>,
-        executor: Executor,
+        executor: BackgroundExecutor,
         scan_requests_rx: channel::Receiver<ScanRequest>,
         path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     ) -> Self {
@@ -4030,53 +4033,54 @@ struct UpdateIgnoreStatusJob {
     scan_queue: Sender<ScanJob>,
 }
 
-// todo!("re-enable when we have tests")
-// pub trait WorktreeModelHandle {
-// #[cfg(any(test, feature = "test-support"))]
-// fn flush_fs_events<'a>(
-//     &self,
-//     cx: &'a gpui::TestAppContext,
-// ) -> futures::future::LocalBoxFuture<'a, ()>;
-// }
-
-// impl WorktreeModelHandle for Handle<Worktree> {
-//     // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
-//     // occurred before the worktree was constructed. These events can cause the worktree to perform
-//     // extra directory scans, and emit extra scan-state notifications.
-//     //
-//     // This function mutates the worktree's directory and waits for those mutations to be picked up,
-//     // to ensure that all redundant FS events have already been processed.
-//     #[cfg(any(test, feature = "test-support"))]
-//     fn flush_fs_events<'a>(
-//         &self,
-//         cx: &'a gpui::TestAppContext,
-//     ) -> futures::future::LocalBoxFuture<'a, ()> {
-//         let filename = "fs-event-sentinel";
-//         let tree = self.clone();
-//         let (fs, root_path) = self.read_with(cx, |tree, _| {
-//             let tree = tree.as_local().unwrap();
-//             (tree.fs.clone(), tree.abs_path().clone())
-//         });
-
-//         async move {
-//             fs.create_file(&root_path.join(filename), Default::default())
-//                 .await
-//                 .unwrap();
-//             tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
-//                 .await;
-
-//             fs.remove_file(&root_path.join(filename), Default::default())
-//                 .await
-//                 .unwrap();
-//             tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
-//                 .await;
-
-//             cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//                 .await;
-//         }
-//         .boxed_local()
-//     }
-// }
+pub trait WorktreeModelHandle {
+    #[cfg(any(test, feature = "test-support"))]
+    fn flush_fs_events<'a>(
+        &self,
+        cx: &'a mut gpui2::TestAppContext,
+    ) -> futures::future::LocalBoxFuture<'a, ()>;
+}
+
+impl WorktreeModelHandle for Model<Worktree> {
+    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
+    // occurred before the worktree was constructed. These events can cause the worktree to perform
+    // extra directory scans, and emit extra scan-state notifications.
+    //
+    // This function mutates the worktree's directory and waits for those mutations to be picked up,
+    // to ensure that all redundant FS events have already been processed.
+    #[cfg(any(test, feature = "test-support"))]
+    fn flush_fs_events<'a>(
+        &self,
+        cx: &'a mut gpui2::TestAppContext,
+    ) -> futures::future::LocalBoxFuture<'a, ()> {
+        let file_name = "fs-event-sentinel";
+
+        let tree = self.clone();
+        let (fs, root_path) = self.update(cx, |tree, _| {
+            let tree = tree.as_local().unwrap();
+            (tree.fs.clone(), tree.abs_path().clone())
+        });
+
+        async move {
+            fs.create_file(&root_path.join(file_name), Default::default())
+                .await
+                .unwrap();
+
+            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_some())
+                .await;
+
+            fs.remove_file(&root_path.join(file_name), Default::default())
+                .await
+                .unwrap();
+            cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_none())
+                .await;
+
+            cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+                .await;
+        }
+        .boxed_local()
+    }
+}
 
 #[derive(Clone, Debug)]
 struct TraversalProgress<'a> {

crates/rpc2/src/conn.rs 🔗

@@ -34,7 +34,7 @@ impl Connection {
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn in_memory(
-        executor: gpui2::Executor,
+        executor: gpui2::BackgroundExecutor,
     ) -> (Self, Self, std::sync::Arc<std::sync::atomic::AtomicBool>) {
         use std::sync::{
             atomic::{AtomicBool, Ordering::SeqCst},
@@ -53,7 +53,7 @@ impl Connection {
         #[allow(clippy::type_complexity)]
         fn channel(
             killed: Arc<AtomicBool>,
-            executor: gpui2::Executor,
+            executor: gpui2::BackgroundExecutor,
         ) -> (
             Box<dyn Send + Unpin + futures::Sink<WebSocketMessage, Error = anyhow::Error>>,
             Box<dyn Send + Unpin + futures::Stream<Item = Result<WebSocketMessage, anyhow::Error>>>,

crates/rpc2/src/peer.rs 🔗

@@ -342,7 +342,7 @@ impl Peer {
     pub fn add_test_connection(
         self: &Arc<Self>,
         connection: Connection,
-        executor: gpui2::Executor,
+        executor: gpui2::BackgroundExecutor,
     ) -> (
         ConnectionId,
         impl Future<Output = anyhow::Result<()>> + Send,
@@ -559,7 +559,6 @@ mod tests {
     use async_tungstenite::tungstenite::Message as WebSocketMessage;
     use gpui2::TestAppContext;
 
-    #[ctor::ctor]
     fn init_logger() {
         if std::env::var("RUST_LOG").is_ok() {
             env_logger::init();
@@ -568,6 +567,8 @@ mod tests {
 
     #[gpui2::test(iterations = 50)]
     async fn test_request_response(cx: &mut TestAppContext) {
+        init_logger();
+
         let executor = cx.executor();
 
         // create 2 clients connected to 1 server

crates/settings2/src/settings_file.rs 🔗

@@ -2,7 +2,7 @@ use crate::{settings_store::SettingsStore, Settings};
 use anyhow::Result;
 use fs2::Fs;
 use futures::{channel::mpsc, StreamExt};
-use gpui2::{AppContext, Executor};
+use gpui2::{AppContext, BackgroundExecutor};
 use std::{io::ErrorKind, path::PathBuf, str, sync::Arc, time::Duration};
 use util::{paths, ResultExt};
 
@@ -28,7 +28,7 @@ pub fn test_settings() -> String {
 }
 
 pub fn watch_config_file(
-    executor: &Executor,
+    executor: &BackgroundExecutor,
     fs: Arc<dyn Fs>,
     path: PathBuf,
 ) -> mpsc::UnboundedReceiver<String> {
@@ -63,7 +63,10 @@ pub fn handle_settings_file_changes(
     mut user_settings_file_rx: mpsc::UnboundedReceiver<String>,
     cx: &mut AppContext,
 ) {
-    let user_settings_content = cx.executor().block(user_settings_file_rx.next()).unwrap();
+    let user_settings_content = cx
+        .background_executor()
+        .block(user_settings_file_rx.next())
+        .unwrap();
     cx.update_global(|store: &mut SettingsStore, cx| {
         store
             .set_user_settings(&user_settings_content, cx)

crates/sqlez/src/thread_safe_connection.rs 🔗

@@ -336,13 +336,13 @@ mod test {
                         FOREIGN KEY(dock_pane) REFERENCES panes(pane_id),
                         FOREIGN KEY(active_pane) REFERENCES panes(pane_id)
                     ) STRICT;
-                    
+
                     CREATE TABLE panes(
                         pane_id INTEGER PRIMARY KEY,
                         workspace_id INTEGER NOT NULL,
                         active INTEGER NOT NULL, -- Boolean
-                        FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) 
-                            ON DELETE CASCADE 
+                        FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+                            ON DELETE CASCADE
                             ON UPDATE CASCADE
                     ) STRICT;
                 "]

crates/storybook2/src/stories/colors.rs 🔗

@@ -1,6 +1,6 @@
 use crate::story::Story;
 use gpui2::{px, Div, Render};
-use theme2::default_color_scales;
+use theme2::{default_color_scales, ColorScaleStep};
 use ui::prelude::*;
 
 pub struct ColorsStory;
@@ -30,9 +30,14 @@ impl Render for ColorsStory {
                                     .line_height(px(24.))
                                     .child(scale.name().to_string()),
                             )
-                            .child(div().flex().gap_1().children(
-                                (1..=12).map(|step| div().flex().size_6().bg(scale.step(cx, step))),
-                            ))
+                            .child(
+                                div()
+                                    .flex()
+                                    .gap_1()
+                                    .children(ColorScaleStep::ALL.map(|step| {
+                                        div().flex().size_6().bg(scale.step(cx, step))
+                                    })),
+                            )
                     })),
             )
     }

crates/terminal2/src/terminal2.rs 🔗

@@ -51,8 +51,8 @@ use thiserror::Error;
 
 use gpui2::{
     px, AnyWindowHandle, AppContext, Bounds, ClipboardItem, EventEmitter, Hsla, Keystroke,
-    MainThread, ModelContext, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
-    Pixels, Point, ScrollWheelEvent, Size, Task, TouchPhase,
+    ModelContext, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels,
+    Point, ScrollWheelEvent, Size, Task, TouchPhase,
 };
 
 use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
@@ -403,7 +403,7 @@ impl TerminalBuilder {
 
     pub fn subscribe(mut self, cx: &mut ModelContext<Terminal>) -> Terminal {
         //Event loop
-        cx.spawn_on_main(|this, mut cx| async move {
+        cx.spawn(|this, mut cx| async move {
             use futures::StreamExt;
 
             while let Some(event) = self.events_rx.next().await {
@@ -414,7 +414,10 @@ impl TerminalBuilder {
 
                 'outer: loop {
                     let mut events = vec![];
-                    let mut timer = cx.executor().timer(Duration::from_millis(4)).fuse();
+                    let mut timer = cx
+                        .background_executor()
+                        .timer(Duration::from_millis(4))
+                        .fuse();
                     let mut wakeup = false;
                     loop {
                         futures::select_biased! {
@@ -551,7 +554,7 @@ pub struct Terminal {
 }
 
 impl Terminal {
-    fn process_event(&mut self, event: &AlacTermEvent, cx: &mut MainThread<ModelContext<Self>>) {
+    fn process_event(&mut self, event: &AlacTermEvent, cx: &mut ModelContext<Self>) {
         match event {
             AlacTermEvent::Title(title) => {
                 self.breadcrumb_text = title.to_string();
@@ -708,8 +711,7 @@ impl Terminal {
 
             InternalEvent::Copy => {
                 if let Some(txt) = term.selection_to_string() {
-                    cx.run_on_main(|cx| cx.write_to_clipboard(ClipboardItem::new(txt)))
-                        .detach();
+                    cx.write_to_clipboard(ClipboardItem::new(txt))
                 }
             }
             InternalEvent::ScrollToAlacPoint(point) => {
@@ -982,7 +984,7 @@ impl Terminal {
             term.lock_unfair() //It's been too long, force block
         } else if let None = self.sync_task {
             //Skip this frame
-            let delay = cx.executor().timer(Duration::from_millis(16));
+            let delay = cx.background_executor().timer(Duration::from_millis(16));
             self.sync_task = Some(cx.spawn(|weak_handle, mut cx| async move {
                 delay.await;
                 if let Some(handle) = weak_handle.upgrade() {
@@ -1189,7 +1191,7 @@ impl Terminal {
         &mut self,
         e: &MouseUpEvent,
         origin: Point<Pixels>,
-        cx: &mut MainThread<ModelContext<Self>>,
+        cx: &mut ModelContext<Self>,
     ) {
         let setting = TerminalSettings::get_global(cx);
 
@@ -1300,7 +1302,7 @@ impl Terminal {
         cx: &mut ModelContext<Self>,
     ) -> Task<Vec<RangeInclusive<AlacPoint>>> {
         let term = self.term.clone();
-        cx.executor().spawn(async move {
+        cx.background_executor().spawn(async move {
             let term = term.lock();
 
             all_search_matches(&term, &searcher).collect()

crates/text2/Cargo.toml 🔗

@@ -0,0 +1,37 @@
+[package]
+name = "text2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/text2.rs"
+doctest = false
+
+[features]
+test-support = ["rand"]
+
+[dependencies]
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+rope = { path = "../rope" }
+sum_tree = { path = "../sum_tree" }
+util = { path = "../util" }
+
+anyhow.workspace = true
+digest = { version = "0.9", features = ["std"] }
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+rand = { workspace = true, optional = true }
+smallvec.workspace = true
+regex.workspace = true
+
+[dev-dependencies]
+collections = { path = "../collections", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+ctor.workspace = true
+env_logger.workspace = true
+rand.workspace = true

crates/text2/src/anchor.rs 🔗

@@ -0,0 +1,144 @@
+use crate::{
+    locator::Locator, BufferSnapshot, Point, PointUtf16, TextDimension, ToOffset, ToPoint,
+    ToPointUtf16,
+};
+use anyhow::Result;
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)]
+pub struct Anchor {
+    pub timestamp: clock::Lamport,
+    pub offset: usize,
+    pub bias: Bias,
+    pub buffer_id: Option<u64>,
+}
+
+impl Anchor {
+    pub const MIN: Self = Self {
+        timestamp: clock::Lamport::MIN,
+        offset: usize::MIN,
+        bias: Bias::Left,
+        buffer_id: None,
+    };
+
+    pub const MAX: Self = Self {
+        timestamp: clock::Lamport::MAX,
+        offset: usize::MAX,
+        bias: Bias::Right,
+        buffer_id: None,
+    };
+
+    pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering {
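+        // Anchors with equal Lamport timestamps point into the same insertion, so the
+        // fragment lookup can be skipped and only their offsets and biases compared.
+        // Otherwise, order them by the position of each anchor's fragment in the document.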
+        let fragment_id_comparison = if self.timestamp == other.timestamp {
+            Ordering::Equal
+        } else {
+            buffer
+                .fragment_id_for_anchor(self)
+                .cmp(buffer.fragment_id_for_anchor(other))
+        };
+
+        fragment_id_comparison
+            .then_with(|| self.offset.cmp(&other.offset))
+            .then_with(|| self.bias.cmp(&other.bias))
+    }
+
+    pub fn min(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+        if self.cmp(other, buffer).is_le() {
+            *self
+        } else {
+            *other
+        }
+    }
+
+    pub fn max(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+        if self.cmp(other, buffer).is_ge() {
+            *self
+        } else {
+            *other
+        }
+    }
+
+    pub fn bias(&self, bias: Bias, buffer: &BufferSnapshot) -> Anchor {
+        if bias == Bias::Left {
+            self.bias_left(buffer)
+        } else {
+            self.bias_right(buffer)
+        }
+    }
+
+    pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor {
+        if self.bias == Bias::Left {
+            *self
+        } else {
+            buffer.anchor_before(self)
+        }
+    }
+
+    pub fn bias_right(&self, buffer: &BufferSnapshot) -> Anchor {
+        if self.bias == Bias::Right {
+            *self
+        } else {
+            buffer.anchor_after(self)
+        }
+    }
+
+    pub fn summary<D>(&self, content: &BufferSnapshot) -> D
+    where
+        D: TextDimension,
+    {
+        content.summary_for_anchor(self)
+    }
+
+    /// Returns true when the [Anchor] is located inside a visible fragment.
+    pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool {
+        if *self == Anchor::MIN || *self == Anchor::MAX {
+            true
+        } else {
+            let fragment_id = buffer.fragment_id_for_anchor(self);
+            let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>();
+            fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None);
+            fragment_cursor
+                .item()
+                .map_or(false, |fragment| fragment.visible)
+        }
+    }
+}
+
+pub trait OffsetRangeExt {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+    T: ToOffset,
+{
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+        self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
+    }
+
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+        self.start.to_offset(snapshot).to_point(snapshot)
+            ..self.end.to_offset(snapshot).to_point(snapshot)
+    }
+
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+        self.start.to_offset(snapshot).to_point_utf16(snapshot)
+            ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+    }
+}
+
+pub trait AnchorRangeExt {
+    fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
+}
+
+impl AnchorRangeExt for Range<Anchor> {
+    fn cmp(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering> {
+        Ok(match self.start.cmp(&other.start, buffer) {
+            Ordering::Equal => other.end.cmp(&self.end, buffer),
+            ord => ord,
+        })
+    }
+}

crates/text2/src/locator.rs 🔗

@@ -0,0 +1,125 @@
+use lazy_static::lazy_static;
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+lazy_static! {
+    static ref MIN: Locator = Locator::min();
+    static ref MAX: Locator = Locator::max();
+}
+
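+/// A variable-length ordering key: `Locator::between` can always produce a key that sorts
+/// strictly between any two distinct keys, allowing fragments to be assigned stable,
+/// densely ordered positions.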
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+    pub fn min() -> Self {
+        Self(smallvec![u64::MIN])
+    }
+
+    pub fn max() -> Self {
+        Self(smallvec![u64::MAX])
+    }
+
+    pub fn min_ref() -> &'static Self {
+        &*MIN
+    }
+
+    pub fn max_ref() -> &'static Self {
+        &*MAX
+    }
+
+    pub fn assign(&mut self, other: &Self) {
+        self.0.resize(other.0.len(), 0);
+        self.0.copy_from_slice(&other.0);
+    }
+
+    pub fn between(lhs: &Self, rhs: &Self) -> Self {
+        let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+        let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+        let mut location = SmallVec::new();
+        for (lhs, rhs) in lhs.zip(rhs) {
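+            // Step only a tiny fraction (1/2^48) of the way from `lhs` toward `rhs`. If the
+            // gap at this position is smaller than 2^48, the midpoint equals `lhs`, so keep
+            // that digit and descend to the next position; once the midpoint strictly
+            // exceeds `lhs`, the locator already sorts between the two inputs and we stop.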
+            let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+            location.push(mid);
+            if mid > lhs {
+                break;
+            }
+        }
+        Self(location)
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+}
+
+impl Default for Locator {
+    fn default() -> Self {
+        Self::min()
+    }
+}
+
+impl sum_tree::Item for Locator {
+    type Summary = Locator;
+
+    fn summary(&self) -> Self::Summary {
+        self.clone()
+    }
+}
+
+impl sum_tree::KeyedItem for Locator {
+    type Key = Locator;
+
+    fn key(&self) -> Self::Key {
+        self.clone()
+    }
+}
+
+impl sum_tree::Summary for Locator {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        self.assign(summary);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::mem;
+
+    #[gpui2::test(iterations = 100)]
+    fn test_locators(mut rng: StdRng) {
+        let mut lhs = Default::default();
+        let mut rhs = Default::default();
+        while lhs == rhs {
+            lhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+            rhs = Locator(
+                (0..rng.gen_range(1..=5))
+                    .map(|_| rng.gen_range(0..=100))
+                    .collect(),
+            );
+        }
+
+        if lhs > rhs {
+            mem::swap(&mut lhs, &mut rhs);
+        }
+
+        let middle = Locator::between(&lhs, &rhs);
+        assert!(middle > lhs);
+        assert!(middle < rhs);
+        for ix in 0..middle.0.len() - 1 {
+            assert!(
+                middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+                    || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+            );
+        }
+    }
+}
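
Outside the fragment tree, the ordering behaviour is easy to see in isolation. A brief sketch, assuming the crate is consumed as `text2`; everything else uses only the API shown above:

    use text2::locator::Locator;

    fn locator_ordering() {
        let min = Locator::min();
        let max = Locator::max();

        // `between` always yields a key that sorts strictly between its two
        // inputs, so a new fragment can be slotted between existing ones
        // without renumbering its neighbors.
        let a = Locator::between(&min, &max);
        let b = Locator::between(&a, &max);
        let c = Locator::between(&a, &b);
        assert!(min < a && a < c && c < b && b < max);
    }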

crates/text2/src/network.rs

@@ -0,0 +1,69 @@
+use clock::ReplicaId;
+
+pub struct Network<T: Clone, R: rand::Rng> {
+    inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
+    all_messages: Vec<T>,
+    rng: R,
+}
+
+#[derive(Clone)]
+struct Envelope<T: Clone> {
+    message: T,
+}
+
+impl<T: Clone, R: rand::Rng> Network<T, R> {
+    pub fn new(rng: R) -> Self {
+        Network {
+            inboxes: Default::default(),
+            all_messages: Vec::new(),
+            rng,
+        }
+    }
+
+    pub fn add_peer(&mut self, id: ReplicaId) {
+        self.inboxes.insert(id, Vec::new());
+    }
+
+    pub fn replicate(&mut self, old_replica_id: ReplicaId, new_replica_id: ReplicaId) {
+        self.inboxes
+            .insert(new_replica_id, self.inboxes[&old_replica_id].clone());
+    }
+
+    pub fn is_idle(&self) -> bool {
+        self.inboxes.values().all(|i| i.is_empty())
+    }
+
+    pub fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
+        for (replica, inbox) in self.inboxes.iter_mut() {
+            if *replica != sender {
+                for message in &messages {
+                    // Insert one or more copies of this message, potentially *before* messages
+                    // sent earlier by this peer, to simulate out-of-order and duplicated delivery.
+                    for _ in 0..self.rng.gen_range(1..4) {
+                        let insertion_index = self.rng.gen_range(0..inbox.len() + 1);
+                        inbox.insert(
+                            insertion_index,
+                            Envelope {
+                                message: message.clone(),
+                            },
+                        );
+                    }
+                }
+            }
+        }
+        self.all_messages.extend(messages);
+    }
+
+    pub fn has_unreceived(&self, receiver: ReplicaId) -> bool {
+        !self.inboxes[&receiver].is_empty()
+    }
+
+    pub fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
+        let inbox = self.inboxes.get_mut(&receiver).unwrap();
+        let count = self.rng.gen_range(0..inbox.len() + 1);
+        inbox
+            .drain(0..count)
+            .map(|envelope| envelope.message)
+            .collect()
+    }
+}
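
A minimal sketch of driving this test-only network, assuming the crate is consumed as `text2` with its `test-support` feature enabled (the module is `#[cfg]`-gated later in this diff) and an arbitrary RNG seed:

    use rand::{rngs::StdRng, SeedableRng};
    use text2::network::Network;

    fn network_sketch() {
        let mut network: Network<String, StdRng> = Network::new(StdRng::seed_from_u64(0));
        network.add_peer(0);
        network.add_peer(1);

        // Peer 0 broadcasts; peer 1's inbox now holds one or more (possibly
        // reordered and duplicated) copies of the message.
        network.broadcast(0, vec!["op-1".to_string()]);
        assert!(network.has_unreceived(1));

        // `receive` drains a random prefix of the inbox, so test loops keep
        // calling it until `is_idle` reports every inbox empty.
        while !network.is_idle() {
            let _ops = network.receive(1);
        }
    }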

crates/text2/src/operation_queue.rs

@@ -0,0 +1,153 @@
+use std::{fmt::Debug, ops::Add};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+    fn lamport_timestamp(&self) -> clock::Lamport;
+}
+
+#[derive(Clone, Debug)]
+struct OperationItem<T>(T);
+
+#[derive(Clone, Debug)]
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
+
+#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
+pub struct OperationKey(clock::Lamport);
+
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+pub struct OperationSummary {
+    pub key: OperationKey,
+    pub len: usize,
+}
+
+impl OperationKey {
+    pub fn new(timestamp: clock::Lamport) -> Self {
+        Self(timestamp)
+    }
+}
+
+impl<T: Operation> Default for OperationQueue<T> {
+    fn default() -> Self {
+        OperationQueue::new()
+    }
+}
+
+impl<T: Operation> OperationQueue<T> {
+    pub fn new() -> Self {
+        OperationQueue(SumTree::new())
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.summary().len
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    pub fn insert(&mut self, mut ops: Vec<T>) {
+        ops.sort_by_key(|op| op.lamport_timestamp());
+        ops.dedup_by_key(|op| op.lamport_timestamp());
+        self.0.edit(
+            ops.into_iter()
+                .map(|op| Edit::Insert(OperationItem(op)))
+                .collect(),
+            &(),
+        );
+    }
+
+    pub fn drain(&mut self) -> Self {
+        let clone = self.clone();
+        self.0 = SumTree::new();
+        clone
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &T> {
+        self.0.iter().map(|i| &i.0)
+    }
+}
+
+impl Summary for OperationSummary {
+    type Context = ();
+
+    fn add_summary(&mut self, other: &Self, _: &()) {
+        assert!(self.key < other.key);
+        self.key = other.key;
+        self.len += other.len;
+    }
+}
+
+impl<'a> Add<&'a Self> for OperationSummary {
+    type Output = Self;
+
+    fn add(self, other: &Self) -> Self {
+        assert!(self.key < other.key);
+        OperationSummary {
+            key: other.key,
+            len: self.len + other.len,
+        }
+    }
+}
+
+impl<'a> Dimension<'a, OperationSummary> for OperationKey {
+    fn add_summary(&mut self, summary: &OperationSummary, _: &()) {
+        assert!(*self <= summary.key);
+        *self = summary.key;
+    }
+}
+
+impl<T: Operation> Item for OperationItem<T> {
+    type Summary = OperationSummary;
+
+    fn summary(&self) -> Self::Summary {
+        OperationSummary {
+            key: OperationKey::new(self.0.lamport_timestamp()),
+            len: 1,
+        }
+    }
+}
+
+impl<T: Operation> KeyedItem for OperationItem<T> {
+    type Key = OperationKey;
+
+    fn key(&self) -> Self::Key {
+        OperationKey::new(self.0.lamport_timestamp())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_len() {
+        let mut clock = clock::Lamport::new(0);
+
+        let mut queue = OperationQueue::new();
+        assert_eq!(queue.len(), 0);
+
+        queue.insert(vec![
+            TestOperation(clock.tick()),
+            TestOperation(clock.tick()),
+        ]);
+        assert_eq!(queue.len(), 2);
+
+        queue.insert(vec![TestOperation(clock.tick())]);
+        assert_eq!(queue.len(), 3);
+
+        drop(queue.drain());
+        assert_eq!(queue.len(), 0);
+
+        queue.insert(vec![TestOperation(clock.tick())]);
+        assert_eq!(queue.len(), 1);
+    }
+
+    #[derive(Clone, Debug, Eq, PartialEq)]
+    struct TestOperation(clock::Lamport);
+
+    impl Operation for TestOperation {
+        fn lamport_timestamp(&self) -> clock::Lamport {
+            self.0
+        }
+    }
+}
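
Beyond the `test_len` case above, the useful property is that `insert` sorts and de-duplicates by Lamport timestamp, while `drain` hands back everything queued so far and resets the queue. A small sketch, assuming the crate is consumed as `text2` and the same `clock` dependency used throughout this file; `DeferredOp` is a hypothetical stand-in mirroring the test module's `TestOperation`:

    use text2::operation_queue::{Operation, OperationQueue};

    #[derive(Clone, Debug)]
    struct DeferredOp(clock::Lamport);

    impl Operation for DeferredOp {
        fn lamport_timestamp(&self) -> clock::Lamport {
            self.0
        }
    }

    fn defer_and_flush() {
        let mut clock = clock::Lamport::new(1);
        let mut deferred = OperationQueue::new();

        let first = DeferredOp(clock.tick());
        let second = DeferredOp(clock.tick());

        // Insertion order doesn't matter, and duplicates collapse to one entry.
        deferred.insert(vec![second, first.clone(), first]);
        assert_eq!(deferred.len(), 2);

        // `drain` returns the queued ops (iterable in timestamp order) and
        // leaves the original queue empty for the next deferral round.
        let flushed = deferred.drain();
        assert_eq!(flushed.iter().count(), 2);
        assert!(deferred.is_empty());
    }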

crates/text2/src/patch.rs

@@ -0,0 +1,594 @@
+use crate::Edit;
+use std::{
+    cmp, mem,
+    ops::{Add, AddAssign, Sub},
+};
+
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
+pub struct Patch<T>(Vec<Edit<T>>);
+
+impl<T> Patch<T>
+where
+    T: 'static
+        + Clone
+        + Copy
+        + Ord
+        + Sub<T, Output = T>
+        + Add<T, Output = T>
+        + AddAssign
+        + Default
+        + PartialEq,
+{
+    pub fn new(edits: Vec<Edit<T>>) -> Self {
+        #[cfg(debug_assertions)]
+        {
+            let mut last_edit: Option<&Edit<T>> = None;
+            for edit in &edits {
+                if let Some(last_edit) = last_edit {
+                    assert!(edit.old.start > last_edit.old.end);
+                    assert!(edit.new.start > last_edit.new.end);
+                }
+                last_edit = Some(edit);
+            }
+        }
+        Self(edits)
+    }
+
+    pub fn edits(&self) -> &[Edit<T>] {
+        &self.0
+    }
+
+    pub fn into_inner(self) -> Vec<Edit<T>> {
+        self.0
+    }
+
+    pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
+        let mut old_edits_iter = self.0.iter().cloned().peekable();
+        let mut new_edits_iter = new_edits_iter.into_iter().peekable();
+        let mut composed = Patch(Vec::new());
+
+        let mut old_start = T::default();
+        let mut new_start = T::default();
+        loop {
+            let old_edit = old_edits_iter.peek_mut();
+            let new_edit = new_edits_iter.peek_mut();
+
+            // Push the old edit if its new end is before the new edit's old start.
+            if let Some(old_edit) = old_edit.as_ref() {
+                let new_edit = new_edit.as_ref();
+                if new_edit.map_or(true, |new_edit| old_edit.new.end < new_edit.old.start) {
+                    let catchup = old_edit.old.start - old_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let old_end = old_start + old_edit.old_len();
+                    let new_end = new_start + old_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+                    old_start = old_end;
+                    new_start = new_end;
+                    old_edits_iter.next();
+                    continue;
+                }
+            }
+
+            // Push the new edit if its old end is before the old edit's new start.
+            if let Some(new_edit) = new_edit.as_ref() {
+                let old_edit = old_edit.as_ref();
+                if old_edit.map_or(true, |old_edit| new_edit.old.end < old_edit.new.start) {
+                    let catchup = new_edit.new.start - new_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let old_end = old_start + new_edit.old_len();
+                    let new_end = new_start + new_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+                    old_start = old_end;
+                    new_start = new_end;
+                    new_edits_iter.next();
+                    continue;
+                }
+            }
+
+            // If we still have edits by this point then they must intersect, so we compose them.
+            if let Some((old_edit, new_edit)) = old_edit.zip(new_edit) {
+                if old_edit.new.start < new_edit.old.start {
+                    let catchup = old_edit.old.start - old_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let overshoot = new_edit.old.start - old_edit.new.start;
+                    let old_end = cmp::min(old_start + overshoot, old_edit.old.end);
+                    let new_end = new_start + overshoot;
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    old_edit.old.start = old_end;
+                    old_edit.new.start += overshoot;
+                    old_start = old_end;
+                    new_start = new_end;
+                } else {
+                    let catchup = new_edit.new.start - new_start;
+                    old_start += catchup;
+                    new_start += catchup;
+
+                    let overshoot = old_edit.new.start - new_edit.old.start;
+                    let old_end = old_start + overshoot;
+                    let new_end = cmp::min(new_start + overshoot, new_edit.new.end);
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    new_edit.old.start += overshoot;
+                    new_edit.new.start = new_end;
+                    old_start = old_end;
+                    new_start = new_end;
+                }
+
+                if old_edit.new.end > new_edit.old.end {
+                    let old_end = old_start + cmp::min(old_edit.old_len(), new_edit.old_len());
+                    let new_end = new_start + new_edit.new_len();
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    old_edit.old.start = old_end;
+                    old_edit.new.start = new_edit.old.end;
+                    old_start = old_end;
+                    new_start = new_end;
+                    new_edits_iter.next();
+                } else {
+                    let old_end = old_start + old_edit.old_len();
+                    let new_end = new_start + cmp::min(old_edit.new_len(), new_edit.new_len());
+                    composed.push(Edit {
+                        old: old_start..old_end,
+                        new: new_start..new_end,
+                    });
+
+                    new_edit.old.start = old_edit.new.end;
+                    new_edit.new.start = new_end;
+                    old_start = old_end;
+                    new_start = new_end;
+                    old_edits_iter.next();
+                }
+            } else {
+                break;
+            }
+        }
+
+        composed
+    }
+
+    pub fn invert(&mut self) -> &mut Self {
+        for edit in &mut self.0 {
+            mem::swap(&mut edit.old, &mut edit.new);
+        }
+        self
+    }
+
+    pub fn clear(&mut self) {
+        self.0.clear();
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn push(&mut self, edit: Edit<T>) {
+        if edit.is_empty() {
+            return;
+        }
+
+        if let Some(last) = self.0.last_mut() {
+            if last.old.end >= edit.old.start {
+                last.old.end = edit.old.end;
+                last.new.end = edit.new.end;
+            } else {
+                self.0.push(edit);
+            }
+        } else {
+            self.0.push(edit);
+        }
+    }
+
+    pub fn old_to_new(&self, old: T) -> T {
+        let ix = match self.0.binary_search_by(|probe| probe.old.start.cmp(&old)) {
+            Ok(ix) => ix,
+            Err(ix) => {
+                if ix == 0 {
+                    return old;
+                } else {
+                    ix - 1
+                }
+            }
+        };
+        if let Some(edit) = self.0.get(ix) {
+            if old >= edit.old.end {
+                edit.new.end + (old - edit.old.end)
+            } else {
+                edit.new.start
+            }
+        } else {
+            old
+        }
+    }
+}
+
+impl<T: Clone> IntoIterator for Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::vec::IntoIter<Edit<T>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
+impl<'a, T: Clone> IntoIterator for &'a mut Patch<T> {
+    type Item = Edit<T>;
+    type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter().cloned()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use rand::prelude::*;
+    use std::env;
+
+    #[gpui2::test]
+    fn test_one_disjoint_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..4,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 1..3,
+                    new: 5..8,
+                },
+            ]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 5..9,
+                new: 5..7,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 1..3,
+                    new: 1..4,
+                },
+                Edit {
+                    old: 4..8,
+                    new: 5..7,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_one_overlapping_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 1..3,
+                new: 1..4,
+            }]),
+            Patch(vec![Edit {
+                old: 3..5,
+                new: 3..6,
+            }]),
+            Patch(vec![Edit {
+                old: 1..4,
+                new: 1..6,
+            }]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_disjoint_and_overlapping() {
+        assert_patch_composition(
+            Patch(vec![
+                Edit {
+                    old: 1..3,
+                    new: 1..4,
+                },
+                Edit {
+                    old: 8..12,
+                    new: 9..11,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 3..10,
+                    new: 7..9,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..4,
+                },
+                Edit {
+                    old: 1..12,
+                    new: 5..10,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_new_edits_overlapping_one_old_edit() {
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..3,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..1,
+                },
+                Edit {
+                    old: 1..2,
+                    new: 2..2,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..3,
+            }]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 2..3,
+                new: 2..4,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..2,
+                    new: 0..1,
+                },
+                Edit {
+                    old: 3..3,
+                    new: 2..5,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..3,
+                new: 0..6,
+            }]),
+        );
+
+        assert_patch_composition(
+            Patch(vec![Edit {
+                old: 0..0,
+                new: 0..2,
+            }]),
+            Patch(vec![
+                Edit {
+                    old: 0..0,
+                    new: 0..2,
+                },
+                Edit {
+                    old: 2..5,
+                    new: 4..4,
+                },
+            ]),
+            Patch(vec![Edit {
+                old: 0..3,
+                new: 0..4,
+            }]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_two_new_edits_touching_one_old_edit() {
+        assert_patch_composition(
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..4,
+                },
+                Edit {
+                    old: 7..7,
+                    new: 8..11,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..2,
+                },
+                Edit {
+                    old: 4..4,
+                    new: 3..4,
+                },
+            ]),
+            Patch(vec![
+                Edit {
+                    old: 2..3,
+                    new: 2..4,
+                },
+                Edit {
+                    old: 7..7,
+                    new: 8..11,
+                },
+            ]),
+        );
+    }
+
+    #[gpui2::test]
+    fn test_old_to_new() {
+        let patch = Patch(vec![
+            Edit {
+                old: 2..4,
+                new: 2..4,
+            },
+            Edit {
+                old: 7..8,
+                new: 7..11,
+            },
+        ]);
+        assert_eq!(patch.old_to_new(0), 0);
+        assert_eq!(patch.old_to_new(1), 1);
+        assert_eq!(patch.old_to_new(2), 2);
+        assert_eq!(patch.old_to_new(3), 2);
+        assert_eq!(patch.old_to_new(4), 4);
+        assert_eq!(patch.old_to_new(5), 5);
+        assert_eq!(patch.old_to_new(6), 6);
+        assert_eq!(patch.old_to_new(7), 7);
+        assert_eq!(patch.old_to_new(8), 11);
+        assert_eq!(patch.old_to_new(9), 12);
+    }
+
+    #[gpui2::test(iterations = 100)]
+    fn test_random_patch_compositions(mut rng: StdRng) {
+        let operations = env::var("OPERATIONS")
+            .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+            .unwrap_or(20);
+
+        let initial_chars = (0..rng.gen_range(0..=100))
+            .map(|_| rng.gen_range(b'a'..=b'z') as char)
+            .collect::<Vec<_>>();
+        log::info!("initial chars: {:?}", initial_chars);
+
+        // Generate two sequential patches
+        let mut patches = Vec::new();
+        let mut expected_chars = initial_chars.clone();
+        for i in 0..2 {
+            log::info!("patch {}:", i);
+
+            let mut delta = 0i32;
+            let mut last_edit_end = 0;
+            let mut edits = Vec::new();
+
+            for _ in 0..operations {
+                if last_edit_end >= expected_chars.len() {
+                    break;
+                }
+
+                let end = rng.gen_range(last_edit_end..=expected_chars.len());
+                let start = rng.gen_range(last_edit_end..=end);
+                let old_len = end - start;
+
+                let mut new_len = rng.gen_range(0..=3);
+                if start == end && new_len == 0 {
+                    new_len += 1;
+                }
+
+                last_edit_end = start + new_len + 1;
+
+                let new_chars = (0..new_len)
+                    .map(|_| rng.gen_range(b'A'..=b'Z') as char)
+                    .collect::<Vec<_>>();
+                log::info!(
+                    "  editing {:?}: {:?}",
+                    start..end,
+                    new_chars.iter().collect::<String>()
+                );
+                edits.push(Edit {
+                    old: (start as i32 - delta) as u32..(end as i32 - delta) as u32,
+                    new: start as u32..(start + new_len) as u32,
+                });
+                expected_chars.splice(start..end, new_chars);
+
+                delta += new_len as i32 - old_len as i32;
+            }
+
+            patches.push(Patch(edits));
+        }
+
+        log::info!("old patch: {:?}", &patches[0]);
+        log::info!("new patch: {:?}", &patches[1]);
+        log::info!("initial chars: {:?}", initial_chars);
+        log::info!("final chars: {:?}", expected_chars);
+
+        // Compose the patches, and verify that applying the composed patch has the
+        // same effect as applying the two patches separately.
+        let composed = patches[0].compose(&patches[1]);
+        log::info!("composed patch: {:?}", &composed);
+
+        let mut actual_chars = initial_chars;
+        for edit in composed.0 {
+            actual_chars.splice(
+                edit.new.start as usize..edit.new.start as usize + edit.old.len(),
+                expected_chars[edit.new.start as usize..edit.new.end as usize]
+                    .iter()
+                    .copied(),
+            );
+        }
+
+        assert_eq!(actual_chars, expected_chars);
+    }
+
+    #[track_caller]
+    fn assert_patch_composition(old: Patch<u32>, new: Patch<u32>, composed: Patch<u32>) {
+        let original = ('a'..'z').collect::<Vec<_>>();
+        let inserted = ('A'..'Z').collect::<Vec<_>>();
+
+        let mut expected = original.clone();
+        apply_patch(&mut expected, &old, &inserted);
+        apply_patch(&mut expected, &new, &inserted);
+
+        let mut actual = original;
+        apply_patch(&mut actual, &composed, &expected);
+        assert_eq!(
+            actual.into_iter().collect::<String>(),
+            expected.into_iter().collect::<String>(),
+            "expected patch is incorrect"
+        );
+
+        assert_eq!(old.compose(&new), composed);
+    }
+
+    fn apply_patch(text: &mut Vec<char>, patch: &Patch<u32>, new_text: &[char]) {
+        for edit in patch.0.iter().rev() {
+            text.splice(
+                edit.old.start as usize..edit.old.end as usize,
+                new_text[edit.new.start as usize..edit.new.end as usize]
+                    .iter()
+                    .copied(),
+            );
+        }
+    }
+}
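
One behaviour the tests above don't show directly is that `push` coalesces an edit that touches or overlaps the previous one, which is what keeps composed patches disjoint. A brief sketch, assuming the crate is consumed as `text2` and that `Edit` is exported from the crate root (as the `use crate::Edit` at the top of this file implies):

    use text2::{Edit, Patch};

    fn push_coalesces_touching_edits() {
        let mut patch: Patch<u32> = Patch::default();

        patch.push(Edit { old: 2..4, new: 2..5 });
        // Starts exactly where the previous edit ended in `old` coordinates,
        // so `push` extends that edit instead of appending a new one.
        patch.push(Edit { old: 4..6, new: 5..6 });
        assert_eq!(patch.edits().len(), 1);
        assert_eq!(patch.edits()[0], Edit { old: 2..6, new: 2..6 });

        // A disjoint edit further along stays separate.
        patch.push(Edit { old: 9..9, new: 9..12 });
        assert_eq!(patch.edits().len(), 2);

        // `old_to_new` then maps pre-edit coordinates through the patch.
        assert_eq!(patch.old_to_new(1), 1);
        assert_eq!(patch.old_to_new(7), 7);
        assert_eq!(patch.old_to_new(10), 13);
    }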

crates/text2/src/selection.rs

@@ -0,0 +1,123 @@
+use crate::{Anchor, BufferSnapshot, TextDimension};
+use std::cmp::Ordering;
+use std::ops::Range;
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum SelectionGoal {
+    None,
+    HorizontalPosition(f32),
+    HorizontalRange { start: f32, end: f32 },
+    WrappedHorizontalPosition((u32, f32)),
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct Selection<T> {
+    pub id: usize,
+    pub start: T,
+    pub end: T,
+    pub reversed: bool,
+    pub goal: SelectionGoal,
+}
+
+impl Default for SelectionGoal {
+    fn default() -> Self {
+        Self::None
+    }
+}
+
+impl<T: Clone> Selection<T> {
+    pub fn head(&self) -> T {
+        if self.reversed {
+            self.start.clone()
+        } else {
+            self.end.clone()
+        }
+    }
+
+    pub fn tail(&self) -> T {
+        if self.reversed {
+            self.end.clone()
+        } else {
+            self.start.clone()
+        }
+    }
+
+    pub fn map<F, S>(&self, f: F) -> Selection<S>
+    where
+        F: Fn(T) -> S,
+    {
+        Selection::<S> {
+            id: self.id,
+            start: f(self.start.clone()),
+            end: f(self.end.clone()),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
+    }
+
+    pub fn collapse_to(&mut self, point: T, new_goal: SelectionGoal) {
+        self.start = point.clone();
+        self.end = point;
+        self.goal = new_goal;
+        self.reversed = false;
+    }
+}
+
+impl<T: Copy + Ord> Selection<T> {
+    pub fn is_empty(&self) -> bool {
+        self.start == self.end
+    }
+
+    pub fn set_head(&mut self, head: T, new_goal: SelectionGoal) {
+        if head.cmp(&self.tail()) < Ordering::Equal {
+            if !self.reversed {
+                self.end = self.start;
+                self.reversed = true;
+            }
+            self.start = head;
+        } else {
+            if self.reversed {
+                self.start = self.end;
+                self.reversed = false;
+            }
+            self.end = head;
+        }
+        self.goal = new_goal;
+    }
+
+    pub fn range(&self) -> Range<T> {
+        self.start..self.end
+    }
+}
+
+impl Selection<usize> {
+    #[cfg(feature = "test-support")]
+    pub fn from_offset(offset: usize) -> Self {
+        Selection {
+            id: 0,
+            start: offset,
+            end: offset,
+            goal: SelectionGoal::None,
+            reversed: false,
+        }
+    }
+
+    pub fn equals(&self, offset_range: &Range<usize>) -> bool {
+        self.start == offset_range.start && self.end == offset_range.end
+    }
+}
+
+impl Selection<Anchor> {
+    pub fn resolve<'a, D: 'a + TextDimension>(
+        &'a self,
+        snapshot: &'a BufferSnapshot,
+    ) -> Selection<D> {
+        Selection {
+            id: self.id,
+            start: snapshot.summary_for_anchor(&self.start),
+            end: snapshot.summary_for_anchor(&self.end),
+            reversed: self.reversed,
+            goal: self.goal,
+        }
+    }
+}
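
A short sketch of the head/tail bookkeeping above, using plain `usize` offsets (the id and positions are arbitrary):

    use text2::{Selection, SelectionGoal};

    fn drag_selection_head() {
        let mut selection = Selection {
            id: 0,
            start: 4,
            end: 4,
            reversed: false,
            goal: SelectionGoal::None,
        };

        // Dragging the head to the right keeps the selection un-reversed.
        selection.set_head(9, SelectionGoal::None);
        assert_eq!(selection.range(), 4..9);
        assert_eq!((selection.tail(), selection.head()), (4, 9));

        // Dragging the head back past the tail flips `reversed`, keeping
        // `start <= end` while `head()` still tracks the moving endpoint.
        selection.set_head(1, SelectionGoal::None);
        assert!(selection.reversed);
        assert_eq!(selection.range(), 1..4);
        assert_eq!((selection.tail(), selection.head()), (4, 1));
    }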

crates/text2/src/subscription.rs

@@ -0,0 +1,48 @@
+use crate::{Edit, Patch};
+use parking_lot::Mutex;
+use std::{
+    mem,
+    sync::{Arc, Weak},
+};
+
+#[derive(Default)]
+pub struct Topic(Mutex<Vec<Weak<Mutex<Patch<usize>>>>>);
+
+pub struct Subscription(Arc<Mutex<Patch<usize>>>);
+
+impl Topic {
+    pub fn subscribe(&mut self) -> Subscription {
+        let subscription = Subscription(Default::default());
+        self.0.get_mut().push(Arc::downgrade(&subscription.0));
+        subscription
+    }
+
+    pub fn publish(&self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(&mut *self.0.lock(), edits);
+    }
+
+    pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+        publish(self.0.get_mut(), edits);
+    }
+}
+
+impl Subscription {
+    pub fn consume(&self) -> Patch<usize> {
+        mem::take(&mut *self.0.lock())
+    }
+}
+
+fn publish(
+    subscriptions: &mut Vec<Weak<Mutex<Patch<usize>>>>,
+    edits: impl Clone + IntoIterator<Item = Edit<usize>>,
+) {
+    subscriptions.retain(|subscription| {
+        if let Some(subscription) = subscription.upgrade() {
+            let mut patch = subscription.lock();
+            *patch = patch.compose(edits.clone());
+            true
+        } else {
+            false
+        }
+    });
+}
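
A minimal sketch of the publish/consume flow, assuming the crate is consumed as `text2` with `Topic` and `Edit` available at the crate root:

    use text2::{Edit, Topic};

    fn subscription_accumulates_edits() {
        let mut topic = Topic::default();
        let subscription = topic.subscribe();

        // Insert three characters at offset 0, then delete the character that
        // now sits at offset 8 (offset 5 of the original text). Each publish
        // composes the new edits onto whatever the subscriber hasn't consumed.
        topic.publish_mut([Edit { old: 0..0, new: 0..3 }]);
        topic.publish_mut([Edit { old: 8..9, new: 8..8 }]);

        // `consume` takes the accumulated patch and leaves an empty one behind.
        let patch = subscription.consume();
        assert_eq!(patch.edits().len(), 2);
        assert!(subscription.consume().is_empty());
    }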

crates/text2/src/tests.rs

@@ -0,0 +1,764 @@
+use super::{network::Network, *};
+use clock::ReplicaId;
+use rand::prelude::*;
+use std::{
+    cmp::Ordering,
+    env,
+    iter::Iterator,
+    time::{Duration, Instant},
+};
+
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+    if std::env::var("RUST_LOG").is_ok() {
+        env_logger::init();
+    }
+}
+
+#[test]
+fn test_edit() {
+    let mut buffer = Buffer::new(0, 0, "abc".into());
+    assert_eq!(buffer.text(), "abc");
+    buffer.edit([(3..3, "def")]);
+    assert_eq!(buffer.text(), "abcdef");
+    buffer.edit([(0..0, "ghi")]);
+    assert_eq!(buffer.text(), "ghiabcdef");
+    buffer.edit([(5..5, "jkl")]);
+    assert_eq!(buffer.text(), "ghiabjklcdef");
+    buffer.edit([(6..7, "")]);
+    assert_eq!(buffer.text(), "ghiabjlcdef");
+    buffer.edit([(4..9, "mno")]);
+    assert_eq!(buffer.text(), "ghiamnoef");
+}
+
+#[gpui2::test(iterations = 100)]
+fn test_random_edits(mut rng: StdRng) {
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let reference_string_len = rng.gen_range(0..3);
+    let mut reference_string = RandomCharIter::new(&mut rng)
+        .take(reference_string_len)
+        .collect::<String>();
+    let mut buffer = Buffer::new(0, 0, reference_string.clone());
+    LineEnding::normalize(&mut reference_string);
+
+    buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+    let mut buffer_versions = Vec::new();
+    log::info!(
+        "buffer text {:?}, version: {:?}",
+        buffer.text(),
+        buffer.version()
+    );
+
+    for _i in 0..operations {
+        let (edits, _) = buffer.randomly_edit(&mut rng, 5);
+        for (old_range, new_text) in edits.iter().rev() {
+            reference_string.replace_range(old_range.clone(), new_text);
+        }
+
+        assert_eq!(buffer.text(), reference_string);
+        log::info!(
+            "buffer text {:?}, version: {:?}",
+            buffer.text(),
+            buffer.version()
+        );
+
+        if rng.gen_bool(0.25) {
+            buffer.randomly_undo_redo(&mut rng);
+            reference_string = buffer.text();
+            log::info!(
+                "buffer text {:?}, version: {:?}",
+                buffer.text(),
+                buffer.version()
+            );
+        }
+
+        let range = buffer.random_byte_range(0, &mut rng);
+        assert_eq!(
+            buffer.text_summary_for_range::<TextSummary, _>(range.clone()),
+            TextSummary::from(&reference_string[range])
+        );
+
+        buffer.check_invariants();
+
+        if rng.gen_bool(0.3) {
+            buffer_versions.push((buffer.clone(), buffer.subscribe()));
+        }
+    }
+
+    for (old_buffer, subscription) in buffer_versions {
+        let edits = buffer
+            .edits_since::<usize>(&old_buffer.version)
+            .collect::<Vec<_>>();
+
+        log::info!(
+            "applying edits since version {:?} to old text: {:?}: {:?}",
+            old_buffer.version(),
+            old_buffer.text(),
+            edits,
+        );
+
+        let mut text = old_buffer.visible_text.clone();
+        for edit in edits {
+            let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+        }
+        assert_eq!(text.to_string(), buffer.text());
+
+        for _ in 0..5 {
+            let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
+            let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+            let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
+            let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
+            let edits = buffer
+                .edits_since_in_range::<usize>(&old_buffer.version, range.clone())
+                .collect::<Vec<_>>();
+            log::info!(
+                "applying edits since version {:?} to old text in range {:?}: {:?}: {:?}",
+                old_buffer.version(),
+                start_ix..end_ix,
+                old_text,
+                edits,
+            );
+
+            let new_text = buffer.text_for_range(range).collect::<String>();
+            for edit in edits {
+                old_text.replace_range(
+                    edit.new.start..edit.new.start + edit.old_len(),
+                    &new_text[edit.new],
+                );
+            }
+            assert_eq!(old_text, new_text);
+        }
+
+        let subscription_edits = subscription.consume();
+        log::info!(
+            "applying subscription edits since version {:?} to old text: {:?}: {:?}",
+            old_buffer.version(),
+            old_buffer.text(),
+            subscription_edits,
+        );
+
+        let mut text = old_buffer.visible_text.clone();
+        for edit in subscription_edits.into_inner() {
+            let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+            text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+        }
+        assert_eq!(text.to_string(), buffer.text());
+    }
+}
+
+#[test]
+fn test_line_endings() {
+    assert_eq!(LineEnding::detect(&"🍐✅\n".repeat(1000)), LineEnding::Unix);
+    assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix);
+    assert_eq!(
+        LineEnding::detect(&"🍐✅\r\n".repeat(1000)),
+        LineEnding::Windows
+    );
+    assert_eq!(
+        LineEnding::detect(&"abcd\r\n".repeat(1000)),
+        LineEnding::Windows
+    );
+
+    let mut buffer = Buffer::new(0, 0, "one\r\ntwo\rthree".into());
+    assert_eq!(buffer.text(), "one\ntwo\nthree");
+    assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    buffer.check_invariants();
+
+    buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]);
+    buffer.edit([(0..0, "zero\r\n")]);
+    assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
+    assert_eq!(buffer.line_ending(), LineEnding::Windows);
+    buffer.check_invariants();
+}
+
+#[test]
+fn test_line_len() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abcd\nefg\nhij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs\n")]);
+    buffer.edit([(18..21, "\nPQ")]);
+
+    assert_eq!(buffer.line_len(0), 4);
+    assert_eq!(buffer.line_len(1), 3);
+    assert_eq!(buffer.line_len(2), 5);
+    assert_eq!(buffer.line_len(3), 3);
+    assert_eq!(buffer.line_len(4), 4);
+    assert_eq!(buffer.line_len(5), 0);
+}
+
+#[test]
+fn test_common_prefix_at_position() {
+    let text = "a = str; b = δα";
+    let buffer = Buffer::new(0, 0, text.into());
+
+    let offset1 = offset_after(text, "str");
+    let offset2 = offset_after(text, "δα");
+
+    // the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "string"),
+        range_of(text, "str"),
+    );
+    // a suffix of the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "tree"),
+        range_of(text, "tr"),
+    );
+    // the preceding word is a substring of the suggestion, but not a prefix
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "astro"),
+        empty_range_after(text, "str"),
+    );
+
+    // prefix matching is case insensitive.
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "Strαngε"),
+        range_of(text, "str"),
+    );
+    assert_eq!(
+        buffer.common_prefix_at(offset2, "ΔΑΜΝ"),
+        range_of(text, "δα"),
+    );
+
+    fn offset_after(text: &str, part: &str) -> usize {
+        text.find(part).unwrap() + part.len()
+    }
+
+    fn empty_range_after(text: &str, part: &str) -> Range<usize> {
+        let offset = offset_after(text, part);
+        offset..offset
+    }
+
+    fn range_of(text: &str, part: &str) -> Range<usize> {
+        let start = text.find(part).unwrap();
+        start..start + part.len()
+    }
+}
+
+#[test]
+fn test_text_summary_for_range() {
+    let buffer = Buffer::new(0, 0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz".into());
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(1..3),
+        TextSummary {
+            len: 2,
+            len_utf16: OffsetUtf16(2),
+            lines: Point::new(1, 0),
+            first_line_chars: 1,
+            last_line_chars: 0,
+            last_line_len_utf16: 0,
+            longest_row: 0,
+            longest_row_chars: 1,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(1..12),
+        TextSummary {
+            len: 11,
+            len_utf16: OffsetUtf16(11),
+            lines: Point::new(3, 0),
+            first_line_chars: 1,
+            last_line_chars: 0,
+            last_line_len_utf16: 0,
+            longest_row: 2,
+            longest_row_chars: 4,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(0..20),
+        TextSummary {
+            len: 20,
+            len_utf16: OffsetUtf16(20),
+            lines: Point::new(4, 1),
+            first_line_chars: 2,
+            last_line_chars: 1,
+            last_line_len_utf16: 1,
+            longest_row: 3,
+            longest_row_chars: 6,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(0..22),
+        TextSummary {
+            len: 22,
+            len_utf16: OffsetUtf16(22),
+            lines: Point::new(4, 3),
+            first_line_chars: 2,
+            last_line_chars: 3,
+            last_line_len_utf16: 3,
+            longest_row: 3,
+            longest_row_chars: 6,
+        }
+    );
+    assert_eq!(
+        buffer.text_summary_for_range::<TextSummary, _>(7..22),
+        TextSummary {
+            len: 15,
+            len_utf16: OffsetUtf16(15),
+            lines: Point::new(2, 3),
+            first_line_chars: 4,
+            last_line_chars: 3,
+            last_line_len_utf16: 3,
+            longest_row: 1,
+            longest_row_chars: 6,
+        }
+    );
+}
+
+#[test]
+fn test_chars_at() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abcd\nefgh\nij")]);
+    buffer.edit([(12..12, "kl\nmno")]);
+    buffer.edit([(18..18, "\npqrs")]);
+    buffer.edit([(18..21, "\nPQ")]);
+
+    let chars = buffer.chars_at(Point::new(0, 0));
+    assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(1, 0));
+    assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(2, 0));
+    assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(3, 0));
+    assert_eq!(chars.collect::<String>(), "mno\nPQrs");
+
+    let chars = buffer.chars_at(Point::new(4, 0));
+    assert_eq!(chars.collect::<String>(), "PQrs");
+
+    // Regression test:
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "[workspace]\nmembers = [\n    \"xray_core\",\n    \"xray_server\",\n    \"xray_cli\",\n    \"xray_wasm\",\n]\n")]);
+    buffer.edit([(60..60, "\n")]);
+
+    let chars = buffer.chars_at(Point::new(6, 0));
+    assert_eq!(chars.collect::<String>(), "    \"xray_wasm\",\n]\n");
+}
+
+#[test]
+fn test_anchors() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    buffer.edit([(0..0, "abc")]);
+    let left_anchor = buffer.anchor_before(2);
+    let right_anchor = buffer.anchor_after(2);
+
+    buffer.edit([(1..1, "def\n")]);
+    assert_eq!(buffer.text(), "adef\nbc");
+    assert_eq!(left_anchor.to_offset(&buffer), 6);
+    assert_eq!(right_anchor.to_offset(&buffer), 6);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+    buffer.edit([(2..3, "")]);
+    assert_eq!(buffer.text(), "adf\nbc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 5);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+    buffer.edit([(5..5, "ghi\n")]);
+    assert_eq!(buffer.text(), "adf\nbghi\nc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 9);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
+
+    buffer.edit([(7..9, "")]);
+    assert_eq!(buffer.text(), "adf\nbghc");
+    assert_eq!(left_anchor.to_offset(&buffer), 5);
+    assert_eq!(right_anchor.to_offset(&buffer), 7);
+    assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },);
+    assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 });
+
+    // Ensure anchoring to a point is equivalent to anchoring to an offset.
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 0 }),
+        buffer.anchor_before(0)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 1 }),
+        buffer.anchor_before(1)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 2 }),
+        buffer.anchor_before(2)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 0, column: 3 }),
+        buffer.anchor_before(3)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 0 }),
+        buffer.anchor_before(4)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 1 }),
+        buffer.anchor_before(5)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 2 }),
+        buffer.anchor_before(6)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 3 }),
+        buffer.anchor_before(7)
+    );
+    assert_eq!(
+        buffer.anchor_before(Point { row: 1, column: 4 }),
+        buffer.anchor_before(8)
+    );
+
+    // Comparison between anchors.
+    let anchor_at_offset_0 = buffer.anchor_before(0);
+    let anchor_at_offset_1 = buffer.anchor_before(1);
+    let anchor_at_offset_2 = buffer.anchor_before(2);
+
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Equal
+    );
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Equal
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Equal
+    );
+
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Less
+    );
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Less
+    );
+    assert_eq!(
+        anchor_at_offset_0.cmp(&anchor_at_offset_2, &buffer),
+        Ordering::Less
+    );
+
+    assert_eq!(
+        anchor_at_offset_1.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Greater
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_1, &buffer),
+        Ordering::Greater
+    );
+    assert_eq!(
+        anchor_at_offset_2.cmp(&anchor_at_offset_0, &buffer),
+        Ordering::Greater
+    );
+}
+
+#[test]
+fn test_anchors_at_start_and_end() {
+    let mut buffer = Buffer::new(0, 0, "".into());
+    let before_start_anchor = buffer.anchor_before(0);
+    let after_end_anchor = buffer.anchor_after(0);
+
+    buffer.edit([(0..0, "abc")]);
+    assert_eq!(buffer.text(), "abc");
+    assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+    assert_eq!(after_end_anchor.to_offset(&buffer), 3);
+
+    let after_start_anchor = buffer.anchor_after(0);
+    let before_end_anchor = buffer.anchor_before(3);
+
+    buffer.edit([(3..3, "def")]);
+    buffer.edit([(0..0, "ghi")]);
+    assert_eq!(buffer.text(), "ghiabcdef");
+    assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+    assert_eq!(after_start_anchor.to_offset(&buffer), 3);
+    assert_eq!(before_end_anchor.to_offset(&buffer), 6);
+    assert_eq!(after_end_anchor.to_offset(&buffer), 9);
+}
+
+#[test]
+fn test_undo_redo() {
+    let mut buffer = Buffer::new(0, 0, "1234".into());
+    // Set the group interval to zero so that edits are not grouped in the undo stack.
+    buffer.set_group_interval(Duration::from_secs(0));
+
+    buffer.edit([(1..1, "abx")]);
+    buffer.edit([(3..4, "yzef")]);
+    buffer.edit([(3..5, "cd")]);
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    let entries = buffer.history.undo_stack.clone();
+    assert_eq!(entries.len(), 3);
+
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1cdef234");
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdx234");
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abx234");
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abyzef234");
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abcdef234");
+
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1abyzef234");
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1yzef234");
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+    assert_eq!(buffer.text(), "1234");
+}
+
+#[test]
+fn test_history() {
+    let mut now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "123456".into());
+    buffer.set_group_interval(Duration::from_millis(300));
+
+    let transaction_1 = buffer.start_transaction_at(now).unwrap();
+    buffer.edit([(2..4, "cd")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(4..5, "e")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "12cde6");
+
+    now += buffer.transaction_group_interval() + Duration::from_millis(1);
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    // Last transaction happened past the group interval, undo it on its own.
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // First two transactions happened within the group interval, undo them together.
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    // Redo the first two transactions together.
+    buffer.redo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // Redo the last transaction on its own.
+    buffer.redo();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    buffer.start_transaction_at(now);
+    assert!(buffer.end_transaction_at(now).is_none());
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+
+    // Redo stack gets cleared after performing an edit.
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..0, "X")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cde6");
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    // Transactions can be grouped manually.
+    buffer.redo();
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+    buffer.group_until_transaction(transaction_1);
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+    buffer.redo();
+    assert_eq!(buffer.text(), "X12cde6");
+}
+
+#[test]
+fn test_finalize_last_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "123456".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.finalize_last_transaction();
+    buffer.start_transaction_at(now);
+    buffer.edit([(4..5, "e")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "12cde6");
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "ab2cde6");
+}
+
+#[test]
+fn test_edited_ranges_for_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "1234567".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(6..6, "efg")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56efg7");
+
+    let tx = buffer.finalize_last_transaction().unwrap().clone();
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 6..9]
+    );
+
+    buffer.edit([(5..5, "hijk")]);
+    assert_eq!(buffer.text(), "12cd5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 10..13]
+    );
+
+    buffer.edit([(4..4, "l")]);
+    assert_eq!(buffer.text(), "12cdl5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 11..14]
+    );
+}
+
+#[test]
+fn test_concurrent_edits() {
+    let text = "abcdef";
+
+    let mut buffer1 = Buffer::new(1, 0, text.into());
+    let mut buffer2 = Buffer::new(2, 0, text.into());
+    let mut buffer3 = Buffer::new(3, 0, text.into());
+
+    let buf1_op = buffer1.edit([(1..2, "12")]);
+    assert_eq!(buffer1.text(), "a12cdef");
+    let buf2_op = buffer2.edit([(3..4, "34")]);
+    assert_eq!(buffer2.text(), "abc34ef");
+    let buf3_op = buffer3.edit([(5..6, "56")]);
+    assert_eq!(buffer3.text(), "abcde56");
+
+    buffer1.apply_op(buf2_op.clone()).unwrap();
+    buffer1.apply_op(buf3_op.clone()).unwrap();
+    buffer2.apply_op(buf1_op.clone()).unwrap();
+    buffer2.apply_op(buf3_op).unwrap();
+    buffer3.apply_op(buf1_op).unwrap();
+    buffer3.apply_op(buf2_op).unwrap();
+
+    assert_eq!(buffer1.text(), "a12c34e56");
+    assert_eq!(buffer2.text(), "a12c34e56");
+    assert_eq!(buffer3.text(), "a12c34e56");
+}
+
+#[gpui2::test(iterations = 100)]
+fn test_random_concurrent_edits(mut rng: StdRng) {
+    let peers = env::var("PEERS")
+        .map(|i| i.parse().expect("invalid `PEERS` variable"))
+        .unwrap_or(5);
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let base_text_len = rng.gen_range(0..10);
+    let base_text = RandomCharIter::new(&mut rng)
+        .take(base_text_len)
+        .collect::<String>();
+    let mut replica_ids = Vec::new();
+    let mut buffers = Vec::new();
+    let mut network = Network::new(rng.clone());
+
+    for i in 0..peers {
+        let mut buffer = Buffer::new(i as ReplicaId, 0, base_text.clone());
+        buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
+        buffers.push(buffer);
+        replica_ids.push(i as u16);
+        network.add_peer(i as u16);
+    }
+
+    log::info!("initial text: {:?}", base_text);
+
+    let mut mutation_count = operations;
+    loop {
+        let replica_index = rng.gen_range(0..peers);
+        let replica_id = replica_ids[replica_index];
+        let buffer = &mut buffers[replica_index];
+        match rng.gen_range(0..=100) {
+            0..=50 if mutation_count != 0 => {
+                let op = buffer.randomly_edit(&mut rng, 5).1;
+                network.broadcast(buffer.replica_id, vec![op]);
+                log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
+                mutation_count -= 1;
+            }
+            51..=70 if mutation_count != 0 => {
+                let ops = buffer.randomly_undo_redo(&mut rng);
+                network.broadcast(buffer.replica_id, ops);
+                mutation_count -= 1;
+            }
+            71..=100 if network.has_unreceived(replica_id) => {
+                let ops = network.receive(replica_id);
+                if !ops.is_empty() {
+                    log::info!(
+                        "peer {} applying {} ops from the network.",
+                        replica_id,
+                        ops.len()
+                    );
+                    buffer.apply_ops(ops).unwrap();
+                }
+            }
+            _ => {}
+        }
+        buffer.check_invariants();
+
+        if mutation_count == 0 && network.is_idle() {
+            break;
+        }
+    }
+
+    let first_buffer = &buffers[0];
+    for buffer in &buffers[1..] {
+        assert_eq!(
+            buffer.text(),
+            first_buffer.text(),
+            "Replica {} text != Replica 0 text",
+            buffer.replica_id
+        );
+        buffer.check_invariants();
+    }
+}

crates/text2/src/text2.rs

@@ -0,0 +1,2682 @@
+mod anchor;
+pub mod locator;
+#[cfg(any(test, feature = "test-support"))]
+pub mod network;
+pub mod operation_queue;
+mod patch;
+mod selection;
+pub mod subscription;
+#[cfg(test)]
+mod tests;
+mod undo_map;
+
+pub use anchor::*;
+use anyhow::{anyhow, Result};
+pub use clock::ReplicaId;
+use collections::{HashMap, HashSet};
+use locator::Locator;
+use operation_queue::OperationQueue;
+pub use patch::Patch;
+use postage::{oneshot, prelude::*};
+
+use lazy_static::lazy_static;
+use regex::Regex;
+pub use rope::*;
+pub use selection::*;
+use std::{
+    borrow::Cow,
+    cmp::{self, Ordering, Reverse},
+    future::Future,
+    iter::Iterator,
+    ops::{self, Deref, Range, Sub},
+    str,
+    sync::Arc,
+    time::{Duration, Instant},
+};
+pub use subscription::*;
+pub use sum_tree::Bias;
+use sum_tree::{FilterCursor, SumTree, TreeMap};
+use undo_map::UndoMap;
+use util::ResultExt;
+
+#[cfg(any(test, feature = "test-support"))]
+use util::RandomCharIter;
+
+lazy_static! {
+    static ref LINE_SEPARATORS_REGEX: Regex = Regex::new("\r\n|\r|\u{2028}|\u{2029}").unwrap();
+}
+
+pub type TransactionId = clock::Lamport;
+
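+/// A replicated text buffer: it applies local edits, integrates operations
+/// produced by other replicas, and records history for undo/redo and
+/// transaction grouping.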
+pub struct Buffer {
+    snapshot: BufferSnapshot,
+    history: History,
+    deferred_ops: OperationQueue<Operation>,
+    deferred_replicas: HashSet<ReplicaId>,
+    pub lamport_clock: clock::Lamport,
+    subscriptions: Topic,
+    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
+    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
+}
+
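+/// An immutable snapshot of a buffer's visible and deleted text at a specific
+/// version. Cloning is cheap because the underlying ropes and trees are shared.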
+#[derive(Clone)]
+pub struct BufferSnapshot {
+    replica_id: ReplicaId,
+    remote_id: u64,
+    visible_text: Rope,
+    deleted_text: Rope,
+    line_ending: LineEnding,
+    undo_map: UndoMap,
+    fragments: SumTree<Fragment>,
+    insertions: SumTree<InsertionFragment>,
+    pub version: clock::Global,
+}
+
+#[derive(Clone, Debug)]
+pub struct HistoryEntry {
+    transaction: Transaction,
+    first_edit_at: Instant,
+    last_edit_at: Instant,
+    suppress_grouping: bool,
+}
+
+#[derive(Clone, Debug)]
+pub struct Transaction {
+    pub id: TransactionId,
+    pub edit_ids: Vec<clock::Lamport>,
+    pub start: clock::Global,
+}
+
+impl HistoryEntry {
+    pub fn transaction_id(&self) -> TransactionId {
+        self.transaction.id
+    }
+}
+
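+/// Undo/redo state: every operation seen so far, the undo and redo stacks of
+/// grouped transactions, and the interval within which consecutive edits are
+/// grouped into a single transaction.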
+struct History {
+    base_text: Rope,
+    operations: TreeMap<clock::Lamport, Operation>,
+    insertion_slices: HashMap<clock::Lamport, Vec<InsertionSlice>>,
+    undo_stack: Vec<HistoryEntry>,
+    redo_stack: Vec<HistoryEntry>,
+    transaction_depth: usize,
+    group_interval: Duration,
+}
+
+#[derive(Clone, Debug)]
+struct InsertionSlice {
+    insertion_id: clock::Lamport,
+    range: Range<usize>,
+}
+
+impl History {
+    pub fn new(base_text: Rope) -> Self {
+        Self {
+            base_text,
+            operations: Default::default(),
+            insertion_slices: Default::default(),
+            undo_stack: Vec::new(),
+            redo_stack: Vec::new(),
+            transaction_depth: 0,
+            // Don't group transactions in tests unless we opt in, because it's a footgun.
+            #[cfg(any(test, feature = "test-support"))]
+            group_interval: Duration::ZERO,
+            #[cfg(not(any(test, feature = "test-support")))]
+            group_interval: Duration::from_millis(300),
+        }
+    }
+
+    fn push(&mut self, op: Operation) {
+        self.operations.insert(op.timestamp(), op);
+    }
+
+    fn start_transaction(
+        &mut self,
+        start: clock::Global,
+        now: Instant,
+        clock: &mut clock::Lamport,
+    ) -> Option<TransactionId> {
+        self.transaction_depth += 1;
+        if self.transaction_depth == 1 {
+            let id = clock.tick();
+            self.undo_stack.push(HistoryEntry {
+                transaction: Transaction {
+                    id,
+                    start,
+                    edit_ids: Default::default(),
+                },
+                first_edit_at: now,
+                last_edit_at: now,
+                suppress_grouping: false,
+            });
+            Some(id)
+        } else {
+            None
+        }
+    }
+
+    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
+        assert_ne!(self.transaction_depth, 0);
+        self.transaction_depth -= 1;
+        if self.transaction_depth == 0 {
+            if self
+                .undo_stack
+                .last()
+                .unwrap()
+                .transaction
+                .edit_ids
+                .is_empty()
+            {
+                self.undo_stack.pop();
+                None
+            } else {
+                self.redo_stack.clear();
+                let entry = self.undo_stack.last_mut().unwrap();
+                entry.last_edit_at = now;
+                Some(entry)
+            }
+        } else {
+            None
+        }
+    }
+
+    fn group(&mut self) -> Option<TransactionId> {
+        let mut count = 0;
+        let mut entries = self.undo_stack.iter();
+        if let Some(mut entry) = entries.next_back() {
+            while let Some(prev_entry) = entries.next_back() {
+                if !prev_entry.suppress_grouping
+                    && entry.first_edit_at - prev_entry.last_edit_at <= self.group_interval
+                {
+                    entry = prev_entry;
+                    count += 1;
+                } else {
+                    break;
+                }
+            }
+        }
+        self.group_trailing(count)
+    }
+
+    fn group_until(&mut self, transaction_id: TransactionId) {
+        let mut count = 0;
+        for entry in self.undo_stack.iter().rev() {
+            if entry.transaction_id() == transaction_id {
+                self.group_trailing(count);
+                break;
+            } else if entry.suppress_grouping {
+                break;
+            } else {
+                count += 1;
+            }
+        }
+    }
+
+    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
+        let new_len = self.undo_stack.len() - n;
+        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
+        if let Some(last_entry) = entries_to_keep.last_mut() {
+            for entry in &*entries_to_merge {
+                for edit_id in &entry.transaction.edit_ids {
+                    last_entry.transaction.edit_ids.push(*edit_id);
+                }
+            }
+
+            if let Some(entry) = entries_to_merge.last_mut() {
+                last_entry.last_edit_at = entry.last_edit_at;
+            }
+        }
+
+        self.undo_stack.truncate(new_len);
+        self.undo_stack.last().map(|e| e.transaction.id)
+    }
+
+    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.undo_stack.last_mut().map(|entry| {
+            entry.suppress_grouping = true;
+            &entry.transaction
+        })
+    }
+
+    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        assert_eq!(self.transaction_depth, 0);
+        self.undo_stack.push(HistoryEntry {
+            transaction,
+            first_edit_at: now,
+            last_edit_at: now,
+            suppress_grouping: false,
+        });
+        self.redo_stack.clear();
+    }
+
+    fn push_undo(&mut self, op_id: clock::Lamport) {
+        assert_ne!(self.transaction_depth, 0);
+        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
+            let last_transaction = self.undo_stack.last_mut().unwrap();
+            last_transaction.transaction.edit_ids.push(op_id);
+        }
+    }
+
+    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.undo_stack.pop() {
+            self.redo_stack.push(entry);
+            self.redo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+
+        let entry_ix = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)?;
+        let entry = self.undo_stack.remove(entry_ix);
+        self.redo_stack.push(entry);
+        self.redo_stack.last()
+    }
+
+    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let redo_stack_start_len = self.redo_stack.len();
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.redo_stack
+                .extend(self.undo_stack.drain(entry_ix..).rev());
+        }
+        &self.redo_stack[redo_stack_start_len..]
+    }
+
+    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.undo_stack.remove(entry_ix).transaction)
+        } else if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.redo_stack.remove(entry_ix).transaction)
+        } else {
+            None
+        }
+    }
+
+    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
+        let entry = self
+            .undo_stack
+            .iter_mut()
+            .rfind(|entry| entry.transaction.id == transaction_id)
+            .or_else(|| {
+                self.redo_stack
+                    .iter_mut()
+                    .rfind(|entry| entry.transaction.id == transaction_id)
+            })?;
+        Some(&mut entry.transaction)
+    }
+
+    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+        if let Some(transaction) = self.forget(transaction) {
+            if let Some(destination) = self.transaction_mut(destination) {
+                destination.edit_ids.extend(transaction.edit_ids);
+            }
+        }
+    }
+
+    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.redo_stack.pop() {
+            self.undo_stack.push(entry);
+            self.undo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let undo_stack_start_len = self.undo_stack.len();
+        if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.undo_stack
+                .extend(self.redo_stack.drain(entry_ix..).rev());
+        }
+        &self.undo_stack[undo_stack_start_len..]
+    }
+}
+
+struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
+    visible_cursor: rope::Cursor<'a>,
+    deleted_cursor: rope::Cursor<'a>,
+    fragments_cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
+    undos: &'a UndoMap,
+    since: &'a clock::Global,
+    old_end: D,
+    new_end: D,
+    range: Range<(&'a Locator, usize)>,
+    buffer_id: u64,
+}
+
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub struct Edit<D> {
+    pub old: Range<D>,
+    pub new: Range<D>,
+}
+
+impl<D> Edit<D>
+where
+    D: Sub<D, Output = D> + PartialEq + Copy,
+{
+    pub fn old_len(&self) -> D {
+        self.old.end - self.old.start
+    }
+
+    pub fn new_len(&self) -> D {
+        self.new.end - self.new.start
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.old.start == self.old.end && self.new.start == self.new.end
+    }
+}
+
+impl<D1, D2> Edit<(D1, D2)> {
+    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
+        (
+            Edit {
+                old: self.old.start.0..self.old.end.0,
+                new: self.new.start.0..self.new.end.0,
+            },
+            Edit {
+                old: self.old.start.1..self.old.end.1,
+                new: self.new.start.1..self.new.end.1,
+            },
+        )
+    }
+}
+
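+/// A contiguous piece of a single insertion, ordered within the buffer by its
+/// `Locator`. Deleted text is retained as invisible fragments (tombstones) so
+/// that concurrent operations can still be resolved against it.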
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct Fragment {
+    pub id: Locator,
+    pub timestamp: clock::Lamport,
+    pub insertion_offset: usize,
+    pub len: usize,
+    pub visible: bool,
+    pub deletions: HashSet<clock::Lamport>,
+    pub max_undos: clock::Global,
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct FragmentSummary {
+    text: FragmentTextSummary,
+    max_id: Locator,
+    max_version: clock::Global,
+    min_insertion_version: clock::Global,
+    max_insertion_version: clock::Global,
+}
+
+#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
+struct FragmentTextSummary {
+    visible: usize,
+    deleted: usize,
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+        self.visible += summary.text.visible;
+        self.deleted += summary.text.deleted;
+    }
+}
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+struct InsertionFragment {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+    fragment_id: Locator,
+}
+
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct InsertionFragmentKey {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+}
+
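+/// An operation that can be sent to other replicas: either an edit or an undo,
+/// each stamped with a lamport timestamp and the version it was created against.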
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum Operation {
+    Edit(EditOperation),
+    Undo(UndoOperation),
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct EditOperation {
+    pub timestamp: clock::Lamport,
+    pub version: clock::Global,
+    pub ranges: Vec<Range<FullOffset>>,
+    pub new_text: Vec<Arc<str>>,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct UndoOperation {
+    pub timestamp: clock::Lamport,
+    pub version: clock::Global,
+    pub counts: HashMap<clock::Lamport, u32>,
+}
+
+impl Buffer {
+    pub fn new(replica_id: u16, remote_id: u64, mut base_text: String) -> Buffer {
+        let line_ending = LineEnding::detect(&base_text);
+        LineEnding::normalize(&mut base_text);
+
+        let history = History::new(Rope::from(base_text.as_ref()));
+        let mut fragments = SumTree::new();
+        let mut insertions = SumTree::new();
+
+        let mut lamport_clock = clock::Lamport::new(replica_id);
+        let mut version = clock::Global::new();
+
+        let visible_text = history.base_text.clone();
+        if !visible_text.is_empty() {
+            let insertion_timestamp = clock::Lamport {
+                replica_id: 0,
+                value: 1,
+            };
+            lamport_clock.observe(insertion_timestamp);
+            version.observe(insertion_timestamp);
+            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
+            let fragment = Fragment {
+                id: fragment_id,
+                timestamp: insertion_timestamp,
+                insertion_offset: 0,
+                len: visible_text.len(),
+                visible: true,
+                deletions: Default::default(),
+                max_undos: Default::default(),
+            };
+            insertions.push(InsertionFragment::new(&fragment), &());
+            fragments.push(fragment, &None);
+        }
+
+        Buffer {
+            snapshot: BufferSnapshot {
+                replica_id,
+                remote_id,
+                visible_text,
+                deleted_text: Rope::new(),
+                line_ending,
+                fragments,
+                insertions,
+                version,
+                undo_map: Default::default(),
+            },
+            history,
+            deferred_ops: OperationQueue::new(),
+            deferred_replicas: HashSet::default(),
+            lamport_clock,
+            subscriptions: Default::default(),
+            edit_id_resolvers: Default::default(),
+            wait_for_version_txs: Default::default(),
+        }
+    }
+
+    pub fn version(&self) -> clock::Global {
+        self.version.clone()
+    }
+
+    pub fn snapshot(&self) -> BufferSnapshot {
+        self.snapshot.clone()
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.lamport_clock.replica_id
+    }
+
+    pub fn remote_id(&self) -> u64 {
+        self.remote_id
+    }
+
+    pub fn deferred_ops_len(&self) -> usize {
+        self.deferred_ops.len()
+    }
+
+    pub fn transaction_group_interval(&self) -> Duration {
+        self.history.group_interval
+    }
+
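+    /// Applies a batch of local edits, expressed as `(range, new_text)` pairs, and
+    /// returns the resulting operation so it can be broadcast to other replicas.
+    ///
+    /// A minimal usage sketch (offsets and text are illustrative):
+    ///
+    /// ```ignore
+    /// let mut buffer = Buffer::new(0, 1, "hello".into());
+    /// let op = buffer.edit([(5..5, " world")]);
+    /// assert_eq!(buffer.text(), "hello world");
+    /// // `op` can now be applied on other replicas via `apply_ops`.
+    /// ```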
+    pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
+    where
+        R: IntoIterator<IntoIter = I>,
+        I: ExactSizeIterator<Item = (Range<S>, T)>,
+        S: ToOffset,
+        T: Into<Arc<str>>,
+    {
+        let edits = edits
+            .into_iter()
+            .map(|(range, new_text)| (range, new_text.into()));
+
+        self.start_transaction();
+        let timestamp = self.lamport_clock.tick();
+        let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
+
+        self.history.push(operation.clone());
+        self.history.push_undo(operation.timestamp());
+        self.snapshot.version.observe(operation.timestamp());
+        self.end_transaction();
+        operation
+    }
+
+    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
+        &mut self,
+        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
+        timestamp: clock::Lamport,
+    ) -> EditOperation {
+        let mut edits_patch = Patch::default();
+        let mut edit_op = EditOperation {
+            timestamp,
+            version: self.version(),
+            ranges: Vec::with_capacity(edits.len()),
+            new_text: Vec::with_capacity(edits.len()),
+        };
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
+        let mut insertion_slices = Vec::new();
+
+        let mut edits = edits
+            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
+            .peekable();
+
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
+        let mut new_fragments =
+            old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
+        new_ropes.append(new_fragments.summary().text);
+
+        let mut fragment_start = old_fragments.start().visible;
+        for (range, new_text) in edits {
+            let new_text = LineEnding::normalize_arc(new_text.into());
+            let fragment_end = old_fragments.end(&None).visible;
+
+            // If the current fragment ends before this range, then jump ahead to the first fragment
+            // that extends past the start of this range, reusing any intervening fragments.
+            if fragment_end < range.start {
+                // If the current fragment has been partially consumed, then consume the rest of it
+                // and advance to the next fragment before slicing.
+                if fragment_start > old_fragments.start().visible {
+                    if fragment_end > fragment_start {
+                        let mut suffix = old_fragments.item().unwrap().clone();
+                        suffix.len = fragment_end - fragment_start;
+                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
+                        new_ropes.push_fragment(&suffix, suffix.visible);
+                        new_fragments.push(suffix, &None);
+                    }
+                    old_fragments.next(&None);
+                }
+
+                let slice = old_fragments.slice(&range.start, Bias::Right, &None);
+                new_ropes.append(slice.summary().text);
+                new_fragments.append(slice, &None);
+                fragment_start = old_fragments.start().visible;
+            }
+
+            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
+
+            // Preserve any portion of the current fragment that precedes this range.
+            if fragment_start < range.start {
+                let mut prefix = old_fragments.item().unwrap().clone();
+                prefix.len = range.start - fragment_start;
+                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
+                new_ropes.push_fragment(&prefix, prefix.visible);
+                new_fragments.push(prefix, &None);
+                fragment_start = range.start;
+            }
+
+            // Insert the new text before any existing fragments within the range.
+            if !new_text.is_empty() {
+                let new_start = new_fragments.summary().text.visible;
+
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                edits_patch.push(Edit {
+                    old: fragment_start..fragment_start,
+                    new: new_start..new_start + new_text.len(),
+                });
+                insertion_slices.push(fragment.insertion_slice());
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_str(new_text.as_ref());
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
+            }
+
+            // Advance through every fragment that intersects this range, marking the intersecting
+            // portions as deleted.
+            while fragment_start < range.end {
+                let fragment = old_fragments.item().unwrap();
+                let fragment_end = old_fragments.end(&None).visible;
+                let mut intersection = fragment.clone();
+                let intersection_end = cmp::min(range.end, fragment_end);
+                if fragment.visible {
+                    intersection.len = intersection_end - fragment_start;
+                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
+                    intersection.deletions.insert(timestamp);
+                    intersection.visible = false;
+                }
+                if intersection.len > 0 {
+                    if fragment.visible && !intersection.visible {
+                        let new_start = new_fragments.summary().text.visible;
+                        edits_patch.push(Edit {
+                            old: fragment_start..intersection_end,
+                            new: new_start..new_start,
+                        });
+                        insertion_slices.push(intersection.insertion_slice());
+                    }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
+                    new_ropes.push_fragment(&intersection, fragment.visible);
+                    new_fragments.push(intersection, &None);
+                    fragment_start = intersection_end;
+                }
+                if fragment_end <= range.end {
+                    old_fragments.next(&None);
+                }
+            }
+
+            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
+            edit_op.ranges.push(full_range_start..full_range_end);
+            edit_op.new_text.push(new_text);
+        }
+
+        // If the current fragment has been partially consumed, then consume the rest of it
+        // and advance to the next fragment before slicing.
+        if fragment_start > old_fragments.start().visible {
+            let fragment_end = old_fragments.end(&None).visible;
+            if fragment_end > fragment_start {
+                let mut suffix = old_fragments.item().unwrap().clone();
+                suffix.len = fragment_end - fragment_start;
+                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
+                new_ropes.push_fragment(&suffix, suffix.visible);
+                new_fragments.push(suffix, &None);
+            }
+            old_fragments.next(&None);
+        }
+
+        let suffix = old_fragments.suffix(&None);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        drop(old_fragments);
+
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.insertions.edit(new_insertions, &());
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.subscriptions.publish_mut(&edits_patch);
+        self.history
+            .insertion_slices
+            .insert(timestamp, insertion_slices);
+        edit_op
+    }
+
+    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
+        self.snapshot.line_ending = line_ending;
+    }
+
+    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> {
+        let mut deferred_ops = Vec::new();
+        for op in ops {
+            self.history.push(op.clone());
+            if self.can_apply_op(&op) {
+                self.apply_op(op)?;
+            } else {
+                self.deferred_replicas.insert(op.replica_id());
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+        self.flush_deferred_ops()?;
+        Ok(())
+    }
+
+    fn apply_op(&mut self, op: Operation) -> Result<()> {
+        match op {
+            Operation::Edit(edit) => {
+                if !self.version.observed(edit.timestamp) {
+                    self.apply_remote_edit(
+                        &edit.version,
+                        &edit.ranges,
+                        &edit.new_text,
+                        edit.timestamp,
+                    );
+                    self.snapshot.version.observe(edit.timestamp);
+                    self.lamport_clock.observe(edit.timestamp);
+                    self.resolve_edit(edit.timestamp);
+                }
+            }
+            Operation::Undo(undo) => {
+                if !self.version.observed(undo.timestamp) {
+                    self.apply_undo(&undo)?;
+                    self.snapshot.version.observe(undo.timestamp);
+                    self.lamport_clock.observe(undo.timestamp);
+                }
+            }
+        }
+        self.wait_for_version_txs.retain_mut(|(version, tx)| {
+            if self.snapshot.version().observed_all(version) {
+                tx.try_send(()).ok();
+                false
+            } else {
+                true
+            }
+        });
+        Ok(())
+    }
+
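+    /// Integrates an edit produced on another replica. The logic mirrors
+    /// `apply_local_edit`, except that the edit's ranges are interpreted against
+    /// the version it was created at (`VersionedFullOffset`) rather than against
+    /// the current visible text.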
+    fn apply_remote_edit(
+        &mut self,
+        version: &clock::Global,
+        ranges: &[Range<FullOffset>],
+        new_text: &[Arc<str>],
+        timestamp: clock::Lamport,
+    ) {
+        if ranges.is_empty() {
+            return;
+        }
+
+        let edits = ranges.iter().zip(new_text.iter());
+        let mut edits_patch = Patch::default();
+        let mut insertion_slices = Vec::new();
+        let cx = Some(version.clone());
+        let mut new_insertions = Vec::new();
+        let mut insertion_offset = 0;
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+        let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
+        let mut new_fragments = old_fragments.slice(
+            &VersionedFullOffset::Offset(ranges[0].start),
+            Bias::Left,
+            &cx,
+        );
+        new_ropes.append(new_fragments.summary().text);
+
+        let mut fragment_start = old_fragments.start().0.full_offset();
+        for (range, new_text) in edits {
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+
+            // If the current fragment ends before this range, then jump ahead to the first fragment
+            // that extends past the start of this range, reusing any intervening fragments.
+            if fragment_end < range.start {
+                // If the current fragment has been partially consumed, then consume the rest of it
+                // and advance to the next fragment before slicing.
+                if fragment_start > old_fragments.start().0.full_offset() {
+                    if fragment_end > fragment_start {
+                        let mut suffix = old_fragments.item().unwrap().clone();
+                        suffix.len = fragment_end.0 - fragment_start.0;
+                        suffix.insertion_offset +=
+                            fragment_start - old_fragments.start().0.full_offset();
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
+                        new_ropes.push_fragment(&suffix, suffix.visible);
+                        new_fragments.push(suffix, &None);
+                    }
+                    old_fragments.next(&cx);
+                }
+
+                let slice =
+                    old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
+                new_ropes.append(slice.summary().text);
+                new_fragments.append(slice, &None);
+                fragment_start = old_fragments.start().0.full_offset();
+            }
+
+            // If we are at the end of a non-concurrent fragment, advance to the next one.
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+            if fragment_end == range.start && fragment_end > fragment_start {
+                let mut fragment = old_fragments.item().unwrap().clone();
+                fragment.len = fragment_end.0 - fragment_start.0;
+                fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_fragment(&fragment, fragment.visible);
+                new_fragments.push(fragment, &None);
+                old_fragments.next(&cx);
+                fragment_start = old_fragments.start().0.full_offset();
+            }
+
+            // Skip over insertions that are concurrent to this edit but have a higher lamport
+            // timestamp, so that they keep their position before the newly inserted text.
+            while let Some(fragment) = old_fragments.item() {
+                if fragment_start == range.start && fragment.timestamp > timestamp {
+                    new_ropes.push_fragment(fragment, fragment.visible);
+                    new_fragments.push(fragment.clone(), &None);
+                    old_fragments.next(&cx);
+                    debug_assert_eq!(fragment_start, range.start);
+                } else {
+                    break;
+                }
+            }
+            debug_assert!(fragment_start <= range.start);
+
+            // Preserve any portion of the current fragment that precedes this range.
+            if fragment_start < range.start {
+                let mut prefix = old_fragments.item().unwrap().clone();
+                prefix.len = range.start.0 - fragment_start.0;
+                prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
+                fragment_start = range.start;
+                new_ropes.push_fragment(&prefix, prefix.visible);
+                new_fragments.push(prefix, &None);
+            }
+
+            // Insert the new text before any existing fragments within the range.
+            if !new_text.is_empty() {
+                let mut old_start = old_fragments.start().1;
+                if old_fragments.item().map_or(false, |f| f.visible) {
+                    old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
+                }
+                let new_start = new_fragments.summary().text.visible;
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                edits_patch.push(Edit {
+                    old: old_start..old_start,
+                    new: new_start..new_start + new_text.len(),
+                });
+                insertion_slices.push(fragment.insertion_slice());
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_str(new_text);
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
+            }
+
+            // Advance through every fragment that intersects this range, marking the intersecting
+            // portions as deleted.
+            while fragment_start < range.end {
+                let fragment = old_fragments.item().unwrap();
+                let fragment_end = old_fragments.end(&cx).0.full_offset();
+                let mut intersection = fragment.clone();
+                let intersection_end = cmp::min(range.end, fragment_end);
+                if fragment.was_visible(version, &self.undo_map) {
+                    intersection.len = intersection_end.0 - fragment_start.0;
+                    intersection.insertion_offset +=
+                        fragment_start - old_fragments.start().0.full_offset();
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
+                    intersection.deletions.insert(timestamp);
+                    intersection.visible = false;
+                    insertion_slices.push(intersection.insertion_slice());
+                }
+                if intersection.len > 0 {
+                    if fragment.visible && !intersection.visible {
+                        let old_start = old_fragments.start().1
+                            + (fragment_start.0 - old_fragments.start().0.full_offset().0);
+                        let new_start = new_fragments.summary().text.visible;
+                        edits_patch.push(Edit {
+                            old: old_start..old_start + intersection.len,
+                            new: new_start..new_start,
+                        });
+                    }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
+                    new_ropes.push_fragment(&intersection, fragment.visible);
+                    new_fragments.push(intersection, &None);
+                    fragment_start = intersection_end;
+                }
+                if fragment_end <= range.end {
+                    old_fragments.next(&cx);
+                }
+            }
+        }
+
+        // If the current fragment has been partially consumed, then consume the rest of it
+        // and advance to the next fragment before slicing.
+        if fragment_start > old_fragments.start().0.full_offset() {
+            let fragment_end = old_fragments.end(&cx).0.full_offset();
+            if fragment_end > fragment_start {
+                let mut suffix = old_fragments.item().unwrap().clone();
+                suffix.len = fragment_end.0 - fragment_start.0;
+                suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
+                new_ropes.push_fragment(&suffix, suffix.visible);
+                new_fragments.push(suffix, &None);
+            }
+            old_fragments.next(&cx);
+        }
+
+        let suffix = old_fragments.suffix(&cx);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        drop(old_fragments);
+
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.snapshot.insertions.edit(new_insertions, &());
+        self.history
+            .insertion_slices
+            .insert(timestamp, insertion_slices);
+        self.subscriptions.publish_mut(&edits_patch)
+    }
+
+    fn fragment_ids_for_edits<'a>(
+        &'a self,
+        edit_ids: impl Iterator<Item = &'a clock::Lamport>,
+    ) -> Vec<&'a Locator> {
+        // Get all of the insertion slices changed by the given edits.
+        let mut insertion_slices = Vec::new();
+        for edit_id in edit_ids {
+            if let Some(slices) = self.history.insertion_slices.get(edit_id) {
+                insertion_slices.extend_from_slice(slices)
+            }
+        }
+        insertion_slices
+            .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
+
+        // Get all of the fragments corresponding to these insertion slices.
+        let mut fragment_ids = Vec::new();
+        let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+        for insertion_slice in &insertion_slices {
+            if insertion_slice.insertion_id != insertions_cursor.start().timestamp
+                || insertion_slice.range.start > insertions_cursor.start().split_offset
+            {
+                insertions_cursor.seek_forward(
+                    &InsertionFragmentKey {
+                        timestamp: insertion_slice.insertion_id,
+                        split_offset: insertion_slice.range.start,
+                    },
+                    Bias::Left,
+                    &(),
+                );
+            }
+            while let Some(item) = insertions_cursor.item() {
+                if item.timestamp != insertion_slice.insertion_id
+                    || item.split_offset >= insertion_slice.range.end
+                {
+                    break;
+                }
+                fragment_ids.push(&item.fragment_id);
+                insertions_cursor.next(&());
+            }
+        }
+        fragment_ids.sort_unstable();
+        fragment_ids
+    }
+
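+    /// Applies an undo operation by recording it in the `UndoMap` and recomputing
+    /// the visibility of every fragment touched by the undone edits.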
+    fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> {
+        self.snapshot.undo_map.insert(undo);
+
+        let mut edits = Patch::default();
+        let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let mut new_fragments = SumTree::new();
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+
+        for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
+            let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
+            new_ropes.append(preceding_fragments.summary().text);
+            new_fragments.append(preceding_fragments, &None);
+
+            if let Some(fragment) = old_fragments.item() {
+                let mut fragment = fragment.clone();
+                let fragment_was_visible = fragment.visible;
+
+                fragment.visible = fragment.is_visible(&self.undo_map);
+                fragment.max_undos.observe(undo.timestamp);
+
+                let old_start = old_fragments.start().1;
+                let new_start = new_fragments.summary().text.visible;
+                if fragment_was_visible && !fragment.visible {
+                    edits.push(Edit {
+                        old: old_start..old_start + fragment.len,
+                        new: new_start..new_start,
+                    });
+                } else if !fragment_was_visible && fragment.visible {
+                    edits.push(Edit {
+                        old: old_start..old_start,
+                        new: new_start..new_start + fragment.len,
+                    });
+                }
+                new_ropes.push_fragment(&fragment, fragment_was_visible);
+                new_fragments.push(fragment, &None);
+
+                old_fragments.next(&None);
+            }
+        }
+
+        let suffix = old_fragments.suffix(&None);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+
+        drop(old_fragments);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.subscriptions.publish_mut(&edits);
+        Ok(())
+    }
+
+    fn flush_deferred_ops(&mut self) -> Result<()> {
+        self.deferred_replicas.clear();
+        let mut deferred_ops = Vec::new();
+        for op in self.deferred_ops.drain().iter().cloned() {
+            if self.can_apply_op(&op) {
+                self.apply_op(op)?;
+            } else {
+                self.deferred_replicas.insert(op.replica_id());
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+        Ok(())
+    }
+
+    fn can_apply_op(&self, op: &Operation) -> bool {
+        if self.deferred_replicas.contains(&op.replica_id()) {
+            false
+        } else {
+            self.version.observed_all(match op {
+                Operation::Edit(edit) => &edit.version,
+                Operation::Undo(undo) => &undo.version,
+            })
+        }
+    }
+
+    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
+        self.history.undo_stack.last()
+    }
+
+    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
+        self.history.redo_stack.last()
+    }
+
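+    /// Begins grouping subsequent edits into a single undoable transaction. Calls
+    /// may be nested; only the outermost `start_transaction`/`end_transaction`
+    /// pair pushes an entry onto the undo stack, and only that call returns an id.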
+    pub fn start_transaction(&mut self) -> Option<TransactionId> {
+        self.start_transaction_at(Instant::now())
+    }
+
+    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+        self.history
+            .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
+    }
+
+    pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
+        self.end_transaction_at(Instant::now())
+    }
+
+    pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
+        if let Some(entry) = self.history.end_transaction(now) {
+            let since = entry.transaction.start.clone();
+            let id = self.history.group().unwrap();
+            Some((id, since))
+        } else {
+            None
+        }
+    }
+
+    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.history.finalize_last_transaction()
+    }
+
+    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
+        self.history.group_until(transaction_id);
+    }
+
+    pub fn base_text(&self) -> &Rope {
+        &self.history.base_text
+    }
+
+    pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
+        &self.history.operations
+    }
+
+    pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
+        if let Some(entry) = self.history.pop_undo() {
+            let transaction = entry.transaction.clone();
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
+        }
+    }
+
+    pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
+        let transaction = self
+            .history
+            .remove_from_undo(transaction_id)?
+            .transaction
+            .clone();
+        self.undo_or_redo(transaction).log_err()
+    }
+
+    #[allow(clippy::needless_collect)]
+    pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_undo_until(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
+    }
+
+    pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
+        self.history.forget(transaction_id);
+    }
+
+    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+        self.history.merge_transactions(transaction, destination);
+    }
+
+    pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
+        if let Some(entry) = self.history.pop_redo() {
+            let transaction = entry.transaction.clone();
+            let transaction_id = transaction.id;
+            let op = self.undo_or_redo(transaction).unwrap();
+            Some((transaction_id, op))
+        } else {
+            None
+        }
+    }
+
+    #[allow(clippy::needless_collect)]
+    pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_redo(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
+    }
+
+    fn undo_or_redo(&mut self, transaction: Transaction) -> Result<Operation> {
+        let mut counts = HashMap::default();
+        for edit_id in transaction.edit_ids {
+            counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1);
+        }
+
+        let undo = UndoOperation {
+            timestamp: self.lamport_clock.tick(),
+            version: self.version(),
+            counts,
+        };
+        self.apply_undo(&undo)?;
+        self.snapshot.version.observe(undo.timestamp);
+        let operation = Operation::Undo(undo);
+        self.history.push(operation.clone());
+        Ok(operation)
+    }
+
+    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        self.history.push_transaction(transaction, now);
+        self.history.finalize_last_transaction();
+    }
+
+    pub fn edited_ranges_for_transaction<'a, D>(
+        &'a self,
+        transaction: &'a Transaction,
+    ) -> impl 'a + Iterator<Item = Range<D>>
+    where
+        D: TextDimension,
+    {
+        // Get fragment ranges.
+        let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let offset_ranges = self
+            .fragment_ids_for_edits(transaction.edit_ids.iter())
+            .into_iter()
+            .filter_map(move |fragment_id| {
+                cursor.seek_forward(&Some(fragment_id), Bias::Left, &None);
+                let fragment = cursor.item()?;
+                let start_offset = cursor.start().1;
+                let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
+                Some(start_offset..end_offset)
+            });
+
+        // Combine adjacent ranges.
+        let mut prev_range: Option<Range<usize>> = None;
+        let disjoint_ranges = offset_ranges
+            .map(Some)
+            .chain([None])
+            .filter_map(move |range| {
+                if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut()) {
+                    if prev_range.end == range.start {
+                        prev_range.end = range.end;
+                        return None;
+                    }
+                }
+                let result = prev_range.clone();
+                prev_range = range;
+                result
+            });
+
+        // Convert to the desired text dimension.
+        let mut position = D::default();
+        let mut rope_cursor = self.visible_text.cursor(0);
+        disjoint_ranges.map(move |range| {
+            position.add_assign(&rope_cursor.summary(range.start));
+            let start = position.clone();
+            position.add_assign(&rope_cursor.summary(range.end));
+            let end = position.clone();
+            start..end
+        })
+    }
+
+    pub fn subscribe(&mut self) -> Subscription {
+        self.subscriptions.subscribe()
+    }
+
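+    /// Returns a future that resolves once this buffer has observed all of the
+    /// given edit ids, and fails if the buffer gives up waiting (see
+    /// `give_up_waiting`).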
+    pub fn wait_for_edits(
+        &mut self,
+        edit_ids: impl IntoIterator<Item = clock::Lamport>,
+    ) -> impl 'static + Future<Output = Result<()>> {
+        let mut futures = Vec::new();
+        for edit_id in edit_ids {
+            if !self.version.observed(edit_id) {
+                let (tx, rx) = oneshot::channel();
+                self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
+                futures.push(rx);
+            }
+        }
+
+        async move {
+            for mut future in futures {
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for edits"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn wait_for_anchors(
+        &mut self,
+        anchors: impl IntoIterator<Item = Anchor>,
+    ) -> impl 'static + Future<Output = Result<()>> {
+        let mut futures = Vec::new();
+        for anchor in anchors {
+            if !self.version.observed(anchor.timestamp)
+                && anchor != Anchor::MAX
+                && anchor != Anchor::MIN
+            {
+                let (tx, rx) = oneshot::channel();
+                self.edit_id_resolvers
+                    .entry(anchor.timestamp)
+                    .or_default()
+                    .push(tx);
+                futures.push(rx);
+            }
+        }
+
+        async move {
+            for mut future in futures {
+                if future.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for anchors"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
+        let mut rx = None;
+        if !self.snapshot.version.observed_all(&version) {
+            let channel = oneshot::channel();
+            self.wait_for_version_txs.push((version, channel.0));
+            rx = Some(channel.1);
+        }
+        async move {
+            if let Some(mut rx) = rx {
+                if rx.recv().await.is_none() {
+                    Err(anyhow!("gave up waiting for version"))?;
+                }
+            }
+            Ok(())
+        }
+    }
+
+    pub fn give_up_waiting(&mut self) {
+        self.edit_id_resolvers.clear();
+        self.wait_for_version_txs.clear();
+    }
+
+    fn resolve_edit(&mut self, edit_id: clock::Lamport) {
+        for mut tx in self
+            .edit_id_resolvers
+            .remove(&edit_id)
+            .into_iter()
+            .flatten()
+        {
+            tx.try_send(()).ok();
+        }
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Buffer {
+    pub fn edit_via_marked_text(&mut self, marked_string: &str) {
+        let edits = self.edits_for_marked_text(marked_string);
+        self.edit(edits);
+    }
+
+    pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
+        let old_text = self.text();
+        let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
+        if ranges.is_empty() {
+            ranges.push(0..new_text.len());
+        }
+
+        assert_eq!(
+            old_text[..ranges[0].start],
+            new_text[..ranges[0].start],
+            "invalid edit"
+        );
+
+        let mut delta = 0;
+        let mut edits = Vec::new();
+        let mut ranges = ranges.into_iter().peekable();
+
+        while let Some(inserted_range) = ranges.next() {
+            let new_start = inserted_range.start;
+            let old_start = (new_start as isize - delta) as usize;
+
+            let following_text = if let Some(next_range) = ranges.peek() {
+                &new_text[inserted_range.end..next_range.start]
+            } else {
+                &new_text[inserted_range.end..]
+            };
+
+            let inserted_len = inserted_range.len();
+            let deleted_len = old_text[old_start..]
+                .find(following_text)
+                .expect("invalid edit");
+
+            let old_range = old_start..old_start + deleted_len;
+            edits.push((old_range, new_text[inserted_range].to_string()));
+            delta += inserted_len as isize - deleted_len as isize;
+        }
+
+        assert_eq!(
+            old_text.len() as isize + delta,
+            new_text.len() as isize,
+            "invalid edit"
+        );
+
+        edits
+    }
+
+    pub fn check_invariants(&self) {
+        // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+        // to an insertion fragment in the insertions tree.
+        let mut prev_fragment_id = Locator::min();
+        for fragment in self.snapshot.fragments.items(&None) {
+            assert!(fragment.id > prev_fragment_id);
+            prev_fragment_id = fragment.id.clone();
+
+            let insertion_fragment = self
+                .snapshot
+                .insertions
+                .get(
+                    &InsertionFragmentKey {
+                        timestamp: fragment.timestamp,
+                        split_offset: fragment.insertion_offset,
+                    },
+                    &(),
+                )
+                .unwrap();
+            assert_eq!(
+                insertion_fragment.fragment_id, fragment.id,
+                "fragment: {:?}\ninsertion: {:?}",
+                fragment, insertion_fragment
+            );
+        }
+
+        let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+        for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+            cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+            let fragment = cursor.item().unwrap();
+            assert_eq!(insertion_fragment.fragment_id, fragment.id);
+            assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+        }
+
+        let fragment_summary = self.snapshot.fragments.summary();
+        assert_eq!(
+            fragment_summary.text.visible,
+            self.snapshot.visible_text.len()
+        );
+        assert_eq!(
+            fragment_summary.text.deleted,
+            self.snapshot.deleted_text.len()
+        );
+
+        assert!(!self.text().contains("\r\n"));
+    }
+
+    pub fn set_group_interval(&mut self, group_interval: Duration) {
+        self.history.group_interval = group_interval;
+    }
+
+    pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
+        let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
+        let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+        start..end
+    }
+
+    pub fn get_random_edits<T>(
+        &self,
+        rng: &mut T,
+        edit_count: usize,
+    ) -> Vec<(Range<usize>, Arc<str>)>
+    where
+        T: rand::Rng,
+    {
+        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
+        let mut last_end = None;
+        for _ in 0..edit_count {
+            if last_end.map_or(false, |last_end| last_end >= self.len()) {
+                break;
+            }
+            let new_start = last_end.map_or(0, |last_end| last_end + 1);
+            let range = self.random_byte_range(new_start, rng);
+            last_end = Some(range.end);
+
+            let new_text_len = rng.gen_range(0..10);
+            let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+
+            edits.push((range, new_text.into()));
+        }
+        edits
+    }
+
+    #[allow(clippy::type_complexity)]
+    pub fn randomly_edit<T>(
+        &mut self,
+        rng: &mut T,
+        edit_count: usize,
+    ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
+    where
+        T: rand::Rng,
+    {
+        let mut edits = self.get_random_edits(rng, edit_count);
+        log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
+
+        let op = self.edit(edits.iter().cloned());
+        if let Operation::Edit(edit) = &op {
+            assert_eq!(edits.len(), edit.new_text.len());
+            for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
+                edit.1 = new_text.clone();
+            }
+        } else {
+            unreachable!()
+        }
+
+        (edits, op)
+    }
+
+    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
+        use rand::prelude::*;
+
+        let mut ops = Vec::new();
+        for _ in 0..rng.gen_range(1..=5) {
+            if let Some(entry) = self.history.undo_stack.choose(rng) {
+                let transaction = entry.transaction.clone();
+                log::info!(
+                    "undoing buffer {} transaction {:?}",
+                    self.replica_id,
+                    transaction
+                );
+                ops.push(self.undo_or_redo(transaction).unwrap());
+            }
+        }
+        ops
+    }
+}
+
+impl Deref for Buffer {
+    type Target = BufferSnapshot;
+
+    fn deref(&self) -> &Self::Target {
+        &self.snapshot
+    }
+}
+
+impl BufferSnapshot {
+    pub fn as_rope(&self) -> &Rope {
+        &self.visible_text
+    }
+
+    pub fn remote_id(&self) -> u64 {
+        self.remote_id
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.replica_id
+    }
+
+    pub fn row_count(&self) -> u32 {
+        self.max_point().row + 1
+    }
+
+    pub fn len(&self) -> usize {
+        self.visible_text.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
+        self.chars_at(0)
+    }
+
+    pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
+        self.text_for_range(range).flat_map(str::chars)
+    }
+
+    pub fn reversed_chars_for_range<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> impl Iterator<Item = char> + '_ {
+        self.reversed_chunks_in_range(range)
+            .flat_map(|chunk| chunk.chars().rev())
+    }
+
+    pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
+    where
+        T: ToOffset,
+    {
+        let position = position.to_offset(self);
+        position == self.clip_offset(position, Bias::Left)
+            && self
+                .bytes_in_range(position..self.len())
+                .flatten()
+                .copied()
+                .take(needle.len())
+                .eq(needle.bytes())
+    }
+
+    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
+    where
+        T: ToOffset + TextDimension,
+    {
+        let offset = position.to_offset(self);
+        let common_prefix_len = needle
+            .char_indices()
+            .map(|(index, _)| index)
+            .chain([needle.len()])
+            .take_while(|&len| len <= offset)
+            .filter(|&len| {
+                let left = self
+                    .chars_for_range(offset - len..offset)
+                    .flat_map(char::to_lowercase);
+                let right = needle[..len].chars().flat_map(char::to_lowercase);
+                left.eq(right)
+            })
+            .last()
+            .unwrap_or(0);
+        let start_offset = offset - common_prefix_len;
+        let start = self.text_summary_for_range(0..start_offset);
+        start..position
+    }
+
+    pub fn text(&self) -> String {
+        self.visible_text.to_string()
+    }
+
+    pub fn line_ending(&self) -> LineEnding {
+        self.line_ending
+    }
+
+    pub fn deleted_text(&self) -> String {
+        self.deleted_text.to_string()
+    }
+
+    pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
+        self.fragments.iter()
+    }
+
+    pub fn text_summary(&self) -> TextSummary {
+        self.visible_text.summary()
+    }
+
+    pub fn max_point(&self) -> Point {
+        self.visible_text.max_point()
+    }
+
+    pub fn max_point_utf16(&self) -> PointUtf16 {
+        self.visible_text.max_point_utf16()
+    }
+
+    pub fn point_to_offset(&self, point: Point) -> usize {
+        self.visible_text.point_to_offset(point)
+    }
+
+    pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+        self.visible_text.point_utf16_to_offset(point)
+    }
+
+    pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+        self.visible_text.unclipped_point_utf16_to_offset(point)
+    }
+
+    pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
+        self.visible_text.unclipped_point_utf16_to_point(point)
+    }
+
+    pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
+        self.visible_text.offset_utf16_to_offset(offset)
+    }
+
+    pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
+        self.visible_text.offset_to_offset_utf16(offset)
+    }
+
+    pub fn offset_to_point(&self, offset: usize) -> Point {
+        self.visible_text.offset_to_point(offset)
+    }
+
+    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+        self.visible_text.offset_to_point_utf16(offset)
+    }
+
+    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+        self.visible_text.point_to_point_utf16(point)
+    }
+
+    pub fn version(&self) -> &clock::Global {
+        &self.version
+    }
+
+    pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+        let offset = position.to_offset(self);
+        self.visible_text.chars_at(offset)
+    }
+
+    pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+        let offset = position.to_offset(self);
+        self.visible_text.reversed_chars_at(offset)
+    }
+
+    pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        self.visible_text.reversed_chunks_in_range(range)
+    }
+
+    pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.bytes_in_range(start..end)
+    }
+
+    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.reversed_bytes_in_range(start..end)
+    }
+
+    pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.chunks_in_range(start..end)
+    }
+
+    pub fn line_len(&self, row: u32) -> u32 {
+        let row_start_offset = Point::new(row, 0).to_offset(self);
+        let row_end_offset = if row >= self.max_point().row {
+            self.len()
+        } else {
+            Point::new(row + 1, 0).to_offset(self) - 1
+        };
+        (row_end_offset - row_start_offset) as u32
+    }
+
+    pub fn is_line_blank(&self, row: u32) -> bool {
+        self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
+            .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
+    }
+
+    pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
+    where
+        D: TextDimension,
+    {
+        self.visible_text
+            .cursor(range.start.to_offset(self))
+            .summary(range.end.to_offset(self))
+    }
+
+    pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
+    where
+        D: 'a + TextDimension,
+        A: 'a + IntoIterator<Item = &'a Anchor>,
+    {
+        let anchors = anchors.into_iter();
+        self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
+            .map(|d| d.0)
+    }
+
+    pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
+        &'a self,
+        anchors: A,
+    ) -> impl 'a + Iterator<Item = (D, T)>
+    where
+        D: 'a + TextDimension,
+        A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
+    {
+        let anchors = anchors.into_iter();
+        let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+        let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+        let mut text_cursor = self.visible_text.cursor(0);
+        let mut position = D::default();
+
+        anchors.map(move |(anchor, payload)| {
+            if *anchor == Anchor::MIN {
+                return (D::default(), payload);
+            } else if *anchor == Anchor::MAX {
+                return (D::from_text_summary(&self.visible_text.summary()), payload);
+            }
+
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+            fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let mut fragment_offset = fragment_cursor.start().1;
+            if fragment.visible {
+                fragment_offset += anchor.offset - insertion.split_offset;
+            }
+
+            position.add_assign(&text_cursor.summary(fragment_offset));
+            (position.clone(), payload)
+        })
+    }
+
+    fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
+    where
+        D: TextDimension,
+    {
+        if *anchor == Anchor::MIN {
+            D::default()
+        } else if *anchor == Anchor::MAX {
+            D::from_text_summary(&self.visible_text.summary())
+        } else {
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+            let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+            fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let mut fragment_offset = fragment_cursor.start().1;
+            if fragment.visible {
+                fragment_offset += anchor.offset - insertion.split_offset;
+            }
+            self.text_summary_for_range(0..fragment_offset)
+        }
+    }
+
+    fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
+        if *anchor == Anchor::MIN {
+            Locator::min_ref()
+        } else if *anchor == Anchor::MAX {
+            Locator::max_ref()
+        } else {
+            let anchor_key = InsertionFragmentKey {
+                timestamp: anchor.timestamp,
+                split_offset: anchor.offset,
+            };
+            let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+            insertion_cursor.seek(&anchor_key, anchor.bias, &());
+            if let Some(insertion) = insertion_cursor.item() {
+                let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+                if comparison == Ordering::Greater
+                    || (anchor.bias == Bias::Left
+                        && comparison == Ordering::Equal
+                        && anchor.offset > 0)
+                {
+                    insertion_cursor.prev(&());
+                }
+            } else {
+                insertion_cursor.prev(&());
+            }
+            let insertion = insertion_cursor.item().expect("invalid insertion");
+            debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+            &insertion.fragment_id
+        }
+    }
+
+    pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Left)
+    }
+
+    pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
+        self.anchor_at(position, Bias::Right)
+    }
+
+    pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
+        self.anchor_at_offset(position.to_offset(self), bias)
+    }
+
+    fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
+        if bias == Bias::Left && offset == 0 {
+            Anchor::MIN
+        } else if bias == Bias::Right && offset == self.len() {
+            Anchor::MAX
+        } else {
+            let mut fragment_cursor = self.fragments.cursor::<usize>();
+            fragment_cursor.seek(&offset, bias, &None);
+            let fragment = fragment_cursor.item().unwrap();
+            let overshoot = offset - *fragment_cursor.start();
+            Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset + overshoot,
+                bias,
+                buffer_id: Some(self.remote_id),
+            }
+        }
+    }
+
+    pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+        *anchor == Anchor::MIN
+            || *anchor == Anchor::MAX
+            || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
+    }
+
+    pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
+        self.visible_text.clip_offset(offset, bias)
+    }
+
+    pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+        self.visible_text.clip_point(point, bias)
+    }
+
+    pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
+        self.visible_text.clip_offset_utf16(offset, bias)
+    }
+
+    pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
+        self.visible_text.clip_point_utf16(point, bias)
+    }
+
+    pub fn edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: TextDimension + Ord,
+    {
+        self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+    }
+
+    pub fn anchored_edits_since<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+    where
+        D: TextDimension + Ord,
+    {
+        self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+    }
+
+    pub fn edits_since_in_range<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+        range: Range<Anchor>,
+    ) -> impl 'a + Iterator<Item = Edit<D>>
+    where
+        D: TextDimension + Ord,
+    {
+        self.anchored_edits_since_in_range(since, range)
+            .map(|item| item.0)
+    }
+
+    pub fn anchored_edits_since_in_range<'a, D>(
+        &'a self,
+        since: &'a clock::Global,
+        range: Range<Anchor>,
+    ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+    where
+        D: TextDimension + Ord,
+    {
+        let fragments_cursor = if *since == self.version {
+            None
+        } else {
+            let mut cursor = self
+                .fragments
+                .filter(move |summary| !since.observed_all(&summary.max_version));
+            cursor.next(&None);
+            Some(cursor)
+        };
+        let mut cursor = self
+            .fragments
+            .cursor::<(Option<&Locator>, FragmentTextSummary)>();
+
+        let start_fragment_id = self.fragment_id_for_anchor(&range.start);
+        cursor.seek(&Some(start_fragment_id), Bias::Left, &None);
+        let mut visible_start = cursor.start().1.visible;
+        let mut deleted_start = cursor.start().1.deleted;
+        if let Some(fragment) = cursor.item() {
+            let overshoot = range.start.offset - fragment.insertion_offset;
+            if fragment.visible {
+                visible_start += overshoot;
+            } else {
+                deleted_start += overshoot;
+            }
+        }
+        let end_fragment_id = self.fragment_id_for_anchor(&range.end);
+
+        Edits {
+            visible_cursor: self.visible_text.cursor(visible_start),
+            deleted_cursor: self.deleted_text.cursor(deleted_start),
+            fragments_cursor,
+            undos: &self.undo_map,
+            since,
+            old_end: Default::default(),
+            new_end: Default::default(),
+            range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
+            buffer_id: self.remote_id,
+        }
+    }
+}
+
+struct RopeBuilder<'a> {
+    old_visible_cursor: rope::Cursor<'a>,
+    old_deleted_cursor: rope::Cursor<'a>,
+    new_visible: Rope,
+    new_deleted: Rope,
+}
+
+impl<'a> RopeBuilder<'a> {
+    fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
+        Self {
+            old_visible_cursor,
+            old_deleted_cursor,
+            new_visible: Rope::new(),
+            new_deleted: Rope::new(),
+        }
+    }
+
+    fn append(&mut self, len: FragmentTextSummary) {
+        self.push(len.visible, true, true);
+        self.push(len.deleted, false, false);
+    }
+
+    fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
+        debug_assert!(fragment.len > 0);
+        self.push(fragment.len, was_visible, fragment.visible)
+    }
+
+    fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
+        let text = if was_visible {
+            self.old_visible_cursor
+                .slice(self.old_visible_cursor.offset() + len)
+        } else {
+            self.old_deleted_cursor
+                .slice(self.old_deleted_cursor.offset() + len)
+        };
+        if is_visible {
+            self.new_visible.append(text);
+        } else {
+            self.new_deleted.append(text);
+        }
+    }
+
+    fn push_str(&mut self, text: &str) {
+        self.new_visible.push(text);
+    }
+
+    fn finish(mut self) -> (Rope, Rope) {
+        self.new_visible.append(self.old_visible_cursor.suffix());
+        self.new_deleted.append(self.old_deleted_cursor.suffix());
+        (self.new_visible, self.new_deleted)
+    }
+}
+
+impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> {
+    type Item = (Edit<D>, Range<Anchor>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let mut pending_edit: Option<Self::Item> = None;
+        let cursor = self.fragments_cursor.as_mut()?;
+
+        while let Some(fragment) = cursor.item() {
+            if fragment.id < *self.range.start.0 {
+                cursor.next(&None);
+                continue;
+            } else if fragment.id > *self.range.end.0 {
+                break;
+            }
+
+            if cursor.start().visible > self.visible_cursor.offset() {
+                let summary = self.visible_cursor.summary(cursor.start().visible);
+                self.old_end.add_assign(&summary);
+                self.new_end.add_assign(&summary);
+            }
+
+            if pending_edit
+                .as_ref()
+                .map_or(false, |(change, _)| change.new.end < self.new_end)
+            {
+                break;
+            }
+
+            let start_anchor = Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset,
+                bias: Bias::Right,
+                buffer_id: Some(self.buffer_id),
+            };
+            let end_anchor = Anchor {
+                timestamp: fragment.timestamp,
+                offset: fragment.insertion_offset + fragment.len,
+                bias: Bias::Left,
+                buffer_id: Some(self.buffer_id),
+            };
+
+            if !fragment.was_visible(self.since, self.undos) && fragment.visible {
+                let mut visible_end = cursor.end(&None).visible;
+                if fragment.id == *self.range.end.0 {
+                    visible_end = cmp::min(
+                        visible_end,
+                        cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
+                    );
+                }
+
+                let fragment_summary = self.visible_cursor.summary(visible_end);
+                let mut new_end = self.new_end.clone();
+                new_end.add_assign(&fragment_summary);
+                if let Some((edit, range)) = pending_edit.as_mut() {
+                    edit.new.end = new_end.clone();
+                    range.end = end_anchor;
+                } else {
+                    pending_edit = Some((
+                        Edit {
+                            old: self.old_end.clone()..self.old_end.clone(),
+                            new: self.new_end.clone()..new_end.clone(),
+                        },
+                        start_anchor..end_anchor,
+                    ));
+                }
+
+                self.new_end = new_end;
+            } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
+                let mut deleted_end = cursor.end(&None).deleted;
+                if fragment.id == *self.range.end.0 {
+                    deleted_end = cmp::min(
+                        deleted_end,
+                        cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
+                    );
+                }
+
+                if cursor.start().deleted > self.deleted_cursor.offset() {
+                    self.deleted_cursor.seek_forward(cursor.start().deleted);
+                }
+                let fragment_summary = self.deleted_cursor.summary(deleted_end);
+                let mut old_end = self.old_end.clone();
+                old_end.add_assign(&fragment_summary);
+                if let Some((edit, range)) = pending_edit.as_mut() {
+                    edit.old.end = old_end.clone();
+                    range.end = end_anchor;
+                } else {
+                    pending_edit = Some((
+                        Edit {
+                            old: self.old_end.clone()..old_end.clone(),
+                            new: self.new_end.clone()..self.new_end.clone(),
+                        },
+                        start_anchor..end_anchor,
+                    ));
+                }
+
+                self.old_end = old_end;
+            }
+
+            cursor.next(&None);
+        }
+
+        pending_edit
+    }
+}
+
+impl Fragment {
+    fn insertion_slice(&self) -> InsertionSlice {
+        InsertionSlice {
+            insertion_id: self.timestamp,
+            range: self.insertion_offset..self.insertion_offset + self.len,
+        }
+    }
+
+    fn is_visible(&self, undos: &UndoMap) -> bool {
+        !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
+    }
+
+    fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
+        (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
+            && self
+                .deletions
+                .iter()
+                .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
+    }
+}
+
+impl sum_tree::Item for Fragment {
+    type Summary = FragmentSummary;
+
+    fn summary(&self) -> Self::Summary {
+        let mut max_version = clock::Global::new();
+        max_version.observe(self.timestamp);
+        for deletion in &self.deletions {
+            max_version.observe(*deletion);
+        }
+        max_version.join(&self.max_undos);
+
+        let mut min_insertion_version = clock::Global::new();
+        min_insertion_version.observe(self.timestamp);
+        let max_insertion_version = min_insertion_version.clone();
+        if self.visible {
+            FragmentSummary {
+                max_id: self.id.clone(),
+                text: FragmentTextSummary {
+                    visible: self.len,
+                    deleted: 0,
+                },
+                max_version,
+                min_insertion_version,
+                max_insertion_version,
+            }
+        } else {
+            FragmentSummary {
+                max_id: self.id.clone(),
+                text: FragmentTextSummary {
+                    visible: 0,
+                    deleted: self.len,
+                },
+                max_version,
+                min_insertion_version,
+                max_insertion_version,
+            }
+        }
+    }
+}
+
+impl sum_tree::Summary for FragmentSummary {
+    type Context = Option<clock::Global>;
+
+    fn add_summary(&mut self, other: &Self, _: &Self::Context) {
+        self.max_id.assign(&other.max_id);
+        self.text.visible += &other.text.visible;
+        self.text.deleted += &other.text.deleted;
+        self.max_version.join(&other.max_version);
+        self.min_insertion_version
+            .meet(&other.min_insertion_version);
+        self.max_insertion_version
+            .join(&other.max_insertion_version);
+    }
+}
+
+impl Default for FragmentSummary {
+    fn default() -> Self {
+        FragmentSummary {
+            max_id: Locator::min(),
+            text: FragmentTextSummary::default(),
+            max_version: clock::Global::new(),
+            min_insertion_version: clock::Global::new(),
+            max_insertion_version: clock::Global::new(),
+        }
+    }
+}
+
+impl sum_tree::Item for InsertionFragment {
+    type Summary = InsertionFragmentKey;
+
+    fn summary(&self) -> Self::Summary {
+        InsertionFragmentKey {
+            timestamp: self.timestamp,
+            split_offset: self.split_offset,
+        }
+    }
+}
+
+impl sum_tree::KeyedItem for InsertionFragment {
+    type Key = InsertionFragmentKey;
+
+    fn key(&self) -> Self::Key {
+        sum_tree::Item::summary(self)
+    }
+}
+
+impl InsertionFragment {
+    fn new(fragment: &Fragment) -> Self {
+        Self {
+            timestamp: fragment.timestamp,
+            split_offset: fragment.insertion_offset,
+            fragment_id: fragment.id.clone(),
+        }
+    }
+
+    fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
+        sum_tree::Edit::Insert(Self::new(fragment))
+    }
+}
+
+impl sum_tree::Summary for InsertionFragmentKey {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &()) {
+        *self = *summary;
+    }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct FullOffset(pub usize);
+
+impl ops::AddAssign<usize> for FullOffset {
+    fn add_assign(&mut self, rhs: usize) {
+        self.0 += rhs;
+    }
+}
+
+impl ops::Add<usize> for FullOffset {
+    type Output = Self;
+
+    fn add(mut self, rhs: usize) -> Self::Output {
+        self += rhs;
+        self
+    }
+}
+
+impl ops::Sub for FullOffset {
+    type Output = usize;
+
+    fn sub(self, rhs: Self) -> Self::Output {
+        self.0 - rhs.0
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize {
+    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+        *self += summary.text.visible;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset {
+    fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+        self.0 += summary.text.visible + summary.text.deleted;
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+        *self = Some(&summary.max_id);
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize {
+    fn cmp(
+        &self,
+        cursor_location: &FragmentTextSummary,
+        _: &Option<clock::Global>,
+    ) -> cmp::Ordering {
+        Ord::cmp(self, &cursor_location.visible)
+    }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum VersionedFullOffset {
+    Offset(FullOffset),
+    Invalid,
+}
+
+impl VersionedFullOffset {
+    fn full_offset(&self) -> FullOffset {
+        if let Self::Offset(position) = self {
+            *position
+        } else {
+            panic!("invalid version")
+        }
+    }
+}
+
+impl Default for VersionedFullOffset {
+    fn default() -> Self {
+        Self::Offset(Default::default())
+    }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
+        if let Self::Offset(offset) = self {
+            let version = cx.as_ref().unwrap();
+            if version.observed_all(&summary.max_insertion_version) {
+                *offset += summary.text.visible + summary.text.deleted;
+            } else if version.observed_any(&summary.min_insertion_version) {
+                *self = Self::Invalid;
+            }
+        }
+    }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset {
+    fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
+        match (self, cursor_position) {
+            (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
+            (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
+            (Self::Invalid, _) => unreachable!(),
+        }
+    }
+}
+
+impl Operation {
+    fn replica_id(&self) -> ReplicaId {
+        operation_queue::Operation::lamport_timestamp(self).replica_id
+    }
+
+    pub fn timestamp(&self) -> clock::Lamport {
+        match self {
+            Operation::Edit(edit) => edit.timestamp,
+            Operation::Undo(undo) => undo.timestamp,
+        }
+    }
+
+    pub fn as_edit(&self) -> Option<&EditOperation> {
+        match self {
+            Operation::Edit(edit) => Some(edit),
+            _ => None,
+        }
+    }
+
+    pub fn is_edit(&self) -> bool {
+        matches!(self, Operation::Edit { .. })
+    }
+}
+
+impl operation_queue::Operation for Operation {
+    fn lamport_timestamp(&self) -> clock::Lamport {
+        match self {
+            Operation::Edit(edit) => edit.timestamp,
+            Operation::Undo(undo) => undo.timestamp,
+        }
+    }
+}
+
+pub trait ToOffset {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
+}
+
+impl ToOffset for Point {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.point_to_offset(*self)
+    }
+}
+
+impl ToOffset for usize {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        assert!(
+            *self <= snapshot.len(),
+            "offset {} is out of range, max allowed is {}",
+            self,
+            snapshot.len()
+        );
+        *self
+    }
+}
+
+impl ToOffset for Anchor {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl<'a, T: ToOffset> ToOffset for &'a T {
+    fn to_offset(&self, content: &BufferSnapshot) -> usize {
+        (*self).to_offset(content)
+    }
+}
+
+impl ToOffset for PointUtf16 {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.point_utf16_to_offset(*self)
+    }
+}
+
+impl ToOffset for Unclipped<PointUtf16> {
+    fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+        snapshot.unclipped_point_utf16_to_offset(*self)
+    }
+}
+
+pub trait ToPoint {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
+}
+
+impl ToPoint for Anchor {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToPoint for usize {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.offset_to_point(*self)
+    }
+}
+
+impl ToPoint for Point {
+    fn to_point(&self, _: &BufferSnapshot) -> Point {
+        *self
+    }
+}
+
+impl ToPoint for Unclipped<PointUtf16> {
+    fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+        snapshot.unclipped_point_utf16_to_point(*self)
+    }
+}
+
+pub trait ToPointUtf16 {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
+}
+
+impl ToPointUtf16 for Anchor {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToPointUtf16 for usize {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.offset_to_point_utf16(*self)
+    }
+}
+
+impl ToPointUtf16 for PointUtf16 {
+    fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
+        *self
+    }
+}
+
+impl ToPointUtf16 for Point {
+    fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.point_to_point_utf16(*self)
+    }
+}
+
+pub trait ToOffsetUtf16 {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
+}
+
+impl ToOffsetUtf16 for Anchor {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToOffsetUtf16 for usize {
+    fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        snapshot.offset_to_offset_utf16(*self)
+    }
+}
+
+impl ToOffsetUtf16 for OffsetUtf16 {
+    fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
+        *self
+    }
+}
+
+pub trait FromAnchor {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
+}
+
+impl FromAnchor for Point {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+impl FromAnchor for PointUtf16 {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+impl FromAnchor for usize {
+    fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+        snapshot.summary_for_anchor(anchor)
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum LineEnding {
+    Unix,
+    Windows,
+}
+
+impl Default for LineEnding {
+    fn default() -> Self {
+        #[cfg(unix)]
+        return Self::Unix;
+
+        #[cfg(not(unix))]
+        return Self::Windows;
+    }
+}
+
+impl LineEnding {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            LineEnding::Unix => "\n",
+            LineEnding::Windows => "\r\n",
+        }
+    }
+
+    pub fn detect(text: &str) -> Self {
+        let mut max_ix = cmp::min(text.len(), 1000);
+        while !text.is_char_boundary(max_ix) {
+            max_ix -= 1;
+        }
+
+        if let Some(ix) = text[..max_ix].find(&['\n']) {
+            if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
+                Self::Windows
+            } else {
+                Self::Unix
+            }
+        } else {
+            Self::default()
+        }
+    }
+
+    pub fn normalize(text: &mut String) {
+        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
+            *text = replaced;
+        }
+    }
+
+    pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
+        if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
+            replaced.into()
+        } else {
+            text
+        }
+    }
+}
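
A quick usage sketch for the LineEnding API added above (illustrative only: the `text2::LineEnding` import path assumes the type is re-exported from the crate root, and the `normalize` expectation assumes LINE_SEPARATORS_REGEX matches "\r\n", as its name suggests):

    use text2::LineEnding; // assumed path

    fn main() {
        // `detect` inspects roughly the first 1000 bytes and falls back to the
        // platform default when no '\n' is found.
        assert_eq!(LineEnding::detect("one\r\ntwo\r\n"), LineEnding::Windows);
        assert_eq!(LineEnding::detect("one\ntwo\n"), LineEnding::Unix);
        assert_eq!(LineEnding::Windows.as_str(), "\r\n");

        // `normalize` rewrites recognized separators to "\n" in place, keeping the
        // `!text().contains("\r\n")` invariant asserted in the buffer code above.
        let mut text = String::from("a\r\nb\r\nc");
        LineEnding::normalize(&mut text);
        assert_eq!(text, "a\nb\nc");
    }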

crates/text2/src/undo_map.rs 🔗

@@ -0,0 +1,112 @@
+use crate::UndoOperation;
+use std::cmp;
+use sum_tree::{Bias, SumTree};
+
+#[derive(Copy, Clone, Debug)]
+struct UndoMapEntry {
+    key: UndoMapKey,
+    undo_count: u32,
+}
+
+impl sum_tree::Item for UndoMapEntry {
+    type Summary = UndoMapKey;
+
+    fn summary(&self) -> Self::Summary {
+        self.key
+    }
+}
+
+impl sum_tree::KeyedItem for UndoMapEntry {
+    type Key = UndoMapKey;
+
+    fn key(&self) -> Self::Key {
+        self.key
+    }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct UndoMapKey {
+    edit_id: clock::Lamport,
+    undo_id: clock::Lamport,
+}
+
+impl sum_tree::Summary for UndoMapKey {
+    type Context = ();
+
+    fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
+        *self = cmp::max(*self, *summary);
+    }
+}
+
+#[derive(Clone, Default)]
+pub struct UndoMap(SumTree<UndoMapEntry>);
+
+impl UndoMap {
+    pub fn insert(&mut self, undo: &UndoOperation) {
+        let edits = undo
+            .counts
+            .iter()
+            .map(|(edit_id, count)| {
+                sum_tree::Edit::Insert(UndoMapEntry {
+                    key: UndoMapKey {
+                        edit_id: *edit_id,
+                        undo_id: undo.timestamp,
+                    },
+                    undo_count: *count,
+                })
+            })
+            .collect::<Vec<_>>();
+        self.0.edit(edits, &());
+    }
+
+    pub fn is_undone(&self, edit_id: clock::Lamport) -> bool {
+        self.undo_count(edit_id) % 2 == 1
+    }
+
+    pub fn was_undone(&self, edit_id: clock::Lamport, version: &clock::Global) -> bool {
+        let mut cursor = self.0.cursor::<UndoMapKey>();
+        cursor.seek(
+            &UndoMapKey {
+                edit_id,
+                undo_id: Default::default(),
+            },
+            Bias::Left,
+            &(),
+        );
+
+        let mut undo_count = 0;
+        for entry in cursor {
+            if entry.key.edit_id != edit_id {
+                break;
+            }
+
+            if version.observed(entry.key.undo_id) {
+                undo_count = cmp::max(undo_count, entry.undo_count);
+            }
+        }
+
+        undo_count % 2 == 1
+    }
+
+    pub fn undo_count(&self, edit_id: clock::Lamport) -> u32 {
+        let mut cursor = self.0.cursor::<UndoMapKey>();
+        cursor.seek(
+            &UndoMapKey {
+                edit_id,
+                undo_id: Default::default(),
+            },
+            Bias::Left,
+            &(),
+        );
+
+        let mut undo_count = 0;
+        for entry in cursor {
+            if entry.key.edit_id != edit_id {
+                break;
+            }
+
+            undo_count = cmp::max(undo_count, entry.undo_count);
+        }
+        undo_count
+    }
+}
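
The UndoMap above records, for each edit id, the highest undo count observed across the undo operations that touched it; an odd count means the edit is currently undone, an even count means it has been re-applied. A toy model of that parity rule (deliberately simplified: plain u32 ids and a HashMap instead of Lamport timestamps and a SumTree):

    use std::collections::HashMap;

    #[derive(Default)]
    struct ToyUndoMap(HashMap<u32, u32>);

    impl ToyUndoMap {
        // Record an undo/redo of `edit_id`, keeping the highest count seen.
        fn record(&mut self, edit_id: u32, undo_count: u32) {
            let entry = self.0.entry(edit_id).or_insert(0);
            *entry = (*entry).max(undo_count);
        }

        // Mirrors `UndoMap::is_undone`: an odd count means undone.
        fn is_undone(&self, edit_id: u32) -> bool {
            self.0.get(&edit_id).copied().unwrap_or(0) % 2 == 1
        }
    }

    fn main() {
        let mut map = ToyUndoMap::default();
        map.record(7, 1); // undo edit 7
        assert!(map.is_undone(7));
        map.record(7, 2); // redo edit 7
        assert!(!map.is_undone(7));
    }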

crates/theme2/src/default_colors.rs 🔗

@@ -1,9 +1,12 @@
-use gpui2::{hsla, Rgba};
+use std::num::ParseIntError;
+
+use gpui2::{hsla, Hsla, Rgba};
 
 use crate::{
     colors::{GitStatusColors, PlayerColor, PlayerColors, StatusColors, SystemColors, ThemeColors},
     scale::{ColorScaleSet, ColorScales},
     syntax::SyntaxTheme,
+    ColorScale,
 };
 
 impl Default for SystemColors {
@@ -20,7 +23,7 @@ impl Default for SystemColors {
 impl Default for StatusColors {
     fn default() -> Self {
         Self {
-            conflict: gpui2::black(),
+            conflict: red().dark().step_11(),
             created: gpui2::black(),
             deleted: gpui2::black(),
             error: gpui2::black(),
@@ -269,31 +272,35 @@ impl ThemeColors {
     }
 }
 
-struct DefaultColorScaleSet {
+type StaticColorScale = [&'static str; 12];
+
+struct StaticColorScaleSet {
     scale: &'static str,
-    light: [&'static str; 12],
-    light_alpha: [&'static str; 12],
-    dark: [&'static str; 12],
-    dark_alpha: [&'static str; 12],
+    light: StaticColorScale,
+    light_alpha: StaticColorScale,
+    dark: StaticColorScale,
+    dark_alpha: StaticColorScale,
 }
 
-impl From<DefaultColorScaleSet> for ColorScaleSet {
-    fn from(default: DefaultColorScaleSet) -> Self {
-        Self::new(
-            default.scale,
-            default
-                .light
-                .map(|color| Rgba::try_from(color).unwrap().into()),
-            default
-                .light_alpha
-                .map(|color| Rgba::try_from(color).unwrap().into()),
-            default
-                .dark
-                .map(|color| Rgba::try_from(color).unwrap().into()),
-            default
-                .dark_alpha
-                .map(|color| Rgba::try_from(color).unwrap().into()),
-        )
+impl TryFrom<StaticColorScaleSet> for ColorScaleSet {
+    type Error = ParseIntError;
+
+    fn try_from(value: StaticColorScaleSet) -> Result<Self, Self::Error> {
+        fn to_color_scale(scale: StaticColorScale) -> Result<ColorScale, ParseIntError> {
+            scale
+                .into_iter()
+                .map(|color| Rgba::try_from(color).map(Hsla::from))
+                .collect::<Result<Vec<_>, _>>()
+                .map(ColorScale::from_iter)
+        }
+
+        Ok(Self::new(
+            value.scale,
+            to_color_scale(value.light)?,
+            to_color_scale(value.light_alpha)?,
+            to_color_scale(value.dark)?,
+            to_color_scale(value.dark_alpha)?,
+        ))
     }
 }
 
@@ -336,7 +343,7 @@ pub fn default_color_scales() -> ColorScales {
 }
 
 fn gray() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Gray",
         light: [
             "#fcfcfcff",
@@ -395,11 +402,12 @@ fn gray() -> ColorScaleSet {
             "#ffffffed",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn mauve() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Mauve",
         light: [
             "#fdfcfdff",
@@ -458,11 +466,12 @@ fn mauve() -> ColorScaleSet {
             "#fdfdffef",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn slate() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Slate",
         light: [
             "#fcfcfdff",
@@ -521,11 +530,12 @@ fn slate() -> ColorScaleSet {
             "#fcfdffef",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn sage() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Sage",
         light: [
             "#fbfdfcff",
@@ -584,11 +594,12 @@ fn sage() -> ColorScaleSet {
             "#fdfffeed",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn olive() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Olive",
         light: [
             "#fcfdfcff",
@@ -647,11 +658,12 @@ fn olive() -> ColorScaleSet {
             "#fdfffded",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn sand() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Sand",
         light: [
             "#fdfdfcff",
@@ -710,11 +722,12 @@ fn sand() -> ColorScaleSet {
             "#fffffded",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn gold() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Gold",
         light: [
             "#fdfdfcff",
@@ -773,11 +786,12 @@ fn gold() -> ColorScaleSet {
             "#fef7ede7",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn bronze() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Bronze",
         light: [
             "#fdfcfcff",
@@ -836,11 +850,12 @@ fn bronze() -> ColorScaleSet {
             "#fff1e9ec",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn brown() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Brown",
         light: [
             "#fefdfcff",
@@ -899,11 +914,12 @@ fn brown() -> ColorScaleSet {
             "#feecd4f2",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn yellow() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Yellow",
         light: [
             "#fdfdf9ff",
@@ -962,11 +978,12 @@ fn yellow() -> ColorScaleSet {
             "#fef6baf6",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn amber() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Amber",
         light: [
             "#fefdfbff",
@@ -1025,11 +1042,12 @@ fn amber() -> ColorScaleSet {
             "#ffe7b3ff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn orange() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Orange",
         light: [
             "#fefcfbff",
@@ -1088,11 +1106,12 @@ fn orange() -> ColorScaleSet {
             "#ffe0c2ff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn tomato() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Tomato",
         light: [
             "#fffcfcff",
@@ -1151,11 +1170,12 @@ fn tomato() -> ColorScaleSet {
             "#ffd6cefb",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn red() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Red",
         light: [
             "#fffcfcff",
@@ -1214,11 +1234,12 @@ fn red() -> ColorScaleSet {
             "#ffd1d9ff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn ruby() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Ruby",
         light: [
             "#fffcfdff",
@@ -1277,11 +1298,12 @@ fn ruby() -> ColorScaleSet {
             "#ffd3e2fe",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn crimson() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Crimson",
         light: [
             "#fffcfdff",
@@ -1340,11 +1362,12 @@ fn crimson() -> ColorScaleSet {
             "#ffd5eafd",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn pink() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Pink",
         light: [
             "#fffcfeff",
@@ -1403,11 +1426,12 @@ fn pink() -> ColorScaleSet {
             "#ffd3ecfd",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn plum() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Plum",
         light: [
             "#fefcffff",
@@ -1466,11 +1490,12 @@ fn plum() -> ColorScaleSet {
             "#feddfef4",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn purple() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Purple",
         light: [
             "#fefcfeff",
@@ -1529,11 +1554,12 @@ fn purple() -> ColorScaleSet {
             "#f1ddfffa",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn violet() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Violet",
         light: [
             "#fdfcfeff",
@@ -1592,11 +1618,12 @@ fn violet() -> ColorScaleSet {
             "#e3defffe",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn iris() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Iris",
         light: [
             "#fdfdffff",
@@ -1655,11 +1682,12 @@ fn iris() -> ColorScaleSet {
             "#e1e0fffe",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn indigo() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Indigo",
         light: [
             "#fdfdfeff",
@@ -1718,11 +1746,12 @@ fn indigo() -> ColorScaleSet {
             "#d6e1ffff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn blue() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Blue",
         light: [
             "#fbfdffff",
@@ -1781,11 +1810,12 @@ fn blue() -> ColorScaleSet {
             "#c2e6ffff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn cyan() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Cyan",
         light: [
             "#fafdfeff",
@@ -1844,11 +1874,12 @@ fn cyan() -> ColorScaleSet {
             "#bbf3fef7",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn teal() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Teal",
         light: [
             "#fafefdff",
@@ -1907,11 +1938,12 @@ fn teal() -> ColorScaleSet {
             "#b8ffebef",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn jade() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Jade",
         light: [
             "#fbfefdff",
@@ -1970,11 +2002,12 @@ fn jade() -> ColorScaleSet {
             "#b8ffe1ef",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn green() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Green",
         light: [
             "#fbfefcff",
@@ -2033,11 +2066,12 @@ fn green() -> ColorScaleSet {
             "#bbffd7f0",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn grass() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Grass",
         light: [
             "#fbfefbff",
@@ -2096,11 +2130,12 @@ fn grass() -> ColorScaleSet {
             "#ceffceef",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn lime() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Lime",
         light: [
             "#fcfdfaff",
@@ -2159,11 +2194,12 @@ fn lime() -> ColorScaleSet {
             "#e9febff7",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn mint() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Mint",
         light: [
             "#f9fefdff",
@@ -2222,11 +2258,12 @@ fn mint() -> ColorScaleSet {
             "#cbfee9f5",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn sky() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Sky",
         light: [
             "#f9feffff",
@@ -2285,11 +2322,12 @@ fn sky() -> ColorScaleSet {
             "#c2f3ffff",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn black() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "Black",
         light: [
             "#0000000d",
@@ -2348,11 +2386,12 @@ fn black() -> ColorScaleSet {
             "#000000f2",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
 
 fn white() -> ColorScaleSet {
-    DefaultColorScaleSet {
+    StaticColorScaleSet {
         scale: "White",
         light: [
             "#ffffff0d",
@@ -2411,5 +2450,6 @@ fn white() -> ColorScaleSet {
             "#fffffff2",
         ],
     }
-    .into()
+    .try_into()
+    .unwrap()
 }
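
Moving from `From` to `TryFrom` above means a malformed hex literal now surfaces as a `ParseIntError` at conversion time rather than an `unwrap` panic deep inside `ColorScaleSet::new`. A hypothetical test that could sit inside default_colors.rs (it assumes `Rgba::try_from` rejects non-hex digits with a `ParseIntError`, which is what the `TryFrom` impl's error type implies):

    #[cfg(test)]
    mod static_scale_tests {
        use super::*;

        #[test]
        fn invalid_hex_fails_conversion() {
            let broken = StaticColorScaleSet {
                scale: "Broken",
                light: ["#zzzzzzff"; 12], // 'z' is not a hex digit
                light_alpha: ["#000000ff"; 12],
                dark: ["#000000ff"; 12],
                dark_alpha: ["#000000ff"; 12],
            };
            assert!(ColorScaleSet::try_from(broken).is_err());
        }
    }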

crates/theme2/src/scale.rs 🔗

@@ -2,7 +2,152 @@ use gpui2::{AppContext, Hsla, SharedString};
 
 use crate::{ActiveTheme, Appearance};
 
-pub type ColorScale = [Hsla; 12];
+/// A one-based step in a [`ColorScale`].
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
+pub struct ColorScaleStep(usize);
+
+impl ColorScaleStep {
+    /// The first step in a [`ColorScale`].
+    pub const ONE: Self = Self(1);
+
+    /// The second step in a [`ColorScale`].
+    pub const TWO: Self = Self(2);
+
+    /// The third step in a [`ColorScale`].
+    pub const THREE: Self = Self(3);
+
+    /// The fourth step in a [`ColorScale`].
+    pub const FOUR: Self = Self(4);
+
+    /// The fifth step in a [`ColorScale`].
+    pub const FIVE: Self = Self(5);
+
+    /// The sixth step in a [`ColorScale`].
+    pub const SIX: Self = Self(6);
+
+    /// The seventh step in a [`ColorScale`].
+    pub const SEVEN: Self = Self(7);
+
+    /// The eighth step in a [`ColorScale`].
+    pub const EIGHT: Self = Self(8);
+
+    /// The ninth step in a [`ColorScale`].
+    pub const NINE: Self = Self(9);
+
+    /// The tenth step in a [`ColorScale`].
+    pub const TEN: Self = Self(10);
+
+    /// The eleventh step in a [`ColorScale`].
+    pub const ELEVEN: Self = Self(11);
+
+    /// The twelfth step in a [`ColorScale`].
+    pub const TWELVE: Self = Self(12);
+
+    /// All of the steps in a [`ColorScale`].
+    pub const ALL: [ColorScaleStep; 12] = [
+        Self::ONE,
+        Self::TWO,
+        Self::THREE,
+        Self::FOUR,
+        Self::FIVE,
+        Self::SIX,
+        Self::SEVEN,
+        Self::EIGHT,
+        Self::NINE,
+        Self::TEN,
+        Self::ELEVEN,
+        Self::TWELVE,
+    ];
+}
+
+pub struct ColorScale(Vec<Hsla>);
+
+impl FromIterator<Hsla> for ColorScale {
+    fn from_iter<T: IntoIterator<Item = Hsla>>(iter: T) -> Self {
+        Self(Vec::from_iter(iter))
+    }
+}
+
+impl ColorScale {
+    /// Returns the specified step in the [`ColorScale`].
+    #[inline]
+    pub fn step(&self, step: ColorScaleStep) -> Hsla {
+        // Steps are one-based, so we need to convert to the zero-based Vec index.
+        self.0[step.0 - 1]
+    }
+
+    /// Returns the first step in the [`ColorScale`].
+    #[inline]
+    pub fn step_1(&self) -> Hsla {
+        self.step(ColorScaleStep::ONE)
+    }
+
+    /// Returns the second step in the [`ColorScale`].
+    #[inline]
+    pub fn step_2(&self) -> Hsla {
+        self.step(ColorScaleStep::TWO)
+    }
+
+    /// Returns the third step in the [`ColorScale`].
+    #[inline]
+    pub fn step_3(&self) -> Hsla {
+        self.step(ColorScaleStep::THREE)
+    }
+
+    /// Returns the fourth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_4(&self) -> Hsla {
+        self.step(ColorScaleStep::FOUR)
+    }
+
+    /// Returns the fifth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_5(&self) -> Hsla {
+        self.step(ColorScaleStep::FIVE)
+    }
+
+    /// Returns the sixth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_6(&self) -> Hsla {
+        self.step(ColorScaleStep::SIX)
+    }
+
+    /// Returns the seventh step in the [`ColorScale`].
+    #[inline]
+    pub fn step_7(&self) -> Hsla {
+        self.step(ColorScaleStep::SEVEN)
+    }
+
+    /// Returns the eighth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_8(&self) -> Hsla {
+        self.step(ColorScaleStep::EIGHT)
+    }
+
+    /// Returns the ninth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_9(&self) -> Hsla {
+        self.step(ColorScaleStep::NINE)
+    }
+
+    /// Returns the tenth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_10(&self) -> Hsla {
+        self.step(ColorScaleStep::TEN)
+    }
+
+    /// Returns the eleventh step in the [`ColorScale`].
+    #[inline]
+    pub fn step_11(&self) -> Hsla {
+        self.step(ColorScaleStep::ELEVEN)
+    }
+
+    /// Returns the twelfth step in the [`ColorScale`].
+    #[inline]
+    pub fn step_12(&self) -> Hsla {
+        self.step(ColorScaleStep::TWELVE)
+    }
+}
 
 pub struct ColorScales {
     pub gray: ColorScaleSet,
@@ -85,9 +230,6 @@ impl IntoIterator for ColorScales {
     }
 }
 
-/// A one-based step in a [`ColorScale`].
-pub type ColorScaleStep = usize;
-
 pub struct ColorScaleSet {
     name: SharedString,
     light: ColorScale,
@@ -117,33 +259,33 @@ impl ColorScaleSet {
         &self.name
     }
 
-    pub fn light(&self, step: ColorScaleStep) -> Hsla {
-        self.light[step - 1]
+    pub fn light(&self) -> &ColorScale {
+        &self.light
     }
 
-    pub fn light_alpha(&self, step: ColorScaleStep) -> Hsla {
-        self.light_alpha[step - 1]
+    pub fn light_alpha(&self) -> &ColorScale {
+        &self.light_alpha
     }
 
-    pub fn dark(&self, step: ColorScaleStep) -> Hsla {
-        self.dark[step - 1]
+    pub fn dark(&self) -> &ColorScale {
+        &self.dark
     }
 
-    pub fn dark_alpha(&self, step: ColorScaleStep) -> Hsla {
-        self.dark_alpha[step - 1]
+    pub fn dark_alpha(&self) -> &ColorScale {
+        &self.dark_alpha
     }
 
     pub fn step(&self, cx: &AppContext, step: ColorScaleStep) -> Hsla {
         match cx.theme().appearance {
-            Appearance::Light => self.light(step),
-            Appearance::Dark => self.dark(step),
+            Appearance::Light => self.light().step(step),
+            Appearance::Dark => self.dark().step(step),
         }
     }
 
     pub fn step_alpha(&self, cx: &AppContext, step: ColorScaleStep) -> Hsla {
         match cx.theme().appearance {
-            Appearance::Light => self.light_alpha(step),
-            Appearance::Dark => self.dark_alpha(step),
+            Appearance::Light => self.light_alpha.step(step),
+            Appearance::Dark => self.dark_alpha.step(step),
         }
     }
 }
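
For orientation, a minimal sketch of how the reworked scale API reads at a call site, using only the types introduced above; the helper names and the `scale_set` parameter are placeholders for this note, not part of the diff:

    // Hypothetical call sites: resolve a step against the current appearance,
    // or read a fixed step straight off one of the underlying scales.
    fn solid_background(scale_set: &ColorScaleSet, cx: &AppContext) -> Hsla {
        scale_set.step(cx, ColorScaleStep::NINE)
    }

    fn lightest_tint(scale_set: &ColorScaleSet) -> Hsla {
        // Equivalent to scale_set.light().step(ColorScaleStep::ONE).
        scale_set.light().step_1()
    }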

crates/ui2/src/components/panes.rs 🔗

@@ -51,7 +51,7 @@ impl<V: 'static> Pane<V> {
                     .id("drag-target")
                     .drag_over::<ExternalPaths>(|d| d.bg(red()))
                     .on_drop(|_, files: View<ExternalPaths>, cx| {
-                        dbg!("dropped files!", files.read(cx));
+                        eprintln!("dropped files! {:?}", files.read(cx));
                     })
                     .absolute()
                     .inset_0(),

crates/ui2/src/components/tab.rs 🔗

@@ -129,7 +129,7 @@ impl Tab {
             .on_drag(move |_view, cx| cx.build_view(|cx| drag_state.clone()))
             .drag_over::<TabDragState>(|d| d.bg(black()))
             .on_drop(|_view, state: View<TabDragState>, cx| {
-                dbg!(state.read(cx));
+                eprintln!("{:?}", state.read(cx));
             })
             .px_2()
             .py_0p5()

crates/zed2/Cargo.toml 🔗

@@ -63,7 +63,7 @@ settings2 = { path = "../settings2" }
 feature_flags2 = { path = "../feature_flags2" }
 sum_tree = { path = "../sum_tree" }
 shellexpand = "2.1.0"
-text = { path = "../text" }
+text2 = { path = "../text2" }
 # terminal_view = { path = "../terminal_view" }
 theme2 = { path = "../theme2" }
 # theme_selector = { path = "../theme_selector" }
@@ -152,7 +152,7 @@ language2 = { path = "../language2", features = ["test-support"] }
 project2 = { path = "../project2", features = ["test-support"] }
 # rpc = { path = "../rpc", features = ["test-support"] }
 # settings = { path = "../settings", features = ["test-support"] }
-# text = { path = "../text", features = ["test-support"] }
+text2 = { path = "../text2", features = ["test-support"] }
 # util = { path = "../util", features = ["test-support"] }
 # workspace = { path = "../workspace", features = ["test-support"] }
 unindent.workspace = true

crates/zed2/src/main.rs 🔗

@@ -64,20 +64,26 @@ fn main() {
     log::info!("========== starting zed ==========");
     let app = App::production(Arc::new(Assets));
 
-    let installation_id = app.executor().block(installation_id()).ok();
+    let installation_id = app.background_executor().block(installation_id()).ok();
     let session_id = Uuid::new_v4().to_string();
     init_panic_hook(&app, installation_id.clone(), session_id.clone());
 
     let fs = Arc::new(RealFs);
-    let user_settings_file_rx =
-        watch_config_file(&app.executor(), fs.clone(), paths::SETTINGS.clone());
-    let _user_keymap_file_rx =
-        watch_config_file(&app.executor(), fs.clone(), paths::KEYMAP.clone());
+    let user_settings_file_rx = watch_config_file(
+        &app.background_executor(),
+        fs.clone(),
+        paths::SETTINGS.clone(),
+    );
+    let _user_keymap_file_rx = watch_config_file(
+        &app.background_executor(),
+        fs.clone(),
+        paths::KEYMAP.clone(),
+    );
 
     let login_shell_env_loaded = if stdout_is_a_pty() {
         Task::ready(())
     } else {
-        app.executor().spawn(async {
+        app.background_executor().spawn(async {
             load_login_shell_environment().await.log_err();
         })
     };
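
A hedged aside on the rename in this file: `executor()` becomes `background_executor()`, and the spawn/block call shapes are otherwise unchanged. A minimal sketch, assuming only the `App` handle from `main()` above (the helper name and its body are placeholders):

    // Hypothetical helper: async work is spawned on the background executor.
    // The Task is returned so the caller keeps it alive, mirroring how main()
    // holds `login_shell_env_loaded` above.
    fn warm_up(app: &App) -> Task<()> {
        app.background_executor().spawn(async {
            // ...background work goes here...
        })
    }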
@@ -113,7 +119,7 @@ fn main() {
         let client = client2::Client::new(http.clone(), cx);
         let mut languages = LanguageRegistry::new(login_shell_env_loaded);
         let copilot_language_server_id = languages.next_language_server_id();
-        languages.set_executor(cx.executor().clone());
+        languages.set_executor(cx.background_executor().clone());
         languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
         let languages = Arc::new(languages);
         let node_runtime = RealNodeRuntime::new(http.clone());
@@ -519,7 +525,7 @@ fn init_panic_hook(app: &App, installation_id: Option<String>, session_id: Strin
 fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
     let telemetry_settings = *client2::TelemetrySettings::get_global(cx);
 
-    cx.executor()
+    cx.background_executor()
         .spawn(async move {
             let panic_report_url = format!("{}/api/panic", &*client2::ZED_SERVER_URL);
             let mut children = smol::fs::read_dir(&*paths::LOGS_DIR).await?;
@@ -649,7 +655,7 @@ fn load_embedded_fonts(cx: &AppContext) {
     let asset_source = cx.asset_source();
     let font_paths = asset_source.list("fonts").unwrap();
     let embedded_fonts = Mutex::new(Vec::new());
-    let executor = cx.executor();
+    let executor = cx.background_executor();
 
     executor.block(executor.scoped(|scope| {
         for font_path in &font_paths {