Merge pull request #446 from zed-industries/assists

Created by Antonio Scandurra

Implement code actions
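
For context on the title: code actions arrive over the standard LSP textDocument/codeAction request. Below is a minimal sketch of the request payload, written with serde_json; the method and field names come from the LSP spec, while the id, URI, and range are made up for illustration and nothing here is taken from this PR's diff.

    use serde_json::json;

    // Sketch only: the JSON-RPC payload an editor sends to ask a language server
    // for code actions. The uri, range, and id values are illustrative.
    fn code_action_request() -> serde_json::Value {
        json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "textDocument/codeAction",
            "params": {
                "textDocument": { "uri": "file:///tmp/example.rs" },
                "range": {
                    "start": { "line": 3, "character": 0 },
                    "end": { "line": 3, "character": 12 }
                },
                // Diagnostics overlapping the range let the server offer quick fixes.
                "context": { "diagnostics": [] }
            }
        })
    }

The server replies with a list of code actions (each with a title, an optional kind, and an optional workspace edit or command) that the editor can surface and apply.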

Change summary

Cargo.lock                                 |    4 
crates/client/src/channel.rs               |   27 
crates/client/src/client.rs                |  174 +
crates/client/src/user.rs                  |    8 
crates/diagnostics/src/diagnostics.rs      |  207 +-
crates/editor/Cargo.toml                   |    2 
crates/editor/src/display_map.rs           |   51 
crates/editor/src/display_map/block_map.rs |  448 ++++--
crates/editor/src/display_map/fold_map.rs  |   28 
crates/editor/src/display_map/wrap_map.rs  |   17 
crates/editor/src/editor.rs                |  834 +++++++++---
crates/editor/src/element.rs               |  177 ++
crates/editor/src/items.rs                 |  114 +
crates/editor/src/movement.rs              |   81 
crates/editor/src/multi_buffer.rs          |  668 ++++++---
crates/editor/src/multi_buffer/anchor.rs   |   16 
crates/find/src/find.rs                    |   13 
crates/gpui/src/app.rs                     |   81 
crates/gpui/src/executor.rs                |    7 
crates/gpui/src/platform/mac/window.rs     |   32 
crates/gpui/src/presenter.rs               |   18 
crates/language/src/buffer.rs              |  708 ++++------
crates/language/src/language.rs            |   34 
crates/language/src/proto.rs               |  184 ++
crates/language/src/tests.rs               |  237 +++
crates/lsp/Cargo.toml                      |    3 
crates/lsp/src/lsp.rs                      |  280 ++-
crates/project/Cargo.toml                  |    7 
crates/project/src/fs.rs                   |  250 +++
crates/project/src/project.rs              | 1531 +++++++++++++++++------
crates/project/src/worktree.rs             |  118 -
crates/rpc/proto/zed.proto                 |  126 +
crates/rpc/src/peer.rs                     |  131 +
crates/rpc/src/proto.rs                    |   15 
crates/rpc/src/rpc.rs                      |    2 
crates/server/src/rpc.rs                   |  664 ++++-----
crates/server/src/rpc/store.rs             |  145 +
crates/text/src/anchor.rs                  |    7 
crates/text/src/rope.rs                    |   34 
crates/text/src/tests.rs                   |   76 
crates/text/src/text.rs                    |  478 ++++---
crates/theme/src/theme.rs                  |    2 
crates/workspace/src/pane.rs               |    4 
crates/workspace/src/workspace.rs          |   36 
crates/zed/assets/icons/zap.svg            |    3 
crates/zed/assets/themes/_base.toml        |    1 
crates/zed/src/zed.rs                      |    7 
script/drop-test-dbs                       |   16 
48 files changed, 5,267 insertions(+), 2,839 deletions(-)

Detailed changes

Cargo.lock

@@ -2776,6 +2776,8 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "async-pipe",
+ "ctor",
+ "env_logger",
  "futures",
  "gpui",
  "log",
@@ -2784,7 +2786,6 @@ dependencies = [
  "postage",
  "serde",
  "serde_json",
- "simplelog",
  "smol",
  "unindent",
  "util",
@@ -3523,7 +3524,6 @@ dependencies = [
  "rpc",
  "serde",
  "serde_json",
- "simplelog",
  "smol",
  "sum_tree",
  "tempdir",

crates/client/src/channel.rs

@@ -184,7 +184,8 @@ impl Channel {
         rpc: Arc<Client>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
-        let _subscription = rpc.subscribe_to_entity(details.id, cx, Self::handle_message_sent);
+        let _subscription =
+            rpc.add_entity_message_handler(details.id, cx, Self::handle_message_sent);
 
         {
             let user_store = user_store.clone();
@@ -398,29 +399,23 @@ impl Channel {
         cursor
     }
 
-    fn handle_message_sent(
-        &mut self,
+    async fn handle_message_sent(
+        this: ModelHandle<Self>,
         message: TypedEnvelope<proto::ChannelMessageSent>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let user_store = self.user_store.clone();
+        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
         let message = message
             .payload
             .message
             .ok_or_else(|| anyhow!("empty message"))?;
 
-        cx.spawn(|this, mut cx| {
-            async move {
-                let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
-                this.update(&mut cx, |this, cx| {
-                    this.insert_messages(SumTree::from_item(message, &()), cx)
-                });
-                Ok(())
-            }
-            .log_err()
-        })
-        .detach();
+        let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
+        this.update(&mut cx, |this, cx| {
+            this.insert_messages(SumTree::from_item(message, &()), cx)
+        });
+
         Ok(())
     }
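
The change above is one instance of the handler migration that runs through this PR: handlers stop being &mut self methods driven by a ModelContext and become async functions that take a ModelHandle plus an AsyncAppContext, reading model state via read_with and writing it back via update. A minimal sketch of the new shape, assuming proto::Ping as a stand-in message and an invented body:

    // Sketch only: the post-migration handler shape. `proto::Ping` stands in for
    // any message type; the body is illustrative, not taken from this diff.
    async fn handle_ping(
        this: ModelHandle<Channel>,
        _message: TypedEnvelope<proto::Ping>,
        _client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        // Snapshot what the handler needs from the model up front...
        let _user_store = this.read_with(&cx, |this, _| this.user_store.clone());
        // ...await any async work here, then write results back through the handle.
        this.update(&mut cx, |_, cx| cx.notify());
        Ok(())
    }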
 

crates/client/src/client.rs

@@ -11,8 +11,8 @@ use async_tungstenite::tungstenite::{
     error::Error as WebsocketError,
     http::{Request, StatusCode},
 };
-use futures::StreamExt;
-use gpui::{action, AsyncAppContext, Entity, ModelContext, MutableAppContext, Task};
+use futures::{future::LocalBoxFuture, FutureExt, StreamExt};
+use gpui::{action, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task};
 use http::HttpClient;
 use lazy_static::lazy_static;
 use parking_lot::RwLock;
@@ -20,10 +20,11 @@ use postage::watch;
 use rand::prelude::*;
 use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
 use std::{
-    any::TypeId,
+    any::{type_name, TypeId},
     collections::HashMap,
     convert::TryFrom,
     fmt::Write as _,
+    future::Future,
     sync::{
         atomic::{AtomicUsize, Ordering},
         Arc, Weak,
@@ -123,14 +124,17 @@ pub enum Status {
     ReconnectionError { next_reconnection: Instant },
 }
 
+type ModelHandler = Box<
+    dyn Send
+        + Sync
+        + FnMut(Box<dyn AnyTypedEnvelope>, &AsyncAppContext) -> LocalBoxFuture<'static, Result<()>>,
+>;
+
 struct ClientState {
     credentials: Option<Credentials>,
     status: (watch::Sender<Status>, watch::Receiver<Status>),
     entity_id_extractors: HashMap<TypeId, Box<dyn Send + Sync + Fn(&dyn AnyTypedEnvelope) -> u64>>,
-    model_handlers: HashMap<
-        (TypeId, Option<u64>),
-        Option<Box<dyn Send + Sync + FnMut(Box<dyn AnyTypedEnvelope>, &mut AsyncAppContext)>>,
-    >,
+    model_handlers: HashMap<(TypeId, Option<u64>), Option<ModelHandler>>,
     _maintain_connection: Option<Task<()>>,
     heartbeat_interval: Duration,
 }
@@ -262,7 +266,7 @@ impl Client {
         }
     }
 
-    pub fn subscribe<T, M, F>(
+    pub fn add_message_handler<T, M, F, Fut>(
         self: &Arc<Self>,
         cx: &mut ModelContext<M>,
         mut handler: F,
@@ -273,7 +277,8 @@ impl Client {
         F: 'static
             + Send
             + Sync
-            + FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
+            + FnMut(ModelHandle<M>, TypedEnvelope<T>, Arc<Self>, AsyncAppContext) -> Fut,
+        Fut: 'static + Future<Output = Result<()>>,
     {
         let subscription_id = (TypeId::of::<T>(), None);
         let client = self.clone();
@@ -284,11 +289,15 @@ impl Client {
             Some(Box::new(move |envelope, cx| {
                 if let Some(model) = model.upgrade(cx) {
                     let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
-                    model.update(cx, |model, cx| {
-                        if let Err(error) = handler(model, *envelope, client.clone(), cx) {
-                            log::error!("error handling message: {}", error)
-                        }
-                    });
+                    handler(model, *envelope, client.clone(), cx.clone()).boxed_local()
+                } else {
+                    async move {
+                        Err(anyhow!(
+                            "received message for {:?} but model was dropped",
+                            type_name::<M>()
+                        ))
+                    }
+                    .boxed_local()
                 }
             })),
         );
@@ -302,7 +311,7 @@ impl Client {
         }
     }
 
-    pub fn subscribe_to_entity<T, M, F>(
+    pub fn add_entity_message_handler<T, M, F, Fut>(
         self: &Arc<Self>,
         remote_id: u64,
         cx: &mut ModelContext<M>,
@@ -314,7 +323,8 @@ impl Client {
         F: 'static
             + Send
             + Sync
-            + FnMut(&mut M, TypedEnvelope<T>, Arc<Self>, &mut ModelContext<M>) -> Result<()>,
+            + FnMut(ModelHandle<M>, TypedEnvelope<T>, Arc<Self>, AsyncAppContext) -> Fut,
+        Fut: 'static + Future<Output = Result<()>>,
     {
         let subscription_id = (TypeId::of::<T>(), Some(remote_id));
         let client = self.clone();
@@ -337,11 +347,15 @@ impl Client {
             Some(Box::new(move |envelope, cx| {
                 if let Some(model) = model.upgrade(cx) {
                     let envelope = envelope.into_any().downcast::<TypedEnvelope<T>>().unwrap();
-                    model.update(cx, |model, cx| {
-                        if let Err(error) = handler(model, *envelope, client.clone(), cx) {
-                            log::error!("error handling message: {}", error)
-                        }
-                    });
+                    handler(model, *envelope, client.clone(), cx.clone()).boxed_local()
+                } else {
+                    async move {
+                        Err(anyhow!(
+                            "received message for {:?} but model was dropped",
+                            type_name::<M>()
+                        ))
+                    }
+                    .boxed_local()
                 }
             })),
         );
@@ -355,6 +369,44 @@ impl Client {
         }
     }
 
+    pub fn add_entity_request_handler<T, M, F, Fut>(
+        self: &Arc<Self>,
+        remote_id: u64,
+        cx: &mut ModelContext<M>,
+        mut handler: F,
+    ) -> Subscription
+    where
+        T: EntityMessage + RequestMessage,
+        M: Entity,
+        F: 'static
+            + Send
+            + Sync
+            + FnMut(ModelHandle<M>, TypedEnvelope<T>, Arc<Self>, AsyncAppContext) -> Fut,
+        Fut: 'static + Future<Output = Result<T::Response>>,
+    {
+        self.add_entity_message_handler(remote_id, cx, move |model, envelope, client, cx| {
+            let receipt = envelope.receipt();
+            let response = handler(model, envelope, client.clone(), cx);
+            async move {
+                match response.await {
+                    Ok(response) => {
+                        client.respond(receipt, response)?;
+                        Ok(())
+                    }
+                    Err(error) => {
+                        client.respond_with_error(
+                            receipt,
+                            proto::Error {
+                                message: error.to_string(),
+                            },
+                        )?;
+                        Err(error)
+                    }
+                }
+            }
+        })
+    }
+
     pub fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool {
         read_credentials_from_keychain(cx).is_some()
     }
@@ -442,7 +494,7 @@ impl Client {
         let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn).await;
         cx.foreground()
             .spawn({
-                let mut cx = cx.clone();
+                let cx = cx.clone();
                 let this = self.clone();
                 async move {
                     while let Some(message) = incoming.next().await {
@@ -462,23 +514,41 @@ impl Client {
                         if let Some(handler) = state.model_handlers.get_mut(&handler_key) {
                             let mut handler = handler.take().unwrap();
                             drop(state); // Avoid deadlocks if the handler interacts with rpc::Client
+                            let future = (handler)(message, &cx);
+                            {
+                                let mut state = this.state.write();
+                                if state.model_handlers.contains_key(&handler_key) {
+                                    state.model_handlers.insert(handler_key, Some(handler));
+                                }
+                            }
 
+                            let client_id = this.id;
                             log::debug!(
                                 "rpc message received. client_id:{}, name:{}",
-                                this.id,
+                                client_id,
                                 type_name
                             );
-                            (handler)(message, &mut cx);
-                            log::debug!(
-                                "rpc message handled. client_id:{}, name:{}",
-                                this.id,
-                                type_name
-                            );
-
-                            let mut state = this.state.write();
-                            if state.model_handlers.contains_key(&handler_key) {
-                                state.model_handlers.insert(handler_key, Some(handler));
-                            }
+                            cx.foreground()
+                                .spawn(async move {
+                                    match future.await {
+                                        Ok(()) => {
+                                            log::debug!(
+                                                "rpc message handled. client_id:{}, name:{}",
+                                                client_id,
+                                                type_name
+                                            );
+                                        }
+                                        Err(error) => {
+                                            log::error!(
+                                                "error handling rpc message. client_id:{}, name:{}, error:{}",
+                                                client_id,
+                                                type_name,
+                                                error
+                                            );
+                                        }
+                                    }
+                                })
+                                .detach();
                         } else {
                             log::info!("unhandled message {}", type_name);
                         }
@@ -715,16 +785,12 @@ impl Client {
         response
     }
 
-    pub fn respond<T: RequestMessage>(
-        &self,
-        receipt: Receipt<T>,
-        response: T::Response,
-    ) -> Result<()> {
+    fn respond<T: RequestMessage>(&self, receipt: Receipt<T>, response: T::Response) -> Result<()> {
         log::debug!("rpc respond. client_id: {}. name:{}", self.id, T::NAME);
         self.peer.respond(receipt, response)
     }
 
-    pub fn respond_with_error<T: RequestMessage>(
+    fn respond_with_error<T: RequestMessage>(
         &self,
         receipt: Receipt<T>,
         error: proto::Error,
@@ -861,22 +927,22 @@ mod tests {
         let (mut done_tx1, mut done_rx1) = postage::oneshot::channel();
         let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
         let _subscription1 = model.update(&mut cx, |_, cx| {
-            client.subscribe_to_entity(
+            client.add_entity_message_handler(
                 1,
                 cx,
                 move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
                     postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
-                    Ok(())
+                    async { Ok(()) }
                 },
             )
         });
         let _subscription2 = model.update(&mut cx, |_, cx| {
-            client.subscribe_to_entity(
+            client.add_entity_message_handler(
                 2,
                 cx,
                 move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| {
                     postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
-                    Ok(())
+                    async { Ok(()) }
                 },
             )
         });
@@ -884,10 +950,10 @@ mod tests {
         // Ensure dropping a subscription for the same entity type still allows receiving of
         // messages for other entity IDs of the same type.
         let subscription3 = model.update(&mut cx, |_, cx| {
-            client.subscribe_to_entity(
+            client.add_entity_message_handler(
                 3,
                 cx,
-                move |_, _: TypedEnvelope<proto::UnshareProject>, _, _| Ok(()),
+                |_, _: TypedEnvelope<proto::UnshareProject>, _, _| async { Ok(()) },
             )
         });
         drop(subscription3);
@@ -910,16 +976,16 @@ mod tests {
         let (mut done_tx1, _done_rx1) = postage::oneshot::channel();
         let (mut done_tx2, mut done_rx2) = postage::oneshot::channel();
         let subscription1 = model.update(&mut cx, |_, cx| {
-            client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
+            client.add_message_handler(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
                 postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap();
-                Ok(())
+                async { Ok(()) }
             })
         });
         drop(subscription1);
         let _subscription2 = model.update(&mut cx, |_, cx| {
-            client.subscribe(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
+            client.add_message_handler(cx, move |_, _: TypedEnvelope<proto::Ping>, _, _| {
                 postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap();
-                Ok(())
+                async { Ok(()) }
             })
         });
         server.send(proto::Ping {});
@@ -937,12 +1003,12 @@ mod tests {
         let model = cx.add_model(|_| Model { subscription: None });
         let (mut done_tx, mut done_rx) = postage::oneshot::channel();
         model.update(&mut cx, |model, cx| {
-            model.subscription = Some(client.subscribe(
+            model.subscription = Some(client.add_message_handler(
                 cx,
-                move |model, _: TypedEnvelope<proto::Ping>, _, _| {
-                    model.subscription.take();
+                move |model, _: TypedEnvelope<proto::Ping>, _, mut cx| {
+                    model.update(&mut cx, |model, _| model.subscription.take());
                     postage::sink::Sink::try_send(&mut done_tx, ()).unwrap();
-                    Ok(())
+                    async { Ok(()) }
                 },
             ));
         });
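
One addition above deserves a usage note: add_entity_request_handler wraps add_entity_message_handler so that a handler can return T::Response and have the reply sent automatically, with any error forwarded to the requester as a proto::Error. A hedged registration sketch; proto::GetCompletions and respond_to_get_completions are hypothetical names used only to show the shape, not part of this diff:

    // Sketch only: registering a request handler on a shared model. The message
    // type `proto::GetCompletions` and the method `respond_to_get_completions`
    // are hypothetical; only the registration shape mirrors the new API.
    let _subscription = client.add_entity_request_handler(
        remote_id,
        cx,
        |project: ModelHandle<Project>,
         envelope: TypedEnvelope<proto::GetCompletions>,
         _client,
         mut cx| async move {
            // Returning Err here is sent back to the requester as a proto::Error.
            project.update(&mut cx, |project, cx| {
                project.respond_to_get_completions(envelope.payload, cx)
            })
        },
    );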

crates/client/src/user.rs

@@ -58,11 +58,11 @@ impl UserStore {
         let (mut current_user_tx, current_user_rx) = watch::channel();
         let (mut update_contacts_tx, mut update_contacts_rx) =
             watch::channel::<Option<proto::UpdateContacts>>();
-        let update_contacts_subscription = client.subscribe(
+        let update_contacts_subscription = client.add_message_handler(
             cx,
-            move |_: &mut Self, msg: TypedEnvelope<proto::UpdateContacts>, _, _| {
-                let _ = update_contacts_tx.blocking_send(Some(msg.payload));
-                Ok(())
+            move |_: ModelHandle<Self>, msg: TypedEnvelope<proto::UpdateContacts>, _, _| {
+                *update_contacts_tx.borrow_mut() = Some(msg.payload);
+                async move { Ok(()) }
             },
         );
         Self {

crates/diagnostics/src/diagnostics.rs

@@ -7,7 +7,7 @@ use editor::{
     display_map::{BlockDisposition, BlockId, BlockProperties, RenderBlock},
     highlight_diagnostic_message,
     items::BufferItemHandle,
-    Autoscroll, BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer, ToOffset,
+    Autoscroll, BuildSettings, Editor, ExcerptId, MultiBuffer, ToOffset,
 };
 use gpui::{
     action, elements::*, fonts::TextStyle, keymap::Binding, AnyViewHandle, AppContext, Entity,
@@ -28,7 +28,7 @@ use std::{
     sync::Arc,
 };
 use util::TryFutureExt;
-use workspace::{ItemNavHistory, Workspace};
+use workspace::{ItemNavHistory, ItemViewHandle as _, Workspace};
 
 action!(Deploy);
 action!(OpenExcerpts);
@@ -68,7 +68,6 @@ struct ProjectDiagnosticsEditor {
 
 struct PathState {
     path: ProjectPath,
-    header: Option<BlockId>,
     diagnostic_groups: Vec<DiagnosticGroupState>,
 }
 
@@ -145,7 +144,12 @@ impl ProjectDiagnosticsEditor {
         let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id()));
         let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone());
         let editor = cx.add_view(|cx| {
-            let mut editor = Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx);
+            let mut editor = Editor::for_buffer(
+                excerpts.clone(),
+                build_settings.clone(),
+                Some(project.clone()),
+                cx,
+            );
             editor.set_vertical_scroll_margin(5, cx);
             editor
         });
@@ -187,7 +191,7 @@ impl ProjectDiagnosticsEditor {
 
             for selection in editor.local_selections::<usize>(cx) {
                 for (buffer, mut range) in
-                    excerpts.excerpted_buffers(selection.start..selection.end, cx)
+                    excerpts.range_to_buffer_ranges(selection.start..selection.end, cx)
                 {
                     if selection.reversed {
                         mem::swap(&mut range.start, &mut range.end);
@@ -253,7 +257,6 @@ impl ProjectDiagnosticsEditor {
                     ix,
                     PathState {
                         path: path.clone(),
-                        header: None,
                         diagnostic_groups: Default::default(),
                     },
                 );
@@ -330,14 +333,15 @@ impl ProjectDiagnosticsEditor {
                                 Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
                                 Bias::Left,
                             );
-                            let excerpt_id = excerpts.insert_excerpt_after(
-                                &prev_excerpt_id,
-                                ExcerptProperties {
-                                    buffer: &buffer,
-                                    range: excerpt_start..excerpt_end,
-                                },
-                                excerpts_cx,
-                            );
+                            let excerpt_id = excerpts
+                                .insert_excerpts_after(
+                                    &prev_excerpt_id,
+                                    buffer.clone(),
+                                    [excerpt_start..excerpt_end],
+                                    excerpts_cx,
+                                )
+                                .pop()
+                                .unwrap();
 
                             prev_excerpt_id = excerpt_id.clone();
                             first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone());
@@ -360,14 +364,6 @@ impl ProjectDiagnosticsEditor {
                                     ),
                                     disposition: BlockDisposition::Above,
                                 });
-                            } else {
-                                group_state.block_count += 1;
-                                blocks_to_add.push(BlockProperties {
-                                    position: header_position,
-                                    height: 1,
-                                    render: context_header_renderer(self.build_settings.clone()),
-                                    disposition: BlockDisposition::Above,
-                                });
                             }
 
                             for entry in &group.entries[*start_ix..ix] {
@@ -416,27 +412,17 @@ impl ProjectDiagnosticsEditor {
         });
 
         self.editor.update(cx, |editor, cx| {
-            blocks_to_remove.extend(path_state.header);
             editor.remove_blocks(blocks_to_remove, cx);
-            let header_block = first_excerpt_id.map(|excerpt_id| BlockProperties {
-                position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, language::Anchor::min()),
-                height: 2,
-                render: path_header_renderer(buffer, self.build_settings.clone()),
-                disposition: BlockDisposition::Above,
-            });
             let block_ids = editor.insert_blocks(
-                blocks_to_add
-                    .into_iter()
-                    .map(|block| {
-                        let (excerpt_id, text_anchor) = block.position;
-                        BlockProperties {
-                            position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
-                            height: block.height,
-                            render: block.render,
-                            disposition: block.disposition,
-                        }
-                    })
-                    .chain(header_block.into_iter()),
+                blocks_to_add.into_iter().map(|block| {
+                    let (excerpt_id, text_anchor) = block.position;
+                    BlockProperties {
+                        position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor),
+                        height: block.height,
+                        render: block.render,
+                        disposition: block.disposition,
+                    }
+                }),
                 cx,
             );
 
@@ -444,7 +430,6 @@ impl ProjectDiagnosticsEditor {
             for group_state in &mut groups_to_add {
                 group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect();
             }
-            path_state.header = block_ids.next();
         });
 
         for ix in group_ixs_to_remove.into_iter().rev() {
@@ -554,10 +539,8 @@ impl workspace::Item for ProjectDiagnostics {
 }
 
 impl workspace::ItemView for ProjectDiagnosticsEditor {
-    type ItemHandle = ModelHandle<ProjectDiagnostics>;
-
-    fn item_handle(&self, _: &AppContext) -> Self::ItemHandle {
-        self.model.clone()
+    fn item_id(&self, _: &AppContext) -> usize {
+        self.model.id()
     }
 
     fn tab_content(&self, style: &theme::Tab, _: &AppContext) -> ElementBox {
@@ -589,8 +572,12 @@ impl workspace::ItemView for ProjectDiagnosticsEditor {
         true
     }
 
-    fn save(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
-        self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx))
+    fn save(
+        &mut self,
+        project: ModelHandle<Project>,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>> {
+        self.editor.save(project, cx)
     }
 
     fn can_save_as(&self, _: &AppContext) -> bool {
@@ -655,51 +642,6 @@ impl workspace::ItemView for ProjectDiagnosticsEditor {
     }
 }
 
-fn path_header_renderer(buffer: ModelHandle<Buffer>, build_settings: BuildSettings) -> RenderBlock {
-    Arc::new(move |cx| {
-        let settings = build_settings(cx);
-        let style = settings.style.diagnostic_path_header;
-        let font_size = (style.text_scale_factor * settings.style.text.font_size).round();
-
-        let mut filename = None;
-        let mut path = None;
-        if let Some(file) = buffer.read(&**cx).file() {
-            filename = file
-                .path()
-                .file_name()
-                .map(|f| f.to_string_lossy().to_string());
-            path = file
-                .path()
-                .parent()
-                .map(|p| p.to_string_lossy().to_string() + "/");
-        }
-
-        Flex::row()
-            .with_child(
-                Label::new(
-                    filename.unwrap_or_else(|| "untitled".to_string()),
-                    style.filename.text.clone().with_font_size(font_size),
-                )
-                .contained()
-                .with_style(style.filename.container)
-                .boxed(),
-            )
-            .with_children(path.map(|path| {
-                Label::new(path, style.path.text.clone().with_font_size(font_size))
-                    .contained()
-                    .with_style(style.path.container)
-                    .boxed()
-            }))
-            .aligned()
-            .left()
-            .contained()
-            .with_style(style.container)
-            .with_padding_left(cx.gutter_padding + cx.scroll_x * cx.em_width)
-            .expanded()
-            .named("path header block")
-    })
-}
-
 fn diagnostic_header_renderer(
     diagnostic: Diagnostic,
     build_settings: BuildSettings,
@@ -753,17 +695,6 @@ fn diagnostic_header_renderer(
     })
 }
 
-fn context_header_renderer(build_settings: BuildSettings) -> RenderBlock {
-    Arc::new(move |cx| {
-        let settings = build_settings(cx);
-        let text_style = settings.style.text.clone();
-        Label::new("…".to_string(), text_style)
-            .contained()
-            .with_padding_left(cx.gutter_padding + cx.scroll_x * cx.em_width)
-            .named("collapsed context")
-    })
-}
-
 pub(crate) fn render_summary(
     summary: &DiagnosticSummary,
     text_style: &TextStyle,
@@ -838,7 +769,10 @@ fn compare_diagnostics<L: language::ToOffset, R: language::ToOffset>(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use editor::{display_map::BlockContext, DisplayPoint, EditorSnapshot};
+    use editor::{
+        display_map::{BlockContext, TransformBlock},
+        DisplayPoint, EditorSnapshot,
+    };
     use gpui::TestAppContext;
     use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16};
     use serde_json::json;
@@ -985,8 +919,9 @@ mod tests {
                 [
                     (0, "path header block".into()),
                     (2, "diagnostic header".into()),
-                    (15, "diagnostic header".into()),
-                    (24, "collapsed context".into()),
+                    (15, "collapsed context".into()),
+                    (16, "diagnostic header".into()),
+                    (25, "collapsed context".into()),
                 ]
             );
             assert_eq!(
@@ -1011,6 +946,7 @@ mod tests {
                     "    c(y);\n",
                     "\n", // supporting diagnostic
                     "    d(x);\n",
+                    "\n", // context ellipsis
                     // diagnostic group 2
                     "\n", // primary message
                     "\n", // padding
@@ -1073,8 +1009,9 @@ mod tests {
                     (2, "diagnostic header".into()),
                     (7, "path header block".into()),
                     (9, "diagnostic header".into()),
-                    (22, "diagnostic header".into()),
-                    (31, "collapsed context".into()),
+                    (22, "collapsed context".into()),
+                    (23, "diagnostic header".into()),
+                    (32, "collapsed context".into()),
                 ]
             );
             assert_eq!(
@@ -1110,6 +1047,7 @@ mod tests {
                     "    c(y);\n",
                     "\n", // supporting diagnostic
                     "    d(x);\n",
+                    "\n", // collapsed context
                     // diagnostic group 2
                     "\n", // primary message
                     "\n", // filename
@@ -1184,11 +1122,13 @@ mod tests {
                 [
                     (0, "path header block".into()),
                     (2, "diagnostic header".into()),
-                    (7, "diagnostic header".into()),
-                    (12, "path header block".into()),
-                    (14, "diagnostic header".into()),
-                    (27, "diagnostic header".into()),
-                    (36, "collapsed context".into()),
+                    (7, "collapsed context".into()),
+                    (8, "diagnostic header".into()),
+                    (13, "path header block".into()),
+                    (15, "diagnostic header".into()),
+                    (28, "collapsed context".into()),
+                    (29, "diagnostic header".into()),
+                    (38, "collapsed context".into()),
                 ]
             );
             assert_eq!(
@@ -1205,6 +1145,7 @@ mod tests {
                     "const a: i32 = 'a';\n",
                     "\n", // supporting diagnostic
                     "const b: i32 = c;\n",
+                    "\n", // context ellipsis
                     // diagnostic group 2
                     "\n", // primary message
                     "\n", // padding
@@ -1230,6 +1171,7 @@ mod tests {
                     "    c(y);\n",
                     "\n", // supporting diagnostic
                     "    d(x);\n",
+                    "\n", // context ellipsis
                     // diagnostic group 2
                     "\n", // primary message
                     "\n", // filename
@@ -1254,18 +1196,31 @@ mod tests {
         editor
             .blocks_in_range(0..editor.max_point().row())
             .filter_map(|(row, block)| {
-                block
-                    .render(&BlockContext {
-                        cx,
-                        anchor_x: 0.,
-                        scroll_x: 0.,
-                        gutter_padding: 0.,
-                        gutter_width: 0.,
-                        line_height: 0.,
-                        em_width: 0.,
-                    })
-                    .name()
-                    .map(|s| (row, s.to_string()))
+                let name = match block {
+                    TransformBlock::Custom(block) => block
+                        .render(&BlockContext {
+                            cx,
+                            anchor_x: 0.,
+                            scroll_x: 0.,
+                            gutter_padding: 0.,
+                            gutter_width: 0.,
+                            line_height: 0.,
+                            em_width: 0.,
+                        })
+                        .name()?
+                        .to_string(),
+                    TransformBlock::ExcerptHeader {
+                        starts_new_buffer, ..
+                    } => {
+                        if *starts_new_buffer {
+                            "path header block".to_string()
+                        } else {
+                            "collapsed context".to_string()
+                        }
+                    }
+                };
+
+                Some((row, name))
             })
             .collect()
     }
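
One API change surfaced in this file: MultiBuffer::insert_excerpt_after, which took a single ExcerptProperties value, becomes insert_excerpts_after, which takes the buffer handle plus an iterator of ranges and returns an id per inserted excerpt (hence the .pop().unwrap() above for the single-range case). A minimal usage sketch with assumed bindings:

    // Sketch only: inserting two excerpts from the same buffer in one call.
    // `excerpts`, `prev_excerpt_id`, `buffer`, and the ranges are assumed bindings.
    let excerpt_ids = excerpts.update(cx, |excerpts, cx| {
        excerpts.insert_excerpts_after(
            &prev_excerpt_id,
            buffer.clone(),
            [first_start..first_end, second_start..second_end],
            cx,
        )
    });
    assert_eq!(excerpt_ids.len(), 2); // one id per supplied range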

crates/editor/Cargo.toml

@@ -12,6 +12,7 @@ test-support = [
     "text/test-support",
     "language/test-support",
     "gpui/test-support",
+    "project/test-support",
     "util/test-support",
 ]
 
@@ -48,6 +49,7 @@ language = { path = "../language", features = ["test-support"] }
 lsp = { path = "../lsp", features = ["test-support"] }
 gpui = { path = "../gpui", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 ctor = "0.1"
 env_logger = "0.8"
 rand = "0.8"

crates/editor/src/display_map.rs

@@ -15,8 +15,8 @@ use tab_map::TabMap;
 use wrap_map::WrapMap;
 
 pub use block_map::{
-    AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext,
-    BlockDisposition, BlockId, BlockProperties, RenderBlock,
+    BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext,
+    BlockDisposition, BlockId, BlockProperties, RenderBlock, TransformBlock,
 };
 
 pub trait ToDisplayPoint {
@@ -43,13 +43,15 @@ impl DisplayMap {
         font_id: FontId,
         font_size: f32,
         wrap_width: Option<f32>,
+        buffer_header_height: u8,
+        excerpt_header_height: u8,
         cx: &mut ModelContext<Self>,
     ) -> Self {
         let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
         let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx));
         let (tab_map, snapshot) = TabMap::new(snapshot, tab_size);
         let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx);
-        let block_map = BlockMap::new(snapshot);
+        let block_map = BlockMap::new(snapshot, buffer_header_height, excerpt_header_height);
         cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
         DisplayMap {
             buffer,
@@ -318,7 +320,7 @@ impl DisplaySnapshot {
     pub fn blocks_in_range<'a>(
         &'a self,
         rows: Range<u32>,
-    ) -> impl Iterator<Item = (u32, &'a AlignedBlock)> {
+    ) -> impl Iterator<Item = (u32, &'a TransformBlock)> {
         self.blocks_snapshot.blocks_in_range(rows)
     }
 
@@ -471,6 +473,8 @@ mod tests {
 
         let font_cache = cx.font_cache().clone();
         let tab_size = rng.gen_range(1..=4);
+        let buffer_start_excerpt_header_height = rng.gen_range(1..=5);
+        let excerpt_header_height = rng.gen_range(1..=5);
         let family_id = font_cache.load_family(&["Helvetica"]).unwrap();
         let font_id = font_cache
             .select_font(family_id, &Default::default())
@@ -497,7 +501,16 @@ mod tests {
         });
 
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
+            DisplayMap::new(
+                buffer.clone(),
+                tab_size,
+                font_id,
+                font_size,
+                wrap_width,
+                buffer_start_excerpt_header_height,
+                excerpt_header_height,
+                cx,
+            )
         });
         let mut notifications = observe(&map, &mut cx);
         let mut fold_count = 0;
@@ -711,7 +724,16 @@ mod tests {
         let text = "one two three four five\nsix seven eight";
         let buffer = MultiBuffer::build_simple(text, cx);
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx)
+            DisplayMap::new(
+                buffer.clone(),
+                tab_size,
+                font_id,
+                font_size,
+                wrap_width,
+                1,
+                1,
+                cx,
+            )
         });
 
         let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
@@ -791,7 +813,7 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx)
+            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx)
         });
         buffer.update(cx, |buffer, cx| {
             buffer.edit(
@@ -870,8 +892,8 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
 
-        let map =
-            cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
+        let map = cx
+            .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx));
         assert_eq!(
             cx.update(|cx| chunks(0..5, &map, &theme, cx)),
             vec![
@@ -958,8 +980,9 @@ mod tests {
             .unwrap();
         let font_size = 16.0;
 
-        let map = cx
-            .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, Some(40.0), cx));
+        let map = cx.add_model(|cx| {
+            DisplayMap::new(buffer, tab_size, font_id, font_size, Some(40.0), 1, 1, cx)
+        });
         assert_eq!(
             cx.update(|cx| chunks(0..5, &map, &theme, cx)),
             [
@@ -1003,7 +1026,7 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx)
+            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx)
         });
         let map = map.update(cx, |map, cx| map.snapshot(cx));
 
@@ -1047,7 +1070,7 @@ mod tests {
         let font_size = 14.0;
 
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx)
+            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx)
         });
         let map = map.update(cx, |map, cx| map.snapshot(cx));
         assert_eq!(map.text(), "✅       α\nβ   \n🏀β      γ");
@@ -1105,7 +1128,7 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
         let map = cx.add_model(|cx| {
-            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx)
+            DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx)
         });
         assert_eq!(
             map.update(cx, |map, cx| map.snapshot(cx)).max_point(),
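
The two new trailing arguments to DisplayMap::new seen throughout these tests are the header heights the block map reserves for excerpt boundaries: the first (buffer_header_height) applies when an excerpt starts a new buffer, the second (excerpt_header_height) to boundaries between excerpts of the same buffer. A minimal call sketch, with the surrounding bindings assumed:

    // Sketch only: the numeric literals and other bindings are assumed, not taken
    // from this diff; only the parameter order follows the new signature.
    let map = cx.add_model(|cx| {
        DisplayMap::new(
            buffer.clone(),
            tab_size,
            font_id,
            font_size,
            wrap_width,
            2, // buffer_header_height: rows for a header that starts a new buffer
            1, // excerpt_header_height: rows for headers between excerpts of one buffer
            cx,
        )
    });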

crates/editor/src/display_map/block_map.rs

@@ -1,11 +1,12 @@
 use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot};
 use crate::{Anchor, ToPoint as _};
-use collections::{HashMap, HashSet};
+use collections::{Bound, HashMap, HashSet};
 use gpui::{AppContext, ElementBox};
-use language::Chunk;
+use language::{BufferSnapshot, Chunk, Patch};
 use parking_lot::Mutex;
 use std::{
-    cmp::{self, Ordering, Reverse},
+    cell::RefCell,
+    cmp::{self, Ordering},
     fmt::Debug,
     ops::{Deref, Range},
     sync::{
@@ -20,9 +21,11 @@ const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
 
 pub struct BlockMap {
     next_block_id: AtomicUsize,
-    wrap_snapshot: Mutex<WrapSnapshot>,
+    wrap_snapshot: RefCell<WrapSnapshot>,
     blocks: Vec<Arc<Block>>,
-    transforms: Mutex<SumTree<Transform>>,
+    transforms: RefCell<SumTree<Transform>>,
+    buffer_header_height: u8,
+    excerpt_header_height: u8,
 }
 
 pub struct BlockMapWriter<'a>(&'a mut BlockMap);
@@ -84,13 +87,46 @@ pub enum BlockDisposition {
 #[derive(Clone, Debug)]
 struct Transform {
     summary: TransformSummary,
-    block: Option<AlignedBlock>,
+    block: Option<TransformBlock>,
 }
 
-#[derive(Clone, Debug)]
-pub struct AlignedBlock {
-    block: Arc<Block>,
-    column: u32,
+#[derive(Clone)]
+pub enum TransformBlock {
+    Custom(Arc<Block>),
+    ExcerptHeader {
+        buffer: BufferSnapshot,
+        range: Range<text::Anchor>,
+        height: u8,
+        starts_new_buffer: bool,
+    },
+}
+
+impl TransformBlock {
+    fn disposition(&self) -> BlockDisposition {
+        match self {
+            TransformBlock::Custom(block) => block.disposition,
+            TransformBlock::ExcerptHeader { .. } => BlockDisposition::Above,
+        }
+    }
+
+    pub fn height(&self) -> u8 {
+        match self {
+            TransformBlock::Custom(block) => block.height,
+            TransformBlock::ExcerptHeader { height, .. } => *height,
+        }
+    }
+}
+
+impl Debug for TransformBlock {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Custom(block) => f.debug_struct("Custom").field("block", block).finish(),
+            Self::ExcerptHeader { buffer, .. } => f
+                .debug_struct("ExcerptHeader")
+                .field("path", &buffer.path())
+                .finish(),
+        }
+    }
 }
 
 #[derive(Clone, Debug, Default)]
@@ -115,40 +151,71 @@ pub struct BlockBufferRows<'a> {
 }
 
 impl BlockMap {
-    pub fn new(wrap_snapshot: WrapSnapshot) -> Self {
-        Self {
+    pub fn new(
+        wrap_snapshot: WrapSnapshot,
+        buffer_header_height: u8,
+        excerpt_header_height: u8,
+    ) -> Self {
+        let row_count = wrap_snapshot.max_point().row() + 1;
+        let map = Self {
             next_block_id: AtomicUsize::new(0),
             blocks: Vec::new(),
-            transforms: Mutex::new(SumTree::from_item(
-                Transform::isomorphic(wrap_snapshot.text_summary().lines.row + 1),
-                &(),
-            )),
-            wrap_snapshot: Mutex::new(wrap_snapshot),
-        }
+            transforms: RefCell::new(SumTree::from_item(Transform::isomorphic(row_count), &())),
+            wrap_snapshot: RefCell::new(wrap_snapshot.clone()),
+            buffer_header_height,
+            excerpt_header_height,
+        };
+        map.sync(
+            &wrap_snapshot,
+            Patch::new(vec![Edit {
+                old: 0..row_count,
+                new: 0..row_count,
+            }]),
+        );
+        map
     }
 
-    pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockSnapshot {
+    pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockSnapshot {
         self.sync(&wrap_snapshot, edits);
-        *self.wrap_snapshot.lock() = wrap_snapshot.clone();
+        *self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone();
         BlockSnapshot {
             wrap_snapshot,
-            transforms: self.transforms.lock().clone(),
+            transforms: self.transforms.borrow().clone(),
         }
     }
 
-    pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Vec<WrapEdit>) -> BlockMapWriter {
+    pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter {
         self.sync(&wrap_snapshot, edits);
-        *self.wrap_snapshot.lock() = wrap_snapshot;
+        *self.wrap_snapshot.borrow_mut() = wrap_snapshot;
         BlockMapWriter(self)
     }
 
-    fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec<WrapEdit>) {
+    fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: Patch<u32>) {
+        let buffer = wrap_snapshot.buffer_snapshot();
+
+        // Handle changing the last excerpt if it is empty.
+        if buffer.trailing_excerpt_update_count()
+            != self
+                .wrap_snapshot
+                .borrow()
+                .buffer_snapshot()
+                .trailing_excerpt_update_count()
+        {
+            let max_point = wrap_snapshot.max_point();
+            let edit_start = wrap_snapshot.prev_row_boundary(max_point);
+            let edit_end = max_point.row() + 1;
+            edits = edits.compose([WrapEdit {
+                old: edit_start..edit_end,
+                new: edit_start..edit_end,
+            }]);
+        }
+
+        let edits = edits.into_inner();
         if edits.is_empty() {
             return;
         }
 
-        let buffer = wrap_snapshot.buffer_snapshot();
-        let mut transforms = self.transforms.lock();
+        let mut transforms = self.transforms.borrow_mut();
         let mut new_transforms = SumTree::new();
         let old_row_count = transforms.summary().input_rows;
         let new_row_count = wrap_snapshot.max_point().row() + 1;
@@ -170,7 +237,7 @@ impl BlockMap {
                         if transform
                             .block
                             .as_ref()
-                            .map_or(false, |b| b.disposition.is_below())
+                            .map_or(false, |b| b.disposition().is_below())
                         {
                             new_transforms.push(transform.clone(), &());
                             cursor.next(&());
@@ -195,7 +262,7 @@ impl BlockMap {
                     if transform
                         .block
                         .as_ref()
-                        .map_or(false, |b| b.disposition.is_below())
+                        .map_or(false, |b| b.disposition().is_below())
                     {
                         cursor.next(&());
                     } else {
@@ -216,7 +283,7 @@ impl BlockMap {
                             if transform
                                 .block
                                 .as_ref()
-                                .map_or(false, |b| b.disposition.is_below())
+                                .map_or(false, |b| b.disposition().is_below())
                             {
                                 cursor.next(&());
                             } else {
@@ -233,28 +300,30 @@ impl BlockMap {
             // Find the blocks within this edited region.
             let new_buffer_start =
                 wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
-            let start_anchor = buffer.anchor_before(new_buffer_start);
+            let start_bound = Bound::Included(new_buffer_start);
             let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| {
                 probe
                     .position
-                    .cmp(&start_anchor, &buffer)
-                    .unwrap()
+                    .to_point(&buffer)
+                    .cmp(&new_buffer_start)
                     .then(Ordering::Greater)
             }) {
                 Ok(ix) | Err(ix) => last_block_ix + ix,
             };
 
+            let end_bound;
             let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() {
+                end_bound = Bound::Unbounded;
                 self.blocks.len()
             } else {
                 let new_buffer_end =
                     wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left);
-                let end_anchor = buffer.anchor_before(new_buffer_end);
+                end_bound = Bound::Excluded(new_buffer_end);
                 match self.blocks[start_block_ix..].binary_search_by(|probe| {
                     probe
                         .position
-                        .cmp(&end_anchor, &buffer)
-                        .unwrap()
+                        .to_point(&buffer)
+                        .cmp(&new_buffer_end)
                         .then(Ordering::Greater)
                 }) {
                     Ok(ix) | Err(ix) => start_block_ix + ix,
@@ -268,7 +337,6 @@ impl BlockMap {
                     .iter()
                     .map(|block| {
                         let mut position = block.position.to_point(&buffer);
-                        let column = wrap_snapshot.from_point(position, Bias::Left).column();
                         match block.disposition {
                             BlockDisposition::Above => position.column = 0,
                             BlockDisposition::Below => {
@@ -276,25 +344,57 @@ impl BlockMap {
                             }
                         }
                         let position = wrap_snapshot.from_point(position, Bias::Left);
-                        (position.row(), column, block.clone())
+                        (position.row(), TransformBlock::Custom(block.clone()))
+                    }),
+            );
+            blocks_in_edit.extend(
+                buffer
+                    .excerpt_boundaries_in_range((start_bound, end_bound))
+                    .map(|excerpt_boundary| {
+                        (
+                            wrap_snapshot
+                                .from_point(Point::new(excerpt_boundary.row, 0), Bias::Left)
+                                .row(),
+                            TransformBlock::ExcerptHeader {
+                                buffer: excerpt_boundary.buffer,
+                                range: excerpt_boundary.range,
+                                height: if excerpt_boundary.starts_new_buffer {
+                                    self.buffer_header_height
+                                } else {
+                                    self.excerpt_header_height
+                                },
+                                starts_new_buffer: excerpt_boundary.starts_new_buffer,
+                            },
+                        )
                     }),
             );
 
-            // When multiple blocks are on the same row, newer blocks appear above older
-            // blocks. This is arbitrary, but we currently rely on it in ProjectDiagnosticsEditor.
-            blocks_in_edit
-                .sort_by_key(|(row, _, block)| (*row, block.disposition, Reverse(block.id)));
+            // Place excerpt headers above custom blocks on the same row.
+            blocks_in_edit.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| {
+                row_a.cmp(&row_b).then_with(|| match (block_a, block_b) {
+                    (
+                        TransformBlock::ExcerptHeader { .. },
+                        TransformBlock::ExcerptHeader { .. },
+                    ) => Ordering::Equal,
+                    (TransformBlock::ExcerptHeader { .. }, _) => Ordering::Less,
+                    (_, TransformBlock::ExcerptHeader { .. }) => Ordering::Greater,
+                    (TransformBlock::Custom(block_a), TransformBlock::Custom(block_b)) => block_a
+                        .disposition
+                        .cmp(&block_b.disposition)
+                        .then_with(|| block_a.id.cmp(&block_b.id)),
+                })
+            });
 
             // For each of these blocks, insert a new isomorphic transform preceding the block,
             // and then insert the block itself.
-            for (block_row, column, block) in blocks_in_edit.drain(..) {
-                let insertion_row = match block.disposition {
+            for (block_row, block) in blocks_in_edit.drain(..) {
+                let insertion_row = match block.disposition() {
                     BlockDisposition::Above => block_row,
                     BlockDisposition::Below => block_row + 1,
                 };
                 let extent_before_block = insertion_row - new_transforms.summary().input_rows;
                 push_isomorphic(&mut new_transforms, extent_before_block);
-                new_transforms.push(Transform::block(block, column), &());
+                new_transforms.push(Transform::block(block), &());
             }
 
             old_end = WrapRow(old_end.0.min(old_row_count));
@@ -375,8 +475,8 @@ impl<'a> BlockMapWriter<'a> {
         blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
     ) -> Vec<BlockId> {
         let mut ids = Vec::new();
-        let mut edits = Vec::<Edit<u32>>::new();
-        let wrap_snapshot = &*self.0.wrap_snapshot.lock();
+        let mut edits = Patch::default();
+        let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
         let buffer = wrap_snapshot.buffer_snapshot();
 
         for block in blocks {
@@ -411,15 +511,10 @@ impl<'a> BlockMapWriter<'a> {
                 }),
             );
 
-            if let Err(edit_ix) = edits.binary_search_by_key(&start_row, |edit| edit.old.start) {
-                edits.insert(
-                    edit_ix,
-                    Edit {
-                        old: start_row..end_row,
-                        new: start_row..end_row,
-                    },
-                );
-            }
+            edits = edits.compose([Edit {
+                old: start_row..end_row,
+                new: start_row..end_row,
+            }]);
         }
 
         self.0.sync(wrap_snapshot, edits);
@@ -427,9 +522,9 @@ impl<'a> BlockMapWriter<'a> {
     }
 
     pub fn remove(&mut self, block_ids: HashSet<BlockId>) {
-        let wrap_snapshot = &*self.0.wrap_snapshot.lock();
+        let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
         let buffer = wrap_snapshot.buffer_snapshot();
-        let mut edits = Vec::new();
+        let mut edits = Patch::default();
         let mut last_block_buffer_row = None;
         self.0.blocks.retain(|block| {
             if block_ids.contains(&block.id) {
@@ -524,7 +619,7 @@ impl BlockSnapshot {
     pub fn blocks_in_range<'a>(
         &'a self,
         rows: Range<u32>,
-    ) -> impl Iterator<Item = (u32, &'a AlignedBlock)> {
+    ) -> impl Iterator<Item = (u32, &'a TransformBlock)> {
         let mut cursor = self.transforms.cursor::<BlockRow>();
         cursor.seek(&BlockRow(rows.start), Bias::Right, &());
         std::iter::from_fn(move || {
@@ -644,7 +739,7 @@ impl BlockSnapshot {
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
         cursor.seek(&BlockRow(block_point.row), Bias::Right, &());
         if let Some(transform) = cursor.item() {
-            match transform.block.as_ref().map(|b| b.disposition) {
+            match transform.block.as_ref().map(|b| b.disposition()) {
                 Some(BlockDisposition::Above) => WrapPoint::new(cursor.start().1 .0, 0),
                 Some(BlockDisposition::Below) => {
                     let wrap_row = cursor.start().1 .0 - 1;
@@ -673,13 +768,13 @@ impl Transform {
         }
     }
 
-    fn block(block: Arc<Block>, column: u32) -> Self {
+    fn block(block: TransformBlock) -> Self {
         Self {
             summary: TransformSummary {
                 input_rows: 0,
-                output_rows: block.height as u32,
+                output_rows: block.height() as u32,
             },
-            block: Some(AlignedBlock { block, column }),
+            block: Some(block),
         }
     }
 
@@ -809,37 +904,21 @@ impl BlockDisposition {
     }
 }
 
-impl AlignedBlock {
-    pub fn height(&self) -> u32 {
-        self.height as u32
-    }
+impl<'a> Deref for BlockContext<'a> {
+    type Target = AppContext;
 
-    pub fn column(&self) -> u32 {
-        self.column
+    fn deref(&self) -> &Self::Target {
+        &self.cx
     }
+}
 
+impl Block {
     pub fn render(&self, cx: &BlockContext) -> ElementBox {
         self.render.lock()(cx)
     }
 
     pub fn position(&self) -> &Anchor {
-        &self.block.position
-    }
-}
-
-impl Deref for AlignedBlock {
-    type Target = Block;
-
-    fn deref(&self) -> &Self::Target {
-        self.block.as_ref()
-    }
-}
-
-impl<'a> Deref for BlockContext<'a> {
-    type Target = AppContext;
-
-    fn deref(&self) -> &Self::Target {
-        &self.cx
+        &self.position
     }
 }
 
@@ -911,9 +990,9 @@ mod tests {
         let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx);
-        let mut block_map = BlockMap::new(wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
 
-        let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
+        let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
         writer.insert(vec![
             BlockProperties {
                 position: buffer_snapshot.anchor_after(Point::new(1, 0)),
@@ -935,15 +1014,15 @@ mod tests {
             },
         ]);
 
-        let snapshot = block_map.read(wraps_snapshot, vec![]);
+        let snapshot = block_map.read(wraps_snapshot, Default::default());
         assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
 
         let blocks = snapshot
             .blocks_in_range(0..8)
             .map(|(start_row, block)| {
+                let block = block.as_custom().unwrap();
                 (
-                    start_row..start_row + block.height(),
-                    block.column(),
+                    start_row..start_row + block.height as u32,
                     block
                         .render(&BlockContext {
                             cx,
@@ -965,9 +1044,9 @@ mod tests {
         assert_eq!(
             blocks,
             &[
-                (1..3, 2, "block 2".to_string()),
-                (3..4, 0, "block 1".to_string()),
-                (7..10, 3, "block 3".to_string()),
+                (1..2, "block 1".to_string()),
+                (2..4, "block 2".to_string()),
+                (7..10, "block 3".to_string()),
             ]
         );
 
@@ -1089,9 +1168,9 @@ mod tests {
         let (_, folds_snapshot) = FoldMap::new(buffer_snapshot.clone());
         let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1);
         let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx);
-        let mut block_map = BlockMap::new(wraps_snapshot.clone());
+        let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
 
-        let mut writer = block_map.write(wraps_snapshot.clone(), vec![]);
+        let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
         writer.insert(vec![
             BlockProperties {
                 position: buffer_snapshot.anchor_after(Point::new(1, 12)),
@@ -1109,7 +1188,7 @@ mod tests {
 
         // Blocks with an 'above' disposition go above their corresponding buffer line.
         // Blocks with a 'below' disposition go below their corresponding buffer line.
-        let snapshot = block_map.read(wraps_snapshot, vec![]);
+        let snapshot = block_map.read(wraps_snapshot, Default::default());
         assert_eq!(
             snapshot.text(),
             "one two \nthree\n\nfour five \nsix\n\nseven \neight"
@@ -1134,8 +1213,11 @@ mod tests {
             .select_font(family_id, &Default::default())
             .unwrap();
         let font_size = 14.0;
+        let buffer_start_header_height = rng.gen_range(1..=5);
+        let excerpt_header_height = rng.gen_range(1..=5);
 
         log::info!("Wrap width: {:?}", wrap_width);
+        log::info!("Excerpt Header Height: {:?}", excerpt_header_height);
 
         let buffer = if rng.gen() {
             let len = rng.gen_range(0..10);
@@ -1151,8 +1233,12 @@ mod tests {
         let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size);
         let (wrap_map, wraps_snapshot) =
             WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx);
-        let mut block_map = BlockMap::new(wraps_snapshot);
-        let mut expected_blocks = Vec::new();
+        let mut block_map = BlockMap::new(
+            wraps_snapshot.clone(),
+            buffer_start_header_height,
+            excerpt_header_height,
+        );
+        let mut custom_blocks = Vec::new();
 
         for _ in 0..operations {
             let mut buffer_edits = Vec::new();
@@ -1205,15 +1291,15 @@ mod tests {
                     let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
                     let block_ids = block_map.insert(block_properties.clone());
                     for (block_id, props) in block_ids.into_iter().zip(block_properties) {
-                        expected_blocks.push((block_id, props));
+                        custom_blocks.push((block_id, props));
                     }
                 }
-                40..=59 if !expected_blocks.is_empty() => {
-                    let block_count = rng.gen_range(1..=4.min(expected_blocks.len()));
+                40..=59 if !custom_blocks.is_empty() => {
+                    let block_count = rng.gen_range(1..=4.min(custom_blocks.len()));
                     let block_ids_to_remove = (0..block_count)
                         .map(|_| {
-                            expected_blocks
-                                .remove(rng.gen_range(0..expected_blocks.len()))
+                            custom_blocks
+                                .remove(rng.gen_range(0..custom_blocks.len()))
                                 .0
                         })
                         .collect();
@@ -1229,9 +1315,9 @@ mod tests {
                 }
                 _ => {
                     buffer.update(cx, |buffer, cx| {
-                        let edit_count = rng.gen_range(1..=5);
+                        let mutation_count = rng.gen_range(1..=5);
                         let subscription = buffer.subscribe();
-                        buffer.randomly_edit(&mut rng, edit_count, cx);
+                        buffer.randomly_mutate(&mut rng, mutation_count, cx);
                         buffer_snapshot = buffer.snapshot(cx);
                         buffer_edits.extend(subscription.consume());
                         log::info!("buffer text: {:?}", buffer_snapshot.text());
@@ -1251,36 +1337,46 @@ mod tests {
             );
             log::info!("blocks text: {:?}", blocks_snapshot.text());
 
-            let mut sorted_blocks = expected_blocks
-                .iter()
-                .cloned()
-                .map(|(id, block)| {
-                    let mut position = block.position.to_point(&buffer_snapshot);
-                    let column = wraps_snapshot.from_point(position, Bias::Left).column();
-                    match block.disposition {
-                        BlockDisposition::Above => {
-                            position.column = 0;
-                        }
-                        BlockDisposition::Below => {
-                            position.column = buffer_snapshot.line_len(position.row);
-                        }
-                    };
-                    let row = wraps_snapshot.from_point(position, Bias::Left).row();
+            let mut expected_blocks = Vec::new();
+            expected_blocks.extend(custom_blocks.iter().map(|(id, block)| {
+                let mut position = block.position.to_point(&buffer_snapshot);
+                match block.disposition {
+                    BlockDisposition::Above => {
+                        position.column = 0;
+                    }
+                    BlockDisposition::Below => {
+                        position.column = buffer_snapshot.line_len(position.row);
+                    }
+                };
+                let row = wraps_snapshot.from_point(position, Bias::Left).row();
+                (
+                    row,
+                    ExpectedBlock::Custom {
+                        disposition: block.disposition,
+                        id: *id,
+                        height: block.height,
+                    },
+                )
+            }));
+            expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map(
+                |boundary| {
+                    let position =
+                        wraps_snapshot.from_point(Point::new(boundary.row, 0), Bias::Left);
                     (
-                        id,
-                        BlockProperties {
-                            position: BlockPoint::new(row, column),
-                            height: block.height,
-                            disposition: block.disposition,
-                            render: block.render.clone(),
+                        position.row(),
+                        ExpectedBlock::ExcerptHeader {
+                            height: if boundary.starts_new_buffer {
+                                buffer_start_header_height
+                            } else {
+                                excerpt_header_height
+                            },
+                            starts_new_buffer: boundary.starts_new_buffer,
                         },
                     )
-                })
-                .collect::<Vec<_>>();
-            sorted_blocks.sort_unstable_by_key(|(id, block)| {
-                (block.position.row, block.disposition, Reverse(*id))
-            });
-            let mut sorted_blocks_iter = sorted_blocks.iter().peekable();
+                },
+            ));
+            expected_blocks.sort_unstable();
+            let mut sorted_blocks_iter = expected_blocks.into_iter().peekable();
 
             let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::<Vec<_>>();
             let mut expected_buffer_rows = Vec::new();
@@ -1297,16 +1393,17 @@ mod tests {
                     .to_point(WrapPoint::new(row, 0), Bias::Left)
                     .row as usize];
 
-                while let Some((block_id, block)) = sorted_blocks_iter.peek() {
-                    if block.position.row == row && block.disposition == BlockDisposition::Above {
+                while let Some((block_row, block)) = sorted_blocks_iter.peek() {
+                    if *block_row == row && block.disposition() == BlockDisposition::Above {
+                        let (_, block) = sorted_blocks_iter.next().unwrap();
+                        let height = block.height() as usize;
                         expected_block_positions
-                            .push((expected_text.matches('\n').count() as u32, *block_id));
-                        let text = "\n".repeat(block.height as usize);
+                            .push((expected_text.matches('\n').count() as u32, block));
+                        let text = "\n".repeat(height);
                         expected_text.push_str(&text);
-                        for _ in 0..block.height {
+                        for _ in 0..height {
                             expected_buffer_rows.push(None);
                         }
-                        sorted_blocks_iter.next();
                     } else {
                         break;
                     }
@@ -1316,16 +1413,17 @@ mod tests {
                 expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
                 expected_text.push_str(input_line);
 
-                while let Some((block_id, block)) = sorted_blocks_iter.peek() {
-                    if block.position.row == row && block.disposition == BlockDisposition::Below {
+                while let Some((block_row, block)) = sorted_blocks_iter.peek() {
+                    if *block_row == row && block.disposition() == BlockDisposition::Below {
+                        let (_, block) = sorted_blocks_iter.next().unwrap();
+                        let height = block.height() as usize;
                         expected_block_positions
-                            .push((expected_text.matches('\n').count() as u32 + 1, *block_id));
-                        let text = "\n".repeat(block.height as usize);
+                            .push((expected_text.matches('\n').count() as u32 + 1, block));
+                        let text = "\n".repeat(height);
                         expected_text.push_str(&text);
-                        for _ in 0..block.height {
+                        for _ in 0..height {
                             expected_buffer_rows.push(None);
                         }
-                        sorted_blocks_iter.next();
                     } else {
                         break;
                     }
@@ -1337,7 +1435,7 @@ mod tests {
             for start_row in 0..expected_row_count {
                 let expected_text = expected_lines[start_row..].join("\n");
                 let actual_text = blocks_snapshot
-                    .chunks(start_row as u32..expected_row_count as u32, false)
+                    .chunks(start_row as u32..blocks_snapshot.max_point().row + 1, false)
                     .map(|chunk| chunk.text)
                     .collect::<String>();
                 assert_eq!(
@@ -1356,7 +1454,7 @@ mod tests {
             assert_eq!(
                 blocks_snapshot
                     .blocks_in_range(0..(expected_row_count as u32))
-                    .map(|(row, block)| (row, block.id))
+                    .map(|(row, block)| (row, block.clone().into()))
                     .collect::<Vec<_>>(),
                 expected_block_positions
             );
@@ -1435,6 +1533,64 @@ mod tests {
                 }
             }
         }
+
+        #[derive(Debug, Eq, PartialEq, Ord, PartialOrd)]
+        enum ExpectedBlock {
+            ExcerptHeader {
+                height: u8,
+                starts_new_buffer: bool,
+            },
+            Custom {
+                disposition: BlockDisposition,
+                id: BlockId,
+                height: u8,
+            },
+        }
+
+        impl ExpectedBlock {
+            fn height(&self) -> u8 {
+                match self {
+                    ExpectedBlock::ExcerptHeader { height, .. } => *height,
+                    ExpectedBlock::Custom { height, .. } => *height,
+                }
+            }
+
+            fn disposition(&self) -> BlockDisposition {
+                match self {
+                    ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above,
+                    ExpectedBlock::Custom { disposition, .. } => *disposition,
+                }
+            }
+        }
+
+        impl From<TransformBlock> for ExpectedBlock {
+            fn from(block: TransformBlock) -> Self {
+                match block {
+                    TransformBlock::Custom(block) => ExpectedBlock::Custom {
+                        id: block.id,
+                        disposition: block.disposition,
+                        height: block.height,
+                    },
+                    TransformBlock::ExcerptHeader {
+                        height,
+                        starts_new_buffer,
+                        ..
+                    } => ExpectedBlock::ExcerptHeader {
+                        height,
+                        starts_new_buffer,
+                    },
+                }
+            }
+        }
+    }
+
+    impl TransformBlock {
+        fn as_custom(&self) -> Option<&Block> {
+            match self {
+                TransformBlock::Custom(block) => Some(block),
+                TransformBlock::ExcerptHeader { .. } => None,
+            }
+        }
     }
 
     impl BlockSnapshot {
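
The hunks above fold the old AlignedBlock wrapper into a two-variant TransformBlock, so excerpt headers and caller-inserted blocks flow through the same transform tree. A rough sketch of the shape these hunks imply (not part of the diff; the ExcerptHeader fields beyond height and starts_new_buffer are elided with ".." in the hunks, whether Custom holds Block or Arc<Block> is an assumption, and the other types come from the surrounding block_map module):

    // Sketch only, reconstructed from how the hunks pattern-match on TransformBlock.
    pub enum TransformBlock {
        Custom(Arc<Block>),
        ExcerptHeader {
            height: u8,
            starts_new_buffer: bool,
            // ...other excerpt metadata elided in the diff
        },
    }

    impl TransformBlock {
        fn disposition(&self) -> BlockDisposition {
            match self {
                // Custom blocks keep their configured disposition; excerpt
                // headers always sit above the excerpt they introduce, which
                // is also how the test's ExpectedBlock models them.
                TransformBlock::Custom(block) => block.disposition,
                TransformBlock::ExcerptHeader { .. } => BlockDisposition::Above,
            }
        }

        fn height(&self) -> u8 {
            match self {
                TransformBlock::Custom(block) => block.height,
                TransformBlock::ExcerptHeader { height, .. } => *height,
            }
        }
    }

This is also why BlockMap::new now takes buffer_start_header_height and excerpt_header_height: the randomized test expects one ExcerptHeader block per excerpt boundary, sized by whether the boundary starts a new buffer.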

crates/editor/src/display_map/fold_map.rs šŸ”—

@@ -107,14 +107,23 @@ impl<'a> FoldMapWriter<'a> {
         let buffer = self.0.buffer.lock().clone();
         for range in ranges.into_iter() {
             let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer);
-            if range.start != range.end {
-                let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
-                folds.push(fold);
-                edits.push(text::Edit {
-                    old: range.clone(),
-                    new: range,
-                });
+
+            // Ignore any empty ranges.
+            if range.start == range.end {
+                continue;
+            }
+
+            // For now, ignore any ranges that span an excerpt boundary.
+            let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
+            if fold.0.start.excerpt_id() != fold.0.end.excerpt_id() {
+                continue;
             }
+
+            folds.push(fold);
+            edits.push(text::Edit {
+                old: range.clone(),
+                new: range,
+            });
         }
 
         folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(a, b, &buffer));
@@ -268,6 +277,8 @@ impl FoldMap {
             let mut buffer = self.buffer.lock();
             if buffer.parse_count() != new_buffer.parse_count()
                 || buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count()
+                || buffer.trailing_excerpt_update_count()
+                    != new_buffer.trailing_excerpt_update_count()
             {
                 self.version.fetch_add(1, SeqCst);
             }
@@ -1281,7 +1292,7 @@ mod tests {
                 _ => buffer.update(cx, |buffer, cx| {
                     let subscription = buffer.subscribe();
                     let edit_count = rng.gen_range(1..=5);
-                    buffer.randomly_edit(&mut rng, edit_count, cx);
+                    buffer.randomly_mutate(&mut rng, edit_count, cx);
                     buffer_snapshot = buffer.snapshot(cx);
                     let edits = subscription.consume().into_inner();
                     log::info!("editing {:?}", edits);
@@ -1407,7 +1418,6 @@ mod tests {
                 fold_row = snapshot
                     .clip_point(FoldPoint::new(fold_row, 0), Bias::Right)
                     .row();
-                eprintln!("fold_row: {} of {}", fold_row, expected_buffer_rows.len());
                 assert_eq!(
                     snapshot.buffer_rows(fold_row).collect::<Vec<_>>(),
                     expected_buffer_rows[(fold_row as usize)..],
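
Fold creation above now bails out early on empty ranges and on ranges whose two anchors resolve into different excerpts of the multi-buffer. A minimal sketch of that filter as a standalone predicate, assuming only what the hunk itself uses (anchor_after/anchor_before and excerpt_id() on multi-buffer anchors, with MultiBufferSnapshot as the snapshot type); is_foldable is a hypothetical helper, not code from this PR:

    // Hypothetical helper mirroring the guard added in FoldMapWriter::fold.
    fn is_foldable(range: &std::ops::Range<usize>, buffer: &MultiBufferSnapshot) -> bool {
        // Empty ranges produce no fold.
        if range.start == range.end {
            return false;
        }
        // For now, folds may not straddle an excerpt boundary, so both
        // endpoints must anchor into the same excerpt.
        let start = buffer.anchor_after(range.start);
        let end = buffer.anchor_before(range.end);
        start.excerpt_id() == end.excerpt_id()
    }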

crates/editor/src/display_map/wrap_map.rs šŸ”—

@@ -106,7 +106,7 @@ impl WrapMap {
         tab_snapshot: TabSnapshot,
         edits: Vec<TabEdit>,
         cx: &mut ModelContext<Self>,
-    ) -> (WrapSnapshot, Vec<WrapEdit>) {
+    ) -> (WrapSnapshot, Patch<u32>) {
         if self.wrap_width.is_some() {
             self.pending_edits.push_back((tab_snapshot, edits));
             self.flush_edits(cx);
@@ -117,10 +117,7 @@ impl WrapMap {
             self.snapshot.interpolated = false;
         }
 
-        (
-            self.snapshot.clone(),
-            mem::take(&mut self.edits_since_sync).into_inner(),
-        )
+        (self.snapshot.clone(), mem::take(&mut self.edits_since_sync))
     }
 
     pub fn set_font(&mut self, font_id: FontId, font_size: f32, cx: &mut ModelContext<Self>) {
@@ -588,10 +585,6 @@ impl WrapSnapshot {
         }
     }
 
-    pub fn text_summary(&self) -> TextSummary {
-        self.transforms.summary().output
-    }
-
     pub fn max_point(&self) -> WrapPoint {
         WrapPoint(self.transforms.summary().output.lines)
     }
@@ -955,10 +948,6 @@ impl WrapPoint {
         &mut self.0.row
     }
 
-    pub fn column(&self) -> u32 {
-        self.0.column
-    }
-
     pub fn column_mut(&mut self) -> &mut u32 {
         &mut self.0.column
     }
@@ -1118,7 +1107,7 @@ mod tests {
                     buffer.update(&mut cx, |buffer, cx| {
                         let subscription = buffer.subscribe();
                         let edit_count = rng.gen_range(1..=5);
-                        buffer.randomly_edit(&mut rng, edit_count, cx);
+                        buffer.randomly_mutate(&mut rng, edit_count, cx);
                         buffer_snapshot = buffer.snapshot(cx);
                         buffer_edits.extend(subscription.consume());
                     });
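
Across these display-map hunks, edit plumbing moves from ad-hoc Vec<Edit<u32>> values to Patch<u32>: WrapMap::sync now hands back the accumulated patch directly, and BlockMapWriter composes each touched row range into its patch instead of maintaining a sorted Vec by hand. A small sketch of that accumulation pattern, assuming nothing about Patch beyond Default and the compose call visible in these hunks:

    // Sketch: build up a Patch<u32> of touched row ranges, then hand the
    // composed patch to sync() in one call, as BlockMapWriter::insert now does.
    let mut edits = Patch::default();
    for row_range in [4..6u32, 1..2, 7..9] {
        // old == new here: these rows are re-laid-out rather than resized,
        // matching how the insert/remove paths above record block changes.
        edits = edits.compose([Edit {
            old: row_range.clone(),
            new: row_range,
        }]);
    }
    // The writer then passes `edits` along, replacing the removed
    // binary_search/insert bookkeeping with patch composition.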

crates/editor/src/editor.rs šŸ”—

@@ -10,7 +10,7 @@ mod test;
 use aho_corasick::AhoCorasick;
 use anyhow::Result;
 use clock::ReplicaId;
-use collections::{BTreeMap, HashMap, HashSet};
+use collections::{BTreeMap, Bound, HashMap, HashSet};
 pub use display_map::DisplayPoint;
 use display_map::*;
 pub use element::*;
@@ -27,19 +27,20 @@ use gpui::{
     text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle,
     MutableAppContext, RenderContext, Task, View, ViewContext, WeakModelHandle, WeakViewHandle,
 };
-use items::BufferItemHandle;
+use items::{BufferItemHandle, MultiBufferItemHandle};
 use itertools::Itertools as _;
 use language::{
-    AnchorRangeExt as _, BracketPair, Buffer, Completion, CompletionLabel, Diagnostic,
+    AnchorRangeExt as _, BracketPair, Buffer, CodeAction, Completion, CompletionLabel, Diagnostic,
     DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId,
 };
 use multi_buffer::MultiBufferChunks;
 pub use multi_buffer::{
-    char_kind, Anchor, AnchorRangeExt, CharKind, ExcerptId, ExcerptProperties, MultiBuffer,
-    MultiBufferSnapshot, ToOffset, ToPoint,
+    char_kind, Anchor, AnchorRangeExt, CharKind, ExcerptId, MultiBuffer, MultiBufferSnapshot,
+    ToOffset, ToPoint,
 };
 use ordered_float::OrderedFloat;
 use postage::watch;
+use project::Project;
 use serde::{Deserialize, Serialize};
 use smallvec::SmallVec;
 use smol::Timer;
@@ -124,7 +125,9 @@ action!(FoldSelectedRanges);
 action!(Scroll, Vector2F);
 action!(Select, SelectPhase);
 action!(ShowCompletions);
+action!(ToggleCodeActions, bool);
 action!(ConfirmCompletion, Option<usize>);
+action!(ConfirmCodeAction, Option<usize>);
 
 pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec<Box<dyn PathOpener>>) {
     path_openers.push(Box::new(items::BufferOpener));
@@ -143,10 +146,19 @@ pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec<Box<dyn PathOpene
         Binding::new(
             "enter",
             ConfirmCompletion(None),
-            Some("Editor && completing"),
+            Some("Editor && showing_completions"),
+        ),
+        Binding::new(
+            "enter",
+            ConfirmCodeAction(None),
+            Some("Editor && showing_code_actions"),
         ),
         Binding::new("tab", Tab, Some("Editor")),
-        Binding::new("tab", ConfirmCompletion(None), Some("Editor && completing")),
+        Binding::new(
+            "tab",
+            ConfirmCompletion(None),
+            Some("Editor && showing_completions"),
+        ),
         Binding::new("shift-tab", Outdent, Some("Editor")),
         Binding::new("ctrl-shift-K", DeleteLine, Some("Editor")),
         Binding::new(
@@ -239,6 +251,7 @@ pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec<Box<dyn PathOpene
         Binding::new("alt-cmd-]", Unfold, Some("Editor")),
         Binding::new("alt-cmd-f", FoldSelectedRanges, Some("Editor")),
         Binding::new("ctrl-space", ShowCompletions, Some("Editor")),
+        Binding::new("cmd-.", ToggleCodeActions(false), Some("Editor")),
     ]);
 
     cx.add_action(Editor::open_new);
@@ -303,13 +316,9 @@ pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec<Box<dyn PathOpene
     cx.add_action(Editor::unfold);
     cx.add_action(Editor::fold_selected_ranges);
     cx.add_action(Editor::show_completions);
-    cx.add_action(
-        |editor: &mut Editor, &ConfirmCompletion(ix): &ConfirmCompletion, cx| {
-            if let Some(task) = editor.confirm_completion(ix, cx) {
-                task.detach_and_log_err(cx);
-            }
-        },
-    );
+    cx.add_action(Editor::toggle_code_actions);
+    cx.add_async_action(Editor::confirm_completion);
+    cx.add_async_action(Editor::confirm_code_action);
 }
 
 trait SelectionExt {
@@ -348,7 +357,7 @@ pub enum SelectPhase {
 }
 
 #[derive(Clone, Debug)]
-enum SelectMode {
+pub enum SelectMode {
     Character,
     Word(Range<Anchor>),
     Line(Range<Anchor>),
@@ -407,6 +416,7 @@ pub struct Editor {
     scroll_top_anchor: Option<Anchor>,
     autoscroll_request: Option<Autoscroll>,
     build_settings: BuildSettings,
+    project: Option<ModelHandle<Project>>,
     focused: bool,
     show_local_cursors: bool,
     blink_epoch: usize,
@@ -417,9 +427,11 @@ pub struct Editor {
     highlighted_rows: Option<Range<u32>>,
     highlighted_ranges: BTreeMap<TypeId, (Color, Vec<Range<Anchor>>)>,
     nav_history: Option<ItemNavHistory>,
-    completion_state: Option<CompletionState>,
+    context_menu: Option<ContextMenu>,
     completion_tasks: Vec<(CompletionId, Task<Option<()>>)>,
     next_completion_id: CompletionId,
+    available_code_actions: Option<(ModelHandle<Buffer>, Arc<[CodeAction]>)>,
+    code_actions_task: Option<Task<()>>,
 }
 
 pub struct EditorSnapshot {
@@ -431,7 +443,8 @@ pub struct EditorSnapshot {
     scroll_top_anchor: Option<Anchor>,
 }
 
-struct PendingSelection {
+#[derive(Clone)]
+pub struct PendingSelection {
     selection: Selection<Anchor>,
     mode: SelectMode,
 }
@@ -459,17 +472,157 @@ struct SnippetState {
 
 struct InvalidationStack<T>(Vec<T>);
 
-struct CompletionState {
+enum ContextMenu {
+    Completions(CompletionsMenu),
+    CodeActions(CodeActionsMenu),
+}
+
+impl ContextMenu {
+    fn select_prev(&mut self, cx: &mut ViewContext<Editor>) -> bool {
+        if self.visible() {
+            match self {
+                ContextMenu::Completions(menu) => menu.select_prev(cx),
+                ContextMenu::CodeActions(menu) => menu.select_prev(cx),
+            }
+            true
+        } else {
+            false
+        }
+    }
+
+    fn select_next(&mut self, cx: &mut ViewContext<Editor>) -> bool {
+        if self.visible() {
+            match self {
+                ContextMenu::Completions(menu) => menu.select_next(cx),
+                ContextMenu::CodeActions(menu) => menu.select_next(cx),
+            }
+            true
+        } else {
+            false
+        }
+    }
+
+    fn visible(&self) -> bool {
+        match self {
+            ContextMenu::Completions(menu) => menu.visible(),
+            ContextMenu::CodeActions(menu) => menu.visible(),
+        }
+    }
+
+    fn render(
+        &self,
+        cursor_position: DisplayPoint,
+        build_settings: BuildSettings,
+        cx: &AppContext,
+    ) -> (DisplayPoint, ElementBox) {
+        match self {
+            ContextMenu::Completions(menu) => (cursor_position, menu.render(build_settings, cx)),
+            ContextMenu::CodeActions(menu) => menu.render(cursor_position, build_settings, cx),
+        }
+    }
+}
+
+struct CompletionsMenu {
     id: CompletionId,
     initial_position: Anchor,
-    completions: Arc<[Completion<Anchor>]>,
+    buffer: ModelHandle<Buffer>,
+    completions: Arc<[Completion]>,
     match_candidates: Vec<StringMatchCandidate>,
     matches: Arc<[StringMatch]>,
     selected_item: usize,
     list: UniformListState,
 }
 
-impl CompletionState {
+impl CompletionsMenu {
+    fn select_prev(&mut self, cx: &mut ViewContext<Editor>) {
+        if self.selected_item > 0 {
+            self.selected_item -= 1;
+            self.list.scroll_to(ScrollTarget::Show(self.selected_item));
+        }
+        cx.notify();
+    }
+
+    fn select_next(&mut self, cx: &mut ViewContext<Editor>) {
+        if self.selected_item + 1 < self.matches.len() {
+            self.selected_item += 1;
+            self.list.scroll_to(ScrollTarget::Show(self.selected_item));
+        }
+        cx.notify();
+    }
+
+    fn visible(&self) -> bool {
+        !self.matches.is_empty()
+    }
+
+    fn render(&self, build_settings: BuildSettings, cx: &AppContext) -> ElementBox {
+        enum CompletionTag {}
+
+        let settings = build_settings(cx);
+        let completions = self.completions.clone();
+        let matches = self.matches.clone();
+        let selected_item = self.selected_item;
+        UniformList::new(self.list.clone(), matches.len(), move |range, items, cx| {
+            let settings = build_settings(cx);
+            let start_ix = range.start;
+            for (ix, mat) in matches[range].iter().enumerate() {
+                let completion = &completions[mat.candidate_id];
+                let item_ix = start_ix + ix;
+                items.push(
+                    MouseEventHandler::new::<CompletionTag, _, _, _>(
+                        mat.candidate_id,
+                        cx,
+                        |state, _| {
+                            let item_style = if item_ix == selected_item {
+                                settings.style.autocomplete.selected_item
+                            } else if state.hovered {
+                                settings.style.autocomplete.hovered_item
+                            } else {
+                                settings.style.autocomplete.item
+                            };
+
+                            Text::new(completion.label.text.clone(), settings.style.text.clone())
+                                .with_soft_wrap(false)
+                                .with_highlights(combine_syntax_and_fuzzy_match_highlights(
+                                    &completion.label.text,
+                                    settings.style.text.color.into(),
+                                    styled_runs_for_completion_label(
+                                        &completion.label,
+                                        settings.style.text.color,
+                                        &settings.style.syntax,
+                                    ),
+                                    &mat.positions,
+                                ))
+                                .contained()
+                                .with_style(item_style)
+                                .boxed()
+                        },
+                    )
+                    .with_cursor_style(CursorStyle::PointingHand)
+                    .on_mouse_down(move |cx| {
+                        cx.dispatch_action(ConfirmCompletion(Some(item_ix)));
+                    })
+                    .boxed(),
+                );
+            }
+        })
+        .with_width_from_item(
+            self.matches
+                .iter()
+                .enumerate()
+                .max_by_key(|(_, mat)| {
+                    self.completions[mat.candidate_id]
+                        .label
+                        .text
+                        .chars()
+                        .count()
+                })
+                .map(|(ix, _)| ix),
+        )
+        .contained()
+        .with_style(settings.style.autocomplete.container)
+        .boxed()
+    }
+
     pub async fn filter(&mut self, query: Option<&str>, executor: Arc<executor::Background>) {
         let mut matches = if let Some(query) = query {
             fuzzy::match_strings(
@@ -511,6 +664,94 @@ impl CompletionState {
     }
 }
 
+#[derive(Clone)]
+struct CodeActionsMenu {
+    actions: Arc<[CodeAction]>,
+    buffer: ModelHandle<Buffer>,
+    selected_item: usize,
+    list: UniformListState,
+    deployed_from_indicator: bool,
+}
+
+impl CodeActionsMenu {
+    fn select_prev(&mut self, cx: &mut ViewContext<Editor>) {
+        if self.selected_item > 0 {
+            self.selected_item -= 1;
+            cx.notify()
+        }
+    }
+
+    fn select_next(&mut self, cx: &mut ViewContext<Editor>) {
+        if self.selected_item + 1 < self.actions.len() {
+            self.selected_item += 1;
+            cx.notify()
+        }
+    }
+
+    fn visible(&self) -> bool {
+        !self.actions.is_empty()
+    }
+
+    fn render(
+        &self,
+        mut cursor_position: DisplayPoint,
+        build_settings: BuildSettings,
+        cx: &AppContext,
+    ) -> (DisplayPoint, ElementBox) {
+        enum ActionTag {}
+
+        let settings = build_settings(cx);
+        let actions = self.actions.clone();
+        let selected_item = self.selected_item;
+        let element =
+            UniformList::new(self.list.clone(), actions.len(), move |range, items, cx| {
+                let settings = build_settings(cx);
+                let start_ix = range.start;
+                for (ix, action) in actions[range].iter().enumerate() {
+                    let item_ix = start_ix + ix;
+                    items.push(
+                        MouseEventHandler::new::<ActionTag, _, _, _>(item_ix, cx, |state, _| {
+                            let item_style = if item_ix == selected_item {
+                                settings.style.autocomplete.selected_item
+                            } else if state.hovered {
+                                settings.style.autocomplete.hovered_item
+                            } else {
+                                settings.style.autocomplete.item
+                            };
+
+                            Text::new(action.lsp_action.title.clone(), settings.style.text.clone())
+                                .with_soft_wrap(false)
+                                .contained()
+                                .with_style(item_style)
+                                .boxed()
+                        })
+                        .with_cursor_style(CursorStyle::PointingHand)
+                        .on_mouse_down(move |cx| {
+                            cx.dispatch_action(ConfirmCodeAction(Some(item_ix)));
+                        })
+                        .boxed(),
+                    );
+                }
+            })
+            .with_width_from_item(
+                self.actions
+                    .iter()
+                    .enumerate()
+                    .max_by_key(|(_, action)| action.lsp_action.title.chars().count())
+                    .map(|(ix, _)| ix),
+            )
+            .contained()
+            .with_style(settings.style.autocomplete.container)
+            .boxed();
+
+        if self.deployed_from_indicator {
+            *cursor_position.column_mut() = 0;
+        }
+
+        (cursor_position, element)
+    }
+}
+
 #[derive(Debug)]
 struct ActiveDiagnosticGroup {
     primary_range: Range<Anchor>,
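
From here on, the completion and code-action popups share one ContextMenu, and their confirm paths are registered through cx.add_async_action, so each handler returns Option<Task<Result<()>>> and the framework presumably takes over the detach-and-log-error step that the removed closure wrapper did by hand. A rough sketch of that handler shape; confirm_something is an illustrative name and its body is a stand-in, not the PR's implementation:

    // Sketch of an async-action handler as wired by cx.add_async_action.
    impl Editor {
        fn confirm_something(
            &mut self,
            _action: &ConfirmCompletion,
            cx: &mut ViewContext<Self>,
        ) -> Option<Task<Result<()>>> {
            // Dismiss whichever context menu is showing; bail if there is none.
            let _menu = self.hide_context_menu(cx)?;
            // Kick off the follow-up work; the caller detaches and logs it.
            Some(cx.foreground().spawn(async move {
                // ...apply the selected completion or code action here...
                Ok(())
            }))
        }
    }

(confirm_code_action itself is registered against the Workspace rather than the Editor, as its signature later in this file shows.)
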
@@ -534,7 +775,7 @@ impl Editor {
     pub fn single_line(build_settings: BuildSettings, cx: &mut ViewContext<Self>) -> Self {
         let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
         let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-        let mut view = Self::for_buffer(buffer, build_settings, cx);
+        let mut view = Self::for_buffer(buffer, build_settings, None, cx);
         view.mode = EditorMode::SingleLine;
         view
     }
@@ -546,7 +787,7 @@ impl Editor {
     ) -> Self {
         let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx));
         let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-        let mut view = Self::for_buffer(buffer, build_settings, cx);
+        let mut view = Self::for_buffer(buffer, build_settings, None, cx);
         view.mode = EditorMode::AutoHeight { max_lines };
         view
     }
@@ -554,13 +795,19 @@ impl Editor {
     pub fn for_buffer(
         buffer: ModelHandle<MultiBuffer>,
         build_settings: BuildSettings,
+        project: Option<ModelHandle<Project>>,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        Self::new(buffer, build_settings, cx)
+        Self::new(buffer, build_settings, project, cx)
     }
 
     pub fn clone(&self, cx: &mut ViewContext<Self>) -> Self {
-        let mut clone = Self::new(self.buffer.clone(), self.build_settings.clone(), cx);
+        let mut clone = Self::new(
+            self.buffer.clone(),
+            self.build_settings.clone(),
+            self.project.clone(),
+            cx,
+        );
         clone.scroll_position = self.scroll_position;
         clone.scroll_top_anchor = self.scroll_top_anchor.clone();
         clone.nav_history = self
@@ -573,6 +820,7 @@ impl Editor {
     pub fn new(
         buffer: ModelHandle<MultiBuffer>,
         build_settings: BuildSettings,
+        project: Option<ModelHandle<Project>>,
         cx: &mut ViewContext<Self>,
     ) -> Self {
         let settings = build_settings(cx);
@@ -583,6 +831,8 @@ impl Editor {
                 settings.style.text.font_id,
                 settings.style.text.font_size,
                 None,
+                2,
+                1,
                 cx,
             )
         });
@@ -596,9 +846,18 @@ impl Editor {
             buffer,
             display_map,
             selections: Arc::from([]),
-            pending_selection: None,
+            pending_selection: Some(PendingSelection {
+                selection: Selection {
+                    id: 0,
+                    start: Anchor::min(),
+                    end: Anchor::min(),
+                    reversed: false,
+                    goal: SelectionGoal::None,
+                },
+                mode: SelectMode::Character,
+            }),
             columnar_selection_tail: None,
-            next_selection_id: 0,
+            next_selection_id: 1,
             add_selections_state: None,
             select_next_state: None,
             selection_history: Default::default(),
@@ -607,6 +866,7 @@ impl Editor {
             select_larger_syntax_node_stack: Vec::new(),
             active_diagnostics: None,
             build_settings,
+            project,
             scroll_position: Vector2F::zero(),
             scroll_top_anchor: None,
             autoscroll_request: None,
@@ -620,18 +880,13 @@ impl Editor {
             highlighted_rows: None,
             highlighted_ranges: Default::default(),
             nav_history: None,
-            completion_state: None,
+            context_menu: None,
             completion_tasks: Default::default(),
             next_completion_id: 0,
+            available_code_actions: Default::default(),
+            code_actions_task: Default::default(),
         };
-        let selection = Selection {
-            id: post_inc(&mut this.next_selection_id),
-            start: 0,
-            end: 0,
-            reversed: false,
-            goal: SelectionGoal::None,
-        };
-        this.update_selections(vec![selection], None, cx);
+        this.end_selection(cx);
         this
     }
 
@@ -654,16 +909,7 @@ impl Editor {
     }
 
     pub fn title(&self, cx: &AppContext) -> String {
-        let filename = self
-            .buffer()
-            .read(cx)
-            .file(cx)
-            .map(|file| file.file_name(cx));
-        if let Some(name) = filename {
-            name.to_string_lossy().into()
-        } else {
-            "untitled".into()
-        }
+        self.buffer().read(cx).title(cx)
     }
 
     pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> EditorSnapshot {
@@ -880,7 +1126,7 @@ impl Editor {
     }
 
     fn select(&mut self, Select(phase): &Select, cx: &mut ViewContext<Self>) {
-        self.hide_completions(cx);
+        self.hide_context_menu(cx);
 
         match phase {
             SelectPhase::Begin {
@@ -919,7 +1165,7 @@ impl Editor {
 
         let position = position.to_offset(&display_map, Bias::Left);
         let tail_anchor = display_map.buffer_snapshot.anchor_before(tail);
-        let pending = self.pending_selection.as_mut().unwrap();
+        let mut pending = self.pending_selection.clone().unwrap();
 
         if position >= tail {
             pending.selection.start = tail_anchor.clone();
@@ -934,6 +1180,8 @@ impl Editor {
             }
             _ => {}
         }
+
+        self.set_selections(self.selections.clone(), Some(pending), cx);
     }
 
     fn begin_selection(
@@ -950,7 +1198,7 @@ impl Editor {
 
         let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
         let buffer = &display_map.buffer_snapshot;
-        let newest_selection = self.newest_anchor_selection().unwrap().clone();
+        let newest_selection = self.newest_anchor_selection().clone();
 
         let start;
         let end;
@@ -997,16 +1245,23 @@ impl Editor {
             goal: SelectionGoal::None,
         };
 
-        if !add {
-            self.update_selections::<usize>(Vec::new(), None, cx);
-        } else if click_count > 1 {
-            // Remove the newest selection since it was only added as part of this multi-click.
-            let mut selections = self.local_selections(cx);
-            selections.retain(|selection| selection.id != newest_selection.id);
-            self.update_selections::<usize>(selections, None, cx)
+        let mut selections;
+        if add {
+            selections = self.selections.clone();
+            // Remove the newest selection if it was added due to a previous mouse up
+            // within this multi-click.
+            if click_count > 1 {
+                selections = self
+                    .selections
+                    .iter()
+                    .filter(|selection| selection.id != newest_selection.id)
+                    .cloned()
+                    .collect();
+            }
+        } else {
+            selections = Arc::from([]);
         }
-
-        self.pending_selection = Some(PendingSelection { selection, mode });
+        self.set_selections(selections, Some(PendingSelection { selection, mode }), cx);
 
         cx.notify();
     }
@@ -1049,14 +1304,14 @@ impl Editor {
         if let Some(tail) = self.columnar_selection_tail.as_ref() {
             let tail = tail.to_display_point(&display_map);
             self.select_columns(tail, position, overshoot, &display_map, cx);
-        } else if let Some(PendingSelection { selection, mode }) = self.pending_selection.as_mut() {
+        } else if let Some(mut pending) = self.pending_selection.clone() {
             let buffer = self.buffer.read(cx).snapshot(cx);
             let head;
             let tail;
-            match mode {
+            match &pending.mode {
                 SelectMode::Character => {
                     head = position.to_point(&display_map);
-                    tail = selection.tail().to_point(&buffer);
+                    tail = pending.selection.tail().to_point(&buffer);
                 }
                 SelectMode::Word(original_range) => {
                     let original_display_range = original_range.start.to_display_point(&display_map)
@@ -1112,14 +1367,15 @@ impl Editor {
             };
 
             if head < tail {
-                selection.start = buffer.anchor_before(head);
-                selection.end = buffer.anchor_before(tail);
-                selection.reversed = true;
+                pending.selection.start = buffer.anchor_before(head);
+                pending.selection.end = buffer.anchor_before(tail);
+                pending.selection.reversed = true;
             } else {
-                selection.start = buffer.anchor_before(tail);
-                selection.end = buffer.anchor_before(head);
-                selection.reversed = false;
+                pending.selection.start = buffer.anchor_before(tail);
+                pending.selection.end = buffer.anchor_before(head);
+                pending.selection.reversed = false;
             }
+            self.set_selections(self.selections.clone(), Some(pending), cx);
         } else {
             log::error!("update_selection dispatched with no pending selection");
             return;
@@ -1182,7 +1438,7 @@ impl Editor {
     }
 
     pub fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
-        if self.hide_completions(cx).is_some() {
+        if self.hide_context_menu(cx).is_some() {
             return;
         }
 
@@ -1197,18 +1453,13 @@ impl Editor {
 
         if self.active_diagnostics.is_some() {
             self.dismiss_diagnostics(cx);
-        } else if let Some(PendingSelection { selection, .. }) = self.pending_selection.take() {
-            let buffer = self.buffer.read(cx).snapshot(cx);
-            let selection = Selection {
-                id: selection.id,
-                start: selection.start.to_point(&buffer),
-                end: selection.end.to_point(&buffer),
-                reversed: selection.reversed,
-                goal: selection.goal,
-            };
-            if self.local_selections::<Point>(cx).is_empty() {
-                self.update_selections(vec![selection], Some(Autoscroll::Fit), cx);
+        } else if let Some(pending) = self.pending_selection.clone() {
+            let mut selections = self.selections.clone();
+            if selections.is_empty() {
+                selections = Arc::from([pending.selection]);
             }
+            self.set_selections(selections, None, cx);
+            self.request_autoscroll(Autoscroll::Fit, cx);
         } else {
             let buffer = self.buffer.read(cx).snapshot(cx);
             let mut oldest_selection = self.oldest_selection::<usize>(&buffer);
@@ -1502,16 +1753,15 @@ impl Editor {
     }
 
     fn trigger_completion_on_input(&mut self, text: &str, cx: &mut ViewContext<Self>) {
-        if let Some(selection) = self.newest_anchor_selection() {
-            if self
-                .buffer
-                .read(cx)
-                .is_completion_trigger(selection.head(), text, cx)
-            {
-                self.show_completions(&ShowCompletions, cx);
-            } else {
-                self.hide_completions(cx);
-            }
+        let selection = self.newest_anchor_selection();
+        if self
+            .buffer
+            .read(cx)
+            .is_completion_trigger(selection.head(), text, cx)
+        {
+            self.show_completions(&ShowCompletions, cx);
+        } else {
+            self.hide_context_menu(cx);
         }
     }
 
@@ -1656,23 +1906,37 @@ impl Editor {
     }
 
     fn show_completions(&mut self, _: &ShowCompletions, cx: &mut ViewContext<Self>) {
-        let position = if let Some(selection) = self.newest_anchor_selection() {
-            selection.head()
+        let project = if let Some(project) = self.project.clone() {
+            project
         } else {
             return;
         };
 
-        let query = Self::completion_query(&self.buffer.read(cx).read(cx), position.clone());
-        let completions = self
+        let position = self.newest_anchor_selection().head();
+        let (buffer, buffer_position) = if let Some(output) = self
             .buffer
-            .update(cx, |buffer, cx| buffer.completions(position.clone(), cx));
+            .read(cx)
+            .text_anchor_for_position(position.clone(), cx)
+        {
+            output
+        } else {
+            return;
+        };
+
+        let query = Self::completion_query(&self.buffer.read(cx).read(cx), position.clone());
+        let completions = project.update(cx, |project, cx| {
+            project.completions(&buffer, buffer_position.clone(), cx)
+        });
 
         let id = post_inc(&mut self.next_completion_id);
         let task = cx.spawn_weak(|this, mut cx| {
             async move {
                 let completions = completions.await?;
+                if completions.is_empty() {
+                    return Ok(());
+                }
 
-                let mut completion_state = CompletionState {
+                let mut menu = CompletionsMenu {
                     id,
                     initial_position: position,
                     match_candidates: completions
@@ -1685,30 +1949,30 @@ impl Editor {
                             )
                         })
                         .collect(),
+                    buffer,
                     completions: completions.into(),
                     matches: Vec::new().into(),
                     selected_item: 0,
                     list: Default::default(),
                 };
 
-                completion_state
-                    .filter(query.as_deref(), cx.background())
-                    .await;
+                menu.filter(query.as_deref(), cx.background()).await;
 
-                if let Some(this) = cx.read(|cx| this.upgrade(cx)) {
+                if let Some(this) = this.upgrade(&cx) {
                     this.update(&mut cx, |this, cx| {
-                        if let Some(prev_completion_state) = this.completion_state.as_ref() {
-                            if prev_completion_state.id > completion_state.id {
-                                return;
+                        match this.context_menu.as_ref() {
+                            None => {}
+                            Some(ContextMenu::Completions(prev_menu)) => {
+                                if prev_menu.id > menu.id {
+                                    return;
+                                }
                             }
+                            _ => return,
                         }
 
-                        this.completion_tasks
-                            .retain(|(id, _)| *id > completion_state.id);
-                        if completion_state.matches.is_empty() {
-                            this.hide_completions(cx);
-                        } else if this.focused {
-                            this.completion_state = Some(completion_state);
+                        this.completion_tasks.retain(|(id, _)| *id > menu.id);
+                        if this.focused {
+                            this.show_context_menu(ContextMenu::Completions(menu), cx);
                         }
 
                         cx.notify();
@@ -1721,22 +1985,24 @@ impl Editor {
         self.completion_tasks.push((id, task));
     }
 
-    fn hide_completions(&mut self, cx: &mut ViewContext<Self>) -> Option<CompletionState> {
-        cx.notify();
-        self.completion_tasks.clear();
-        self.completion_state.take()
-    }
-
     pub fn confirm_completion(
         &mut self,
-        completion_ix: Option<usize>,
+        ConfirmCompletion(completion_ix): &ConfirmCompletion,
         cx: &mut ViewContext<Self>,
     ) -> Option<Task<Result<()>>> {
-        let completion_state = self.hide_completions(cx)?;
-        let mat = completion_state
+        use language::ToOffset as _;
+
+        let completions_menu = if let ContextMenu::Completions(menu) = self.hide_context_menu(cx)? {
+            menu
+        } else {
+            return None;
+        };
+
+        let mat = completions_menu
             .matches
-            .get(completion_ix.unwrap_or(completion_state.selected_item))?;
-        let completion = completion_state.completions.get(mat.candidate_id)?;
+            .get(completion_ix.unwrap_or(completions_menu.selected_item))?;
+        let buffer_handle = completions_menu.buffer;
+        let completion = completions_menu.completions.get(mat.candidate_id)?;
 
         let snippet;
         let text;
@@ -1747,22 +2013,31 @@ impl Editor {
             snippet = None;
             text = completion.new_text.clone();
         };
-        let snapshot = self.buffer.read(cx).snapshot(cx);
-        let old_range = completion.old_range.to_offset(&snapshot);
-        let old_text = snapshot
-            .text_for_range(old_range.clone())
-            .collect::<String>();
+        let buffer = buffer_handle.read(cx);
+        let old_range = completion.old_range.to_offset(&buffer);
+        let old_text = buffer.text_for_range(old_range.clone()).collect::<String>();
 
         let selections = self.local_selections::<usize>(cx);
-        let newest_selection = selections.iter().max_by_key(|s| s.id)?;
-        let lookbehind = newest_selection.start.saturating_sub(old_range.start);
-        let lookahead = old_range.end.saturating_sub(newest_selection.end);
+        let newest_selection = self.newest_anchor_selection();
+        if newest_selection.start.buffer_id != Some(buffer_handle.id()) {
+            return None;
+        }
+
+        let lookbehind = newest_selection
+            .start
+            .text_anchor
+            .to_offset(buffer)
+            .saturating_sub(old_range.start);
+        let lookahead = old_range
+            .end
+            .saturating_sub(newest_selection.end.text_anchor.to_offset(buffer));
         let mut common_prefix_len = old_text
             .bytes()
             .zip(text.bytes())
             .take_while(|(a, b)| a == b)
             .count();
 
+        let snapshot = self.buffer.read(cx).snapshot(cx);
         let mut ranges = Vec::new();
         for selection in &selections {
             if snapshot.contains_str_at(selection.start.saturating_sub(lookbehind), &old_text) {
@@ -1800,95 +2075,240 @@ impl Editor {
         }
         self.end_transaction(cx);
 
-        Some(self.buffer.update(cx, |buffer, cx| {
-            buffer.apply_additional_edits_for_completion(completion.clone(), cx)
+        let project = self.project.clone()?;
+        let apply_edits = project.update(cx, |project, cx| {
+            project.apply_additional_edits_for_completion(
+                buffer_handle,
+                completion.clone(),
+                true,
+                cx,
+            )
+        });
+        Some(cx.foreground().spawn(async move {
+            apply_edits.await?;
+            Ok(())
         }))
     }
 
-    pub fn has_completions(&self) -> bool {
-        self.completion_state
-            .as_ref()
-            .map_or(false, |c| !c.matches.is_empty())
-    }
+    pub fn toggle_code_actions(
+        &mut self,
+        &ToggleCodeActions(deployed_from_indicator): &ToggleCodeActions,
+        cx: &mut ViewContext<Self>,
+    ) {
+        if matches!(
+            self.context_menu.as_ref(),
+            Some(ContextMenu::CodeActions(_))
+        ) {
+            self.context_menu.take();
+            cx.notify();
+            return;
+        }
 
-    pub fn render_completions(&self, cx: &AppContext) -> Option<ElementBox> {
-        enum CompletionTag {}
+        let mut task = self.code_actions_task.take();
+        cx.spawn_weak(|this, mut cx| async move {
+            while let Some(prev_task) = task {
+                prev_task.await;
+                task = this
+                    .upgrade(&cx)
+                    .and_then(|this| this.update(&mut cx, |this, _| this.code_actions_task.take()));
+            }
 
-        self.completion_state.as_ref().map(|state| {
-            let build_settings = self.build_settings.clone();
-            let settings = build_settings(cx);
-            let completions = state.completions.clone();
-            let matches = state.matches.clone();
-            let selected_item = state.selected_item;
-            UniformList::new(
-                state.list.clone(),
-                matches.len(),
-                move |range, items, cx| {
-                    let settings = build_settings(cx);
-                    let start_ix = range.start;
-                    for (ix, mat) in matches[range].iter().enumerate() {
-                        let completion = &completions[mat.candidate_id];
-                        let item_ix = start_ix + ix;
-                        items.push(
-                            MouseEventHandler::new::<CompletionTag, _, _, _>(
-                                mat.candidate_id,
+            if let Some(this) = this.upgrade(&cx) {
+                this.update(&mut cx, |this, cx| {
+                    if this.focused {
+                        if let Some((buffer, actions)) = this.available_code_actions.clone() {
+                            this.show_context_menu(
+                                ContextMenu::CodeActions(CodeActionsMenu {
+                                    buffer,
+                                    actions,
+                                    selected_item: Default::default(),
+                                    list: Default::default(),
+                                    deployed_from_indicator,
+                                }),
                                 cx,
-                                |state, _| {
-                                    let item_style = if item_ix == selected_item {
-                                        settings.style.autocomplete.selected_item
-                                    } else if state.hovered {
-                                        settings.style.autocomplete.hovered_item
-                                    } else {
-                                        settings.style.autocomplete.item
-                                    };
-
-                                    Text::new(
-                                        completion.label.text.clone(),
-                                        settings.style.text.clone(),
-                                    )
-                                    .with_soft_wrap(false)
-                                    .with_highlights(combine_syntax_and_fuzzy_match_highlights(
-                                        &completion.label.text,
-                                        settings.style.text.color.into(),
-                                        styled_runs_for_completion_label(
-                                            &completion.label,
-                                            settings.style.text.color,
-                                            &settings.style.syntax,
-                                        ),
-                                        &mat.positions,
-                                    ))
-                                    .contained()
-                                    .with_style(item_style)
-                                    .boxed()
-                                },
-                            )
-                            .with_cursor_style(CursorStyle::PointingHand)
-                            .on_mouse_down(move |cx| {
-                                cx.dispatch_action(ConfirmCompletion(Some(item_ix)));
-                            })
-                            .boxed(),
-                        );
+                            );
+                        }
                     }
-                },
-            )
-            .with_width_from_item(
-                state
-                    .matches
-                    .iter()
-                    .enumerate()
-                    .max_by_key(|(_, mat)| {
-                        state.completions[mat.candidate_id]
-                            .label
-                            .text
-                            .chars()
-                            .count()
-                    })
-                    .map(|(ix, _)| ix),
-            )
-            .contained()
-            .with_style(settings.style.autocomplete.container)
-            .boxed()
+                })
+            }
+            Ok::<_, anyhow::Error>(())
         })
+        .detach_and_log_err(cx);
+    }
+
+    pub fn confirm_code_action(
+        workspace: &mut Workspace,
+        ConfirmCodeAction(action_ix): &ConfirmCodeAction,
+        cx: &mut ViewContext<Workspace>,
+    ) -> Option<Task<Result<()>>> {
+        let editor = workspace.active_item(cx)?.act_as::<Editor>(cx)?;
+        let actions_menu = if let ContextMenu::CodeActions(menu) =
+            editor.update(cx, |editor, cx| editor.hide_context_menu(cx))?
+        {
+            menu
+        } else {
+            return None;
+        };
+        let action_ix = action_ix.unwrap_or(actions_menu.selected_item);
+        let action = actions_menu.actions.get(action_ix)?.clone();
+        let title = action.lsp_action.title.clone();
+        let buffer = actions_menu.buffer;
+        let replica_id = editor.read(cx).replica_id(cx);
+
+        let apply_code_actions = workspace.project().clone().update(cx, |project, cx| {
+            project.apply_code_action(buffer, action, true, cx)
+        });
+        Some(cx.spawn(|workspace, mut cx| async move {
+            let project_transaction = apply_code_actions.await?;
+
+            // If the code action's edits are all contained within this editor, then
+            // avoid opening a new editor to display them.
+            let mut entries = project_transaction.0.iter();
+            if let Some((buffer, transaction)) = entries.next() {
+                if entries.next().is_none() {
+                    let excerpt = editor.read_with(&cx, |editor, cx| {
+                        editor
+                            .buffer()
+                            .read(cx)
+                            .excerpt_containing(editor.newest_anchor_selection().head(), cx)
+                    });
+                    if let Some((excerpted_buffer, excerpt_range)) = excerpt {
+                        if excerpted_buffer == *buffer {
+                            let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+                            let excerpt_range = excerpt_range.to_offset(&snapshot);
+                            if snapshot
+                                .edited_ranges_for_transaction(transaction)
+                                .all(|range| {
+                                    excerpt_range.start <= range.start
+                                        && excerpt_range.end >= range.end
+                                })
+                            {
+                                return Ok(());
+                            }
+                        }
+                    }
+                }
+            }
+
+            let mut ranges_to_highlight = Vec::new();
+            let excerpt_buffer = cx.add_model(|cx| {
+                let mut multibuffer = MultiBuffer::new(replica_id).with_title(title);
+                for (buffer, transaction) in &project_transaction.0 {
+                    let snapshot = buffer.read(cx).snapshot();
+                    ranges_to_highlight.extend(
+                        multibuffer.push_excerpts_with_context_lines(
+                            buffer.clone(),
+                            snapshot
+                                .edited_ranges_for_transaction::<usize>(transaction)
+                                .collect(),
+                            1,
+                            cx,
+                        ),
+                    );
+                }
+                multibuffer.push_transaction(&project_transaction.0);
+                multibuffer
+            });
+
+            workspace.update(&mut cx, |workspace, cx| {
+                let editor = workspace.open_item(MultiBufferItemHandle(excerpt_buffer), cx);
+                if let Some(editor) = editor.act_as::<Self>(cx) {
+                    editor.update(cx, |editor, cx| {
+                        let settings = (editor.build_settings)(cx);
+                        editor.highlight_ranges::<Self>(
+                            ranges_to_highlight,
+                            settings.style.highlighted_line_background,
+                            cx,
+                        );
+                    });
+                }
+            });
+
+            Ok(())
+        }))
+    }
+
+    fn refresh_code_actions(&mut self, cx: &mut ViewContext<Self>) -> Option<()> {
+        let project = self.project.as_ref()?;
+        let buffer = self.buffer.read(cx);
+        let newest_selection = self.newest_anchor_selection().clone();
+        let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?;
+        let (end_buffer, end) = buffer.text_anchor_for_position(newest_selection.end, cx)?;
+        if start_buffer != end_buffer {
+            return None;
+        }
+
+        let actions = project.update(cx, |project, cx| {
+            project.code_actions(&start_buffer, start..end, cx)
+        });
+        self.code_actions_task = Some(cx.spawn_weak(|this, mut cx| async move {
+            let actions = actions.await;
+            if let Some(this) = this.upgrade(&cx) {
+                this.update(&mut cx, |this, cx| {
+                    this.available_code_actions = actions.log_err().and_then(|actions| {
+                        if actions.is_empty() {
+                            None
+                        } else {
+                            Some((start_buffer, actions.into()))
+                        }
+                    });
+                    cx.notify();
+                })
+            }
+        }));
+        None
+    }
+
+    pub fn render_code_actions_indicator(&self, cx: &mut ViewContext<Self>) -> Option<ElementBox> {
+        if self.available_code_actions.is_some() {
+            enum Tag {}
+            let style = (self.build_settings)(cx).style;
+            Some(
+                MouseEventHandler::new::<Tag, _, _, _>(0, cx, |_, _| {
+                    Svg::new("icons/zap.svg")
+                        .with_color(style.code_actions_indicator)
+                        .boxed()
+                })
+                .with_cursor_style(CursorStyle::PointingHand)
+                .with_padding(Padding::uniform(3.))
+                .on_mouse_down(|cx| {
+                    cx.dispatch_action(ToggleCodeActions(true));
+                })
+                .boxed(),
+            )
+        } else {
+            None
+        }
+    }
+
+    pub fn context_menu_visible(&self) -> bool {
+        self.context_menu
+            .as_ref()
+            .map_or(false, |menu| menu.visible())
+    }
+
+    pub fn render_context_menu(
+        &self,
+        cursor_position: DisplayPoint,
+        cx: &AppContext,
+    ) -> Option<(DisplayPoint, ElementBox)> {
+        self.context_menu
+            .as_ref()
+            .map(|menu| menu.render(cursor_position, self.build_settings.clone(), cx))
+    }
+
+    fn show_context_menu(&mut self, menu: ContextMenu, cx: &mut ViewContext<Self>) {
+        if !matches!(menu, ContextMenu::Completions(_)) {
+            self.completion_tasks.clear();
+        }
+        self.context_menu = Some(menu);
+        cx.notify();
+    }
+
+    fn hide_context_menu(&mut self, cx: &mut ViewContext<Self>) -> Option<ContextMenu> {
+        cx.notify();
+        self.completion_tasks.clear();
+        self.context_menu.take()
     }
 
     pub fn insert_snippet(

crates/editor/src/element.rs šŸ”—

@@ -3,11 +3,12 @@ use super::{
     Anchor, DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input,
     Scroll, Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN,
 };
+use crate::display_map::TransformBlock;
 use clock::ReplicaId;
 use collections::{BTreeMap, HashMap};
 use gpui::{
     color::Color,
-    elements::layout_highlighted_chunks,
+    elements::*,
     fonts::{HighlightStyle, Underline},
     geometry::{
         rect::RectF,
@@ -280,7 +281,7 @@ impl EditorElement {
         &mut self,
         bounds: RectF,
         visible_bounds: RectF,
-        layout: &LayoutState,
+        layout: &mut LayoutState,
         cx: &mut PaintContext,
     ) {
         let scroll_top = layout.snapshot.scroll_position().y() * layout.line_height;
@@ -294,6 +295,14 @@ impl EditorElement {
                 line.paint(line_origin, visible_bounds, layout.line_height, cx);
             }
         }
+
+        if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() {
+            let mut x = bounds.width() - layout.gutter_padding;
+            let mut y = *row as f32 * layout.line_height - scroll_top;
+            x += ((layout.gutter_padding + layout.text_offset.x()) - indicator.size().x()) / 2.;
+            y += (layout.line_height - indicator.size().y()) / 2.;
+            indicator.paint(bounds.origin() + vec2f(x, y), visible_bounds, cx);
+        }
     }
 
     fn paint_text(
@@ -392,20 +401,20 @@ impl EditorElement {
         }
         cx.scene.pop_layer();
 
-        if let Some((position, completions_list)) = layout.completions.as_mut() {
+        if let Some((position, context_menu)) = layout.context_menu.as_mut() {
             cx.scene.push_stacking_context(None);
 
             let cursor_row_layout = &layout.line_layouts[(position.row() - start_row) as usize];
             let x = cursor_row_layout.x_for_index(position.column() as usize) - scroll_left;
             let y = (position.row() + 1) as f32 * layout.line_height - scroll_top;
             let mut list_origin = content_origin + vec2f(x, y);
-            let list_height = completions_list.size().y();
+            let list_height = context_menu.size().y();
 
             if list_origin.y() + list_height > bounds.lower_left().y() {
                 list_origin.set_y(list_origin.y() - layout.line_height - list_height);
             }
 
-            completions_list.paint(
+            context_menu.paint(
                 list_origin,
                 RectF::from_points(Vector2F::zero(), vec2f(f32::MAX, f32::MAX)), // Let content bleed outside of editor
                 cx,
@@ -649,33 +658,91 @@ impl EditorElement {
         line_layouts: &[text_layout::Line],
         cx: &mut LayoutContext,
     ) -> Vec<(u32, ElementBox)> {
+        let scroll_x = snapshot.scroll_position.x();
         snapshot
             .blocks_in_range(rows.clone())
-            .map(|(start_row, block)| {
-                let anchor_row = block
-                    .position()
-                    .to_point(&snapshot.buffer_snapshot)
-                    .to_display_point(snapshot)
-                    .row();
-
-                let anchor_x = text_x
-                    + if rows.contains(&anchor_row) {
-                        line_layouts[(anchor_row - rows.start) as usize]
-                            .x_for_index(block.column() as usize)
-                    } else {
-                        layout_line(anchor_row, snapshot, style, cx.text_layout_cache)
-                            .x_for_index(block.column() as usize)
-                    };
+            .map(|(block_row, block)| {
+                let mut element = match block {
+                    TransformBlock::Custom(block) => {
+                        let align_to = block
+                            .position()
+                            .to_point(&snapshot.buffer_snapshot)
+                            .to_display_point(snapshot);
+                        let anchor_x = text_x
+                            + if rows.contains(&align_to.row()) {
+                                line_layouts[(align_to.row() - rows.start) as usize]
+                                    .x_for_index(align_to.column() as usize)
+                            } else {
+                                layout_line(align_to.row(), snapshot, style, cx.text_layout_cache)
+                                    .x_for_index(align_to.column() as usize)
+                            };
+
+                        block.render(&BlockContext {
+                            cx,
+                            anchor_x,
+                            gutter_padding,
+                            line_height,
+                            scroll_x,
+                            gutter_width,
+                            em_width,
+                        })
+                    }
+                    TransformBlock::ExcerptHeader {
+                        buffer,
+                        starts_new_buffer,
+                        ..
+                    } => {
+                        if *starts_new_buffer {
+                            let style = &self.settings.style.diagnostic_path_header;
+                            let font_size = (style.text_scale_factor
+                                * self.settings.style.text.font_size)
+                                .round();
+
+                            let mut filename = None;
+                            let mut parent_path = None;
+                            if let Some(path) = buffer.path() {
+                                filename =
+                                    path.file_name().map(|f| f.to_string_lossy().to_string());
+                                parent_path =
+                                    path.parent().map(|p| p.to_string_lossy().to_string() + "/");
+                            }
+
+                            Flex::row()
+                                .with_child(
+                                    Label::new(
+                                        filename.unwrap_or_else(|| "untitled".to_string()),
+                                        style.filename.text.clone().with_font_size(font_size),
+                                    )
+                                    .contained()
+                                    .with_style(style.filename.container)
+                                    .boxed(),
+                                )
+                                .with_children(parent_path.map(|path| {
+                                    Label::new(
+                                        path,
+                                        style.path.text.clone().with_font_size(font_size),
+                                    )
+                                    .contained()
+                                    .with_style(style.path.container)
+                                    .boxed()
+                                }))
+                                .aligned()
+                                .left()
+                                .contained()
+                                .with_style(style.container)
+                                .with_padding_left(gutter_padding + scroll_x * em_width)
+                                .expanded()
+                                .named("path header block")
+                        } else {
+                            let text_style = self.settings.style.text.clone();
+                            Label::new("…".to_string(), text_style)
+                                .contained()
+                                .with_padding_left(gutter_padding + scroll_x * em_width)
+                                .named("collapsed context")
+                        }
+                    }
+                };
 
-                let mut element = block.render(&BlockContext {
-                    cx,
-                    anchor_x,
-                    gutter_padding,
-                    line_height,
-                    scroll_x: snapshot.scroll_position.x(),
-                    gutter_width,
-                    em_width,
-                });
                 element.layout(
                     SizeConstraint {
                         min: Vector2F::zero(),
@@ -683,7 +750,7 @@ impl EditorElement {
                     },
                     cx,
                 );
-                (start_row, element)
+                (block_row, element)
             })
             .collect()
     }
@@ -859,7 +926,8 @@ impl Element for EditorElement {
             max_row.saturating_sub(1) as f32,
         );
 
-        let mut completions = None;
+        let mut context_menu = None;
+        let mut code_actions_indicator = None;
         self.update_view(cx.app, |view, cx| {
             let clamped = view.clamp_scroll_left(scroll_max.x());
             let autoscrolled;
@@ -880,21 +948,24 @@ impl Element for EditorElement {
                 snapshot = view.snapshot(cx);
             }
 
-            if view.has_completions() {
-                let newest_selection_head = view
-                    .newest_selection::<usize>(&snapshot.buffer_snapshot)
-                    .head()
-                    .to_display_point(&snapshot);
+            let newest_selection_head = view
+                .newest_selection::<usize>(&snapshot.buffer_snapshot)
+                .head()
+                .to_display_point(&snapshot);
 
-                if (start_row..end_row).contains(&newest_selection_head.row()) {
-                    let list = view.render_completions(cx).unwrap();
-                    completions = Some((newest_selection_head, list));
+            if (start_row..end_row).contains(&newest_selection_head.row()) {
+                if view.context_menu_visible() {
+                    context_menu = view.render_context_menu(newest_selection_head, cx);
                 }
+
+                code_actions_indicator = view
+                    .render_code_actions_indicator(cx)
+                    .map(|indicator| (newest_selection_head.row(), indicator));
             }
         });
 
-        if let Some((_, completions_list)) = completions.as_mut() {
-            completions_list.layout(
+        if let Some((_, context_menu)) = context_menu.as_mut() {
+            context_menu.layout(
                 SizeConstraint {
                     min: Vector2F::zero(),
                     max: vec2f(
@@ -906,6 +977,13 @@ impl Element for EditorElement {
             );
         }
 
+        if let Some((_, indicator)) = code_actions_indicator.as_mut() {
+            indicator.layout(
+                SizeConstraint::strict_along(Axis::Vertical, line_height * 0.618),
+                cx,
+            );
+        }
+
         let blocks = self.layout_blocks(
             start_row..end_row,
             &snapshot,
@@ -940,7 +1018,8 @@ impl Element for EditorElement {
                 em_width,
                 em_advance,
                 selections,
-                completions,
+                context_menu,
+                code_actions_indicator,
             },
         )
     }
@@ -989,8 +1068,14 @@ impl Element for EditorElement {
         paint: &mut PaintState,
         cx: &mut EventContext,
     ) -> bool {
-        if let Some((_, completion_list)) = &mut layout.completions {
-            if completion_list.dispatch_event(event, cx) {
+        if let Some((_, context_menu)) = &mut layout.context_menu {
+            if context_menu.dispatch_event(event, cx) {
+                return true;
+            }
+        }
+
+        if let Some((_, indicator)) = &mut layout.code_actions_indicator {
+            if indicator.dispatch_event(event, cx) {
                 return true;
             }
         }
@@ -1051,7 +1136,8 @@ pub struct LayoutState {
     highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
     selections: HashMap<ReplicaId, Vec<text::Selection<DisplayPoint>>>,
     text_offset: Vector2F,
-    completions: Option<(DisplayPoint, ElementBox)>,
+    context_menu: Option<(DisplayPoint, ElementBox)>,
+    code_actions_indicator: Option<(u32, ElementBox)>,
 }
 
 fn layout_line(
@@ -1298,6 +1384,7 @@ mod tests {
                     let settings = settings.clone();
                     Arc::new(move |_| settings.clone())
                 },
+                None,
                 cx,
             )
         });

crates/editor/src/items.rs šŸ”—

@@ -11,7 +11,7 @@ use std::path::PathBuf;
 use std::rc::Rc;
 use std::{cell::RefCell, fmt::Write};
 use text::{Point, Selection};
-use util::TryFutureExt;
+use util::ResultExt;
 use workspace::{
     ItemHandle, ItemNavHistory, ItemView, ItemViewHandle, NavHistory, PathOpener, Settings,
     StatusItemView, WeakItemHandle, Workspace,
@@ -25,6 +25,12 @@ pub struct BufferItemHandle(pub ModelHandle<Buffer>);
 #[derive(Clone)]
 struct WeakBufferItemHandle(WeakModelHandle<Buffer>);
 
+#[derive(Clone)]
+pub struct MultiBufferItemHandle(pub ModelHandle<MultiBuffer>);
+
+#[derive(Clone)]
+struct WeakMultiBufferItemHandle(WeakModelHandle<MultiBuffer>);
+
 impl PathOpener for BufferOpener {
     fn open(
         &self,
@@ -55,6 +61,7 @@ impl ItemHandle for BufferItemHandle {
             let mut editor = Editor::for_buffer(
                 buffer,
                 crate::settings_builder(weak_buffer, workspace.settings()),
+                Some(workspace.project().clone()),
                 cx,
             );
             editor.nav_history = Some(ItemNavHistory::new(nav_history, &cx.handle()));
@@ -86,6 +93,48 @@ impl ItemHandle for BufferItemHandle {
     }
 }
 
+impl ItemHandle for MultiBufferItemHandle {
+    fn add_view(
+        &self,
+        window_id: usize,
+        workspace: &Workspace,
+        nav_history: Rc<RefCell<NavHistory>>,
+        cx: &mut MutableAppContext,
+    ) -> Box<dyn ItemViewHandle> {
+        let weak_buffer = self.0.downgrade();
+        Box::new(cx.add_view(window_id, |cx| {
+            let mut editor = Editor::for_buffer(
+                self.0.clone(),
+                crate::settings_builder(weak_buffer, workspace.settings()),
+                Some(workspace.project().clone()),
+                cx,
+            );
+            editor.nav_history = Some(ItemNavHistory::new(nav_history, &cx.handle()));
+            editor
+        }))
+    }
+
+    fn boxed_clone(&self) -> Box<dyn ItemHandle> {
+        Box::new(self.clone())
+    }
+
+    fn to_any(&self) -> gpui::AnyModelHandle {
+        self.0.clone().into()
+    }
+
+    fn downgrade(&self) -> Box<dyn WeakItemHandle> {
+        Box::new(WeakMultiBufferItemHandle(self.0.downgrade()))
+    }
+
+    fn project_path(&self, _: &AppContext) -> Option<ProjectPath> {
+        None
+    }
+
+    fn id(&self) -> usize {
+        self.0.id()
+    }
+}
+
 impl WeakItemHandle for WeakBufferItemHandle {
     fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
         self.0
@@ -98,11 +147,25 @@ impl WeakItemHandle for WeakBufferItemHandle {
     }
 }
 
-impl ItemView for Editor {
-    type ItemHandle = BufferItemHandle;
+impl WeakItemHandle for WeakMultiBufferItemHandle {
+    fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
+        self.0
+            .upgrade(cx)
+            .map(|buffer| Box::new(MultiBufferItemHandle(buffer)) as Box<dyn ItemHandle>)
+    }
 
-    fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle {
-        BufferItemHandle(self.buffer.read(cx).as_singleton().unwrap())
+    fn id(&self) -> usize {
+        self.0.id()
+    }
+}
+
+impl ItemView for Editor {
+    fn item_id(&self, cx: &AppContext) -> usize {
+        if let Some(buffer) = self.buffer.read(cx).as_singleton() {
+            buffer.id()
+        } else {
+            self.buffer.id()
+        }
     }
 
     fn navigate(&mut self, data: Box<dyn std::any::Any>, cx: &mut ViewContext<Self>) {
@@ -141,9 +204,8 @@ impl ItemView for Editor {
     }
 
     fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
-        if let Some(selection) = self.newest_anchor_selection() {
-            self.push_to_nav_history(selection.head(), None, cx);
-        }
+        let selection = self.newest_anchor_selection();
+        self.push_to_nav_history(selection.head(), None, cx);
     }
 
     fn is_dirty(&self, cx: &AppContext) -> bool {
@@ -155,25 +217,39 @@ impl ItemView for Editor {
     }
 
     fn can_save(&self, cx: &AppContext) -> bool {
-        self.project_path(cx).is_some()
+        !self.buffer().read(cx).is_singleton() || self.project_path(cx).is_some()
     }
 
-    fn save(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
+    fn save(
+        &mut self,
+        project: ModelHandle<Project>,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>> {
         let buffer = self.buffer().clone();
-        cx.spawn(|editor, mut cx| async move {
-            buffer
-                .update(&mut cx, |buffer, cx| buffer.format(cx).log_err())
-                .await;
-            editor.update(&mut cx, |editor, cx| {
+        let buffers = buffer.read(cx).all_buffers();
+        let transaction = project.update(cx, |project, cx| project.format(buffers, true, cx));
+        cx.spawn(|this, mut cx| async move {
+            let transaction = transaction.await.log_err();
+            this.update(&mut cx, |editor, cx| {
                 editor.request_autoscroll(Autoscroll::Fit, cx)
             });
-            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
+            buffer
+                .update(&mut cx, |buffer, cx| {
+                    if let Some(transaction) = transaction {
+                        if !buffer.is_singleton() {
+                            buffer.push_transaction(&transaction.0);
+                        }
+                    }
+
+                    buffer.save(cx)
+                })
+                .await?;
             Ok(())
         })
     }
 
-    fn can_save_as(&self, _: &AppContext) -> bool {
-        true
+    fn can_save_as(&self, cx: &AppContext) -> bool {
+        self.buffer().read(cx).is_singleton()
     }
 
     fn save_as(
@@ -331,7 +407,7 @@ impl View for DiagnosticMessage {
         if let Some(diagnostic) = &self.diagnostic {
             let theme = &self.settings.borrow().theme.workspace.status_bar;
             Label::new(
-                diagnostic.message.lines().next().unwrap().to_string(),
+                diagnostic.message.split('\n').next().unwrap().to_string(),
                 theme.diagnostic_message.clone(),
             )
             .contained()

crates/editor/src/movement.rs šŸ”—

@@ -225,13 +225,8 @@ pub fn surrounding_word(map: &DisplaySnapshot, position: DisplayPoint) -> Range<
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::{
-        display_map::{BlockDisposition, BlockProperties},
-        Buffer, DisplayMap, ExcerptProperties, MultiBuffer,
-    };
-    use gpui::{elements::Empty, Element};
+    use crate::{Buffer, DisplayMap, MultiBuffer};
     use language::Point;
-    use std::sync::Arc;
 
     #[gpui::test]
     fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) {
@@ -242,62 +237,24 @@ mod tests {
             .unwrap();
 
         let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx));
-        let mut excerpt1_header_position = None;
-        let mut excerpt2_header_position = None;
         let multibuffer = cx.add_model(|cx| {
             let mut multibuffer = MultiBuffer::new(0);
-            let excerpt1_id = multibuffer.push_excerpt(
-                ExcerptProperties {
-                    buffer: &buffer,
-                    range: Point::new(0, 0)..Point::new(1, 4),
-                },
-                cx,
-            );
-            let excerpt2_id = multibuffer.push_excerpt(
-                ExcerptProperties {
-                    buffer: &buffer,
-                    range: Point::new(2, 0)..Point::new(3, 2),
-                },
+            multibuffer.push_excerpts(
+                buffer.clone(),
+                [
+                    Point::new(0, 0)..Point::new(1, 4),
+                    Point::new(2, 0)..Point::new(3, 2),
+                ],
                 cx,
             );
-
-            excerpt1_header_position = Some(
-                multibuffer
-                    .read(cx)
-                    .anchor_in_excerpt(excerpt1_id, language::Anchor::min()),
-            );
-            excerpt2_header_position = Some(
-                multibuffer
-                    .read(cx)
-                    .anchor_in_excerpt(excerpt2_id, language::Anchor::min()),
-            );
             multibuffer
         });
 
         let display_map =
-            cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx));
-        display_map.update(cx, |display_map, cx| {
-            display_map.insert_blocks(
-                [
-                    BlockProperties {
-                        position: excerpt1_header_position.unwrap(),
-                        height: 2,
-                        render: Arc::new(|_| Empty::new().boxed()),
-                        disposition: BlockDisposition::Above,
-                    },
-                    BlockProperties {
-                        position: excerpt2_header_position.unwrap(),
-                        height: 3,
-                        render: Arc::new(|_| Empty::new().boxed()),
-                        disposition: BlockDisposition::Above,
-                    },
-                ],
-                cx,
-            )
-        });
+            cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, 2, 2, cx));
 
         let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
-        assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn");
+        assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
 
         // Can't move up into the first excerpt's header
         assert_eq!(
@@ -321,22 +278,22 @@ mod tests {
 
         // Move up and down across second excerpt's header
         assert_eq!(
-            up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(),
+            up(&snapshot, DisplayPoint::new(6, 5), SelectionGoal::Column(5)).unwrap(),
             (DisplayPoint::new(3, 4), SelectionGoal::Column(5)),
         );
         assert_eq!(
             down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(),
-            (DisplayPoint::new(7, 5), SelectionGoal::Column(5)),
+            (DisplayPoint::new(6, 5), SelectionGoal::Column(5)),
         );
 
         // Can't move down off the end
         assert_eq!(
-            down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(),
-            (DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
+            down(&snapshot, DisplayPoint::new(7, 0), SelectionGoal::Column(0)).unwrap(),
+            (DisplayPoint::new(7, 2), SelectionGoal::Column(2)),
         );
         assert_eq!(
-            down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(),
-            (DisplayPoint::new(8, 2), SelectionGoal::Column(2)),
+            down(&snapshot, DisplayPoint::new(7, 2), SelectionGoal::Column(2)).unwrap(),
+            (DisplayPoint::new(7, 2), SelectionGoal::Column(2)),
         );
     }
 
@@ -351,8 +308,8 @@ mod tests {
         let font_size = 14.0;
 
        let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx);
-        let display_map =
-            cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
+        let display_map = cx
+            .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx));
         let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
         assert_eq!(
             prev_word_boundary(&snapshot, DisplayPoint::new(0, 12)),
@@ -407,8 +364,8 @@ mod tests {
             .unwrap();
         let font_size = 14.0;
         let buffer = MultiBuffer::build_simple("lorem ipsum   dolor\n    sit", cx);
-        let display_map =
-            cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx));
+        let display_map = cx
+            .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx));
         let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
 
         assert_eq!(

crates/editor/src/multi_buffer.rs šŸ”—

@@ -3,18 +3,18 @@ mod anchor;
 pub use anchor::{Anchor, AnchorRangeExt};
 use anyhow::Result;
 use clock::ReplicaId;
-use collections::{HashMap, HashSet};
+use collections::{Bound, HashMap, HashSet};
 use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
 pub use language::Completion;
 use language::{
     Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Outline,
-    OutlineItem, Selection, ToOffset as _, ToPoint as _, TransactionId,
+    OutlineItem, Selection, ToOffset as _, ToPoint as _, ToPointUtf16 as _, TransactionId,
 };
 use std::{
     cell::{Ref, RefCell},
     cmp, fmt, io,
     iter::{self, FromIterator},
-    ops::{Range, Sub},
+    ops::{Range, RangeBounds, Sub},
     str,
     sync::Arc,
     time::{Duration, Instant},
@@ -27,7 +27,6 @@ use text::{
     AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary,
 };
 use theme::SyntaxTheme;
-use util::post_inc;
 
 const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize];
 
@@ -40,10 +39,11 @@ pub struct MultiBuffer {
     singleton: bool,
     replica_id: ReplicaId,
     history: History,
+    title: Option<String>,
 }
 
 struct History {
-    next_transaction_id: usize,
+    next_transaction_id: TransactionId,
     undo_stack: Vec<Transaction>,
     redo_stack: Vec<Transaction>,
     transaction_depth: usize,
@@ -59,10 +59,11 @@ pub enum CharKind {
 }
 
 struct Transaction {
-    id: usize,
-    buffer_transactions: HashSet<(usize, text::TransactionId)>,
+    id: TransactionId,
+    buffer_transactions: HashMap<usize, text::TransactionId>,
     first_edit_at: Instant,
     last_edit_at: Instant,
+    suppress_grouping: bool,
 }
 
 pub trait ToOffset: 'static + fmt::Debug {
@@ -73,12 +74,17 @@ pub trait ToPoint: 'static + fmt::Debug {
     fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point;
 }
 
+pub trait ToPointUtf16: 'static + fmt::Debug {
+    fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16;
+}
+
 struct BufferState {
     buffer: ModelHandle<Buffer>,
     last_version: clock::Global,
     last_parse_count: usize,
     last_selections_update_count: usize,
     last_diagnostics_update_count: usize,
+    last_file_update_count: usize,
     excerpts: Vec<ExcerptId>,
     _subscriptions: [gpui::Subscription; 2],
 }
@@ -89,13 +95,16 @@ pub struct MultiBufferSnapshot {
     excerpts: SumTree<Excerpt>,
     parse_count: usize,
     diagnostics_update_count: usize,
+    trailing_excerpt_update_count: usize,
     is_dirty: bool,
     has_conflict: bool,
 }
 
-pub struct ExcerptProperties<'a, T> {
-    pub buffer: &'a ModelHandle<Buffer>,
-    pub range: Range<T>,
+pub struct ExcerptBoundary {
+    pub row: u32,
+    pub buffer: BufferSnapshot,
+    pub range: Range<text::Anchor>,
+    pub starts_new_buffer: bool,
 }
 
 #[derive(Clone)]
@@ -160,19 +169,19 @@ impl MultiBuffer {
                 transaction_depth: 0,
                 group_interval: Duration::from_millis(300),
             },
+            title: Default::default(),
         }
     }
 
+    pub fn with_title(mut self, title: String) -> Self {
+        self.title = Some(title);
+        self
+    }
+
     pub fn singleton(buffer: ModelHandle<Buffer>, cx: &mut ModelContext<Self>) -> Self {
         let mut this = Self::new(buffer.read(cx).replica_id());
         this.singleton = true;
-        this.push_excerpt(
-            ExcerptProperties {
-                buffer: &buffer,
-                range: text::Anchor::min()..text::Anchor::max(),
-            },
-            cx,
-        );
+        this.push_excerpts(buffer, [text::Anchor::min()..text::Anchor::max()], cx);
         this.snapshot.borrow_mut().singleton = true;
         this
     }
@@ -185,56 +194,13 @@ impl MultiBuffer {
 
     #[cfg(any(test, feature = "test-support"))]
     pub fn build_random(
-        mut rng: &mut impl rand::Rng,
+        rng: &mut impl rand::Rng,
         cx: &mut gpui::MutableAppContext,
     ) -> ModelHandle<Self> {
-        use rand::prelude::*;
-        use std::env;
-        use text::RandomCharIter;
-
-        let max_excerpts = env::var("MAX_EXCERPTS")
-            .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable"))
-            .unwrap_or(5);
-        let excerpts = rng.gen_range(1..=max_excerpts);
-
         cx.add_model(|cx| {
             let mut multibuffer = MultiBuffer::new(0);
-            let mut buffers = Vec::new();
-            for _ in 0..excerpts {
-                let buffer_handle = if rng.gen() || buffers.is_empty() {
-                    let text = RandomCharIter::new(&mut rng).take(10).collect::<String>();
-                    buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
-                    let buffer = buffers.last().unwrap();
-                    log::info!(
-                        "Creating new buffer {} with text: {:?}",
-                        buffer.id(),
-                        buffer.read(cx).text()
-                    );
-                    buffers.last().unwrap()
-                } else {
-                    buffers.choose(rng).unwrap()
-                };
-
-                let buffer = buffer_handle.read(cx);
-                let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
-                let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
-                let header_height = rng.gen_range(0..=5);
-                log::info!(
-                    "Inserting excerpt from buffer {} with header height {} and range {:?}: {:?}",
-                    buffer_handle.id(),
-                    header_height,
-                    start_ix..end_ix,
-                    &buffer.text()[start_ix..end_ix]
-                );
-
-                multibuffer.push_excerpt(
-                    ExcerptProperties {
-                        buffer: buffer_handle,
-                        range: start_ix..end_ix,
-                    },
-                    cx,
-                );
-            }
+            let mutation_count = rng.gen_range(1..=5);
+            multibuffer.randomly_edit_excerpts(rng, mutation_count, cx);
             multibuffer
         })
     }
@@ -269,6 +235,10 @@ impl MultiBuffer {
         }
     }
 
+    pub fn is_singleton(&self) -> bool {
+        self.singleton
+    }
+
     pub fn subscribe(&mut self) -> Subscription {
         self.subscriptions.subscribe()
     }
@@ -306,6 +276,10 @@ impl MultiBuffer {
         S: ToOffset,
         T: Into<String>,
     {
+        if self.buffers.borrow().is_empty() {
+            return;
+        }
+
         if let Some(buffer) = self.as_singleton() {
             let snapshot = self.read(cx);
             let ranges = ranges_iter
@@ -451,12 +425,12 @@ impl MultiBuffer {
             return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx));
         }
 
-        let mut buffer_transactions = HashSet::default();
+        let mut buffer_transactions = HashMap::default();
         for BufferState { buffer, .. } in self.buffers.borrow().values() {
             if let Some(transaction_id) =
                 buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx))
             {
-                buffer_transactions.insert((buffer.id(), transaction_id));
+                buffer_transactions.insert(buffer.id(), transaction_id);
             }
         }
 
@@ -468,12 +442,24 @@ impl MultiBuffer {
         }
     }
 
-    pub fn avoid_grouping_next_transaction(&mut self, cx: &mut ModelContext<Self>) {
+    pub fn finalize_last_transaction(&mut self, cx: &mut ModelContext<Self>) {
+        self.history.finalize_last_transaction();
         for BufferState { buffer, .. } in self.buffers.borrow().values() {
-            buffer.update(cx, |buffer, _| buffer.avoid_grouping_next_transaction());
+            buffer.update(cx, |buffer, _| {
+                buffer.finalize_last_transaction();
+            });
         }
     }
 
+    pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T)
+    where
+        T: IntoIterator<Item = (&'a ModelHandle<Buffer>, &'a language::Transaction)>,
+    {
+        self.history
+            .push_transaction(buffer_transactions, Instant::now());
+        self.history.finalize_last_transaction();
+    }
+
     pub fn set_active_selections(
         &mut self,
         selections: &[Selection<Anchor>],
@@ -571,10 +557,14 @@ impl MultiBuffer {
 
         while let Some(transaction) = self.history.pop_undo() {
             let mut undone = false;
-            for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions {
+            for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
                 if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
-                    undone |= buffer.update(cx, |buf, cx| {
-                        buf.undo_transaction(*buffer_transaction_id, cx)
+                    undone |= buffer.update(cx, |buffer, cx| {
+                        let undo_to = *buffer_transaction_id;
+                        if let Some(entry) = buffer.peek_undo_stack() {
+                            *buffer_transaction_id = entry.transaction_id();
+                        }
+                        buffer.undo_to_transaction(undo_to, cx)
                     });
                 }
             }
@@ -594,10 +584,14 @@ impl MultiBuffer {
 
         while let Some(transaction) = self.history.pop_redo() {
             let mut redone = false;
-            for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions {
+            for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
                 if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) {
-                    redone |= buffer.update(cx, |buf, cx| {
-                        buf.redo_transaction(*buffer_transaction_id, cx)
+                    redone |= buffer.update(cx, |buffer, cx| {
+                        let redo_to = *buffer_transaction_id;
+                        if let Some(entry) = buffer.peek_redo_stack() {
+                            *buffer_transaction_id = entry.transaction_id();
+                        }
+                        buffer.redo_to_transaction(redo_to, cx)
                     });
                 }
             }
@@ -610,32 +604,114 @@ impl MultiBuffer {
         None
     }
 
-    pub fn push_excerpt<O>(
+    pub fn push_excerpts<O>(
         &mut self,
-        props: ExcerptProperties<O>,
+        buffer: ModelHandle<Buffer>,
+        ranges: impl IntoIterator<Item = Range<O>>,
         cx: &mut ModelContext<Self>,
-    ) -> ExcerptId
+    ) -> Vec<ExcerptId>
     where
         O: text::ToOffset,
     {
-        self.insert_excerpt_after(&ExcerptId::max(), props, cx)
+        self.insert_excerpts_after(&ExcerptId::max(), buffer, ranges, cx)
     }
 
-    pub fn insert_excerpt_after<O>(
+    pub fn push_excerpts_with_context_lines<O>(
+        &mut self,
+        buffer: ModelHandle<Buffer>,
+        ranges: Vec<Range<O>>,
+        context_line_count: u32,
+        cx: &mut ModelContext<Self>,
+    ) -> Vec<Range<Anchor>>
+    where
+        O: text::ToPoint + text::ToOffset,
+    {
+        let buffer_id = buffer.id();
+        let buffer_snapshot = buffer.read(cx).snapshot();
+        let max_point = buffer_snapshot.max_point();
+
+        let mut range_counts = Vec::new();
+        let mut excerpt_ranges = Vec::new();
+        let mut range_iter = ranges
+            .iter()
+            .map(|range| {
+                range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot)
+            })
+            .peekable();
+        while let Some(range) = range_iter.next() {
+            let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
+            let mut excerpt_end =
+                Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
+            let mut ranges_in_excerpt = 1;
+
+            while let Some(next_range) = range_iter.peek() {
+                if next_range.start.row <= excerpt_end.row + context_line_count {
+                    excerpt_end =
+                        Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
+                    ranges_in_excerpt += 1;
+                    range_iter.next();
+                } else {
+                    break;
+                }
+            }
+
+            excerpt_ranges.push(excerpt_start..excerpt_end);
+            range_counts.push(ranges_in_excerpt);
+        }
+
+        let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);
+
+        let mut anchor_ranges = Vec::new();
+        let mut ranges = ranges.into_iter();
+        for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.into_iter()) {
+            anchor_ranges.extend(ranges.by_ref().take(range_count).map(|range| {
+                let start = Anchor {
+                    buffer_id: Some(buffer_id),
+                    excerpt_id: excerpt_id.clone(),
+                    text_anchor: buffer_snapshot.anchor_after(range.start),
+                };
+                let end = Anchor {
+                    buffer_id: Some(buffer_id),
+                    excerpt_id: excerpt_id.clone(),
+                    text_anchor: buffer_snapshot.anchor_after(range.end),
+                };
+                start..end
+            }))
+        }
+        anchor_ranges
+    }
+
+    pub fn insert_excerpts_after<O>(
         &mut self,
         prev_excerpt_id: &ExcerptId,
-        props: ExcerptProperties<O>,
+        buffer: ModelHandle<Buffer>,
+        ranges: impl IntoIterator<Item = Range<O>>,
         cx: &mut ModelContext<Self>,
-    ) -> ExcerptId
+    ) -> Vec<ExcerptId>
     where
         O: text::ToOffset,
     {
         assert_eq!(self.history.transaction_depth, 0);
         self.sync(cx);
 
-        let buffer_snapshot = props.buffer.read(cx).snapshot();
-        let range = buffer_snapshot.anchor_before(&props.range.start)
-            ..buffer_snapshot.anchor_after(&props.range.end);
+        let buffer_id = buffer.id();
+        let buffer_snapshot = buffer.read(cx).snapshot();
+
+        let mut buffers = self.buffers.borrow_mut();
+        let buffer_state = buffers.entry(buffer_id).or_insert_with(|| BufferState {
+            last_version: buffer_snapshot.version().clone(),
+            last_parse_count: buffer_snapshot.parse_count(),
+            last_selections_update_count: buffer_snapshot.selections_update_count(),
+            last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(),
+            last_file_update_count: buffer_snapshot.file_update_count(),
+            excerpts: Default::default(),
+            _subscriptions: [
+                cx.observe(&buffer, |_, _, cx| cx.notify()),
+                cx.subscribe(&buffer, Self::on_buffer_event),
+            ],
+            buffer,
+        });
+
         let mut snapshot = self.snapshot.borrow_mut();
         let mut cursor = snapshot.excerpts.cursor::<Option<&ExcerptId>>();
         let mut new_excerpts = cursor.slice(&Some(prev_excerpt_id), Bias::Right, &());
@@ -655,40 +731,37 @@ impl MultiBuffer {
             next_id = next_excerpt.id.clone();
         }
 
-        let id = ExcerptId::between(&prev_id, &next_id);
-
-        let mut buffers = self.buffers.borrow_mut();
-        let buffer_state = buffers
-            .entry(props.buffer.id())
-            .or_insert_with(|| BufferState {
-                last_version: buffer_snapshot.version().clone(),
-                last_parse_count: buffer_snapshot.parse_count(),
-                last_selections_update_count: buffer_snapshot.selections_update_count(),
-                last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(),
-                excerpts: Default::default(),
-                _subscriptions: [
-                    cx.observe(&props.buffer, |_, _, cx| cx.notify()),
-                    cx.subscribe(&props.buffer, Self::on_buffer_event),
-                ],
-                buffer: props.buffer.clone(),
-            });
-        if let Err(ix) = buffer_state.excerpts.binary_search(&id) {
-            buffer_state.excerpts.insert(ix, id.clone());
+        let mut ids = Vec::new();
+        let mut ranges = ranges.into_iter().peekable();
+        while let Some(range) = ranges.next() {
+            let id = ExcerptId::between(&prev_id, &next_id);
+            if let Err(ix) = buffer_state.excerpts.binary_search(&id) {
+                buffer_state.excerpts.insert(ix, id.clone());
+            }
+            let range = buffer_snapshot.anchor_before(&range.start)
+                ..buffer_snapshot.anchor_after(&range.end);
+            let excerpt = Excerpt::new(
+                id.clone(),
+                buffer_id,
+                buffer_snapshot.clone(),
+                range,
+                ranges.peek().is_some() || cursor.item().is_some(),
+            );
+            new_excerpts.push(excerpt, &());
+            prev_id = id.clone();
+            ids.push(id);
         }
 
-        let excerpt = Excerpt::new(
-            id.clone(),
-            props.buffer.id(),
-            buffer_snapshot,
-            range,
-            cursor.item().is_some(),
-        );
-        new_excerpts.push(excerpt, &());
         let edit_end = new_excerpts.summary().text.bytes;
 
-        new_excerpts.push_tree(cursor.suffix(&()), &());
+        let suffix = cursor.suffix(&());
+        let changed_trailing_excerpt = suffix.is_empty();
+        new_excerpts.push_tree(suffix, &());
         drop(cursor);
         snapshot.excerpts = new_excerpts;
+        if changed_trailing_excerpt {
+            snapshot.trailing_excerpt_update_count += 1;
+        }
 
         self.subscriptions.publish_mut([Edit {
             old: edit_start..edit_start,
@@ -696,7 +769,7 @@ impl MultiBuffer {
         }]);
 
         cx.notify();
-        id
+        ids
     }
 
     pub fn excerpt_ids_for_buffer(&self, buffer: &ModelHandle<Buffer>) -> Vec<ExcerptId> {
@@ -706,12 +779,35 @@ impl MultiBuffer {
             .map_or(Vec::new(), |state| state.excerpts.clone())
     }
 
-    pub fn excerpted_buffers<'a, T: ToOffset>(
+    pub fn excerpt_containing(
+        &self,
+        position: impl ToOffset,
+        cx: &AppContext,
+    ) -> Option<(ModelHandle<Buffer>, Range<text::Anchor>)> {
+        let snapshot = self.read(cx);
+        let position = position.to_offset(&snapshot);
+
+        let mut cursor = snapshot.excerpts.cursor::<usize>();
+        cursor.seek(&position, Bias::Right, &());
+        cursor.item().map(|excerpt| {
+            (
+                self.buffers
+                    .borrow()
+                    .get(&excerpt.buffer_id)
+                    .unwrap()
+                    .buffer
+                    .clone(),
+                excerpt.range.clone(),
+            )
+        })
+    }
+
+    pub fn range_to_buffer_ranges<'a, T: ToOffset>(
         &'a self,
         range: Range<T>,
         cx: &AppContext,
     ) -> Vec<(ModelHandle<Buffer>, Range<usize>)> {
-        let snapshot = self.snapshot(cx);
+        let snapshot = self.read(cx);
         let start = range.start.to_offset(&snapshot);
         let end = range.end.to_offset(&snapshot);
 
@@ -799,9 +895,15 @@ impl MultiBuffer {
                 });
             }
         }
-        new_excerpts.push_tree(cursor.suffix(&()), &());
+        let suffix = cursor.suffix(&());
+        let changed_trailing_excerpt = suffix.is_empty();
+        new_excerpts.push_tree(suffix, &());
         drop(cursor);
         snapshot.excerpts = new_excerpts;
+        if changed_trailing_excerpt {
+            snapshot.trailing_excerpt_update_count += 1;
+        }
+
         self.subscriptions.publish_mut(edits);
         cx.notify();
     }
@@ -810,13 +912,13 @@ impl MultiBuffer {
         &'a self,
         position: T,
         cx: &AppContext,
-    ) -> (ModelHandle<Buffer>, language::Anchor) {
+    ) -> Option<(ModelHandle<Buffer>, language::Anchor)> {
         let snapshot = self.read(cx);
         let anchor = snapshot.anchor_before(position);
-        (
-            self.buffers.borrow()[&anchor.buffer_id].buffer.clone(),
+        Some((
+            self.buffers.borrow()[&anchor.buffer_id?].buffer.clone(),
             anchor.text_anchor,
-        )
+        ))
     }
 
     fn on_buffer_event(
@@ -828,18 +930,12 @@ impl MultiBuffer {
         cx.emit(event.clone());
     }
 
-    pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let mut format_tasks = Vec::new();
-        for BufferState { buffer, .. } in self.buffers.borrow().values() {
-            format_tasks.push(buffer.update(cx, |buffer, cx| buffer.format(cx)));
-        }
-
-        cx.spawn(|_, _| async move {
-            for format in format_tasks {
-                format.await?;
-            }
-            Ok(())
-        })
+    pub fn all_buffers(&self) -> HashSet<ModelHandle<Buffer>> {
+        self.buffers
+            .borrow()
+            .values()
+            .map(|state| state.buffer.clone())
+            .collect()
     }
 
     pub fn save(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
@@ -856,41 +952,6 @@ impl MultiBuffer {
         })
     }
 
-    pub fn completions<T>(
-        &self,
-        position: T,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Completion<Anchor>>>>
-    where
-        T: ToOffset,
-    {
-        let anchor = self.read(cx).anchor_before(position);
-        let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone();
-        let completions =
-            buffer.update(cx, |buffer, cx| buffer.completions(anchor.text_anchor, cx));
-        cx.spawn(|this, cx| async move {
-            completions.await.map(|completions| {
-                let snapshot = this.read_with(&cx, |buffer, cx| buffer.snapshot(cx));
-                completions
-                    .into_iter()
-                    .map(|completion| Completion {
-                        old_range: snapshot.anchor_in_excerpt(
-                            anchor.excerpt_id.clone(),
-                            completion.old_range.start,
-                        )
-                            ..snapshot.anchor_in_excerpt(
-                                anchor.excerpt_id.clone(),
-                                completion.old_range.end,
-                            ),
-                        new_text: completion.new_text,
-                        label: completion.label,
-                        lsp_completion: completion.lsp_completion,
-                    })
-                    .collect()
-            })
-        })
-    }
-
     pub fn is_completion_trigger<T>(&self, position: T, text: &str, cx: &AppContext) -> bool
     where
         T: ToOffset,
@@ -911,45 +972,13 @@ impl MultiBuffer {
 
         let snapshot = self.snapshot(cx);
         let anchor = snapshot.anchor_before(position);
-        let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone();
-        buffer
-            .read(cx)
-            .completion_triggers()
-            .iter()
-            .any(|string| string == text)
-    }
-
-    pub fn apply_additional_edits_for_completion(
-        &self,
-        completion: Completion<Anchor>,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<()>> {
-        let buffer = if let Some(buffer_state) = self
-            .buffers
-            .borrow()
-            .get(&completion.old_range.start.buffer_id)
-        {
-            buffer_state.buffer.clone()
-        } else {
-            return Task::ready(Ok(()));
-        };
-
-        let apply_edits = buffer.update(cx, |buffer, cx| {
-            buffer.apply_additional_edits_for_completion(
-                Completion {
-                    old_range: completion.old_range.start.text_anchor
-                        ..completion.old_range.end.text_anchor,
-                    new_text: completion.new_text,
-                    label: completion.label,
-                    lsp_completion: completion.lsp_completion,
-                },
-                true,
-                cx,
-            )
-        });
-        cx.foreground().spawn(async move {
-            apply_edits.await?;
-            Ok(())
+        anchor.buffer_id.map_or(false, |buffer_id| {
+            let buffer = self.buffers.borrow()[&buffer_id].buffer.clone();
+            buffer
+                .read(cx)
+                .completion_triggers()
+                .iter()
+                .any(|string| string == text)
         })
     }
 
@@ -965,6 +994,16 @@ impl MultiBuffer {
         self.as_singleton()?.read(cx).file()
     }
 
+    pub fn title(&self, cx: &AppContext) -> String {
+        if let Some(title) = self.title.clone() {
+            title
+        } else if let Some(file) = self.file(cx) {
+            file.file_name(cx).to_string_lossy().into()
+        } else {
+            "untitled".into()
+        }
+    }
+
     #[cfg(test)]
     pub fn is_parsing(&self, cx: &AppContext) -> bool {
         self.as_singleton().unwrap().read(cx).is_parsing()
@@ -984,6 +1023,7 @@ impl MultiBuffer {
             let parse_count = buffer.parse_count();
             let selections_update_count = buffer.selections_update_count();
             let diagnostics_update_count = buffer.diagnostics_update_count();
+            let file_update_count = buffer.file_update_count();
 
             let buffer_edited = version.changed_since(&buffer_state.last_version);
             let buffer_reparsed = parse_count > buffer_state.last_parse_count;
@@ -991,15 +1031,18 @@ impl MultiBuffer {
                 selections_update_count > buffer_state.last_selections_update_count;
             let buffer_diagnostics_updated =
                 diagnostics_update_count > buffer_state.last_diagnostics_update_count;
+            let buffer_file_updated = file_update_count > buffer_state.last_file_update_count;
             if buffer_edited
                 || buffer_reparsed
                 || buffer_selections_updated
                 || buffer_diagnostics_updated
+                || buffer_file_updated
             {
                 buffer_state.last_version = version;
                 buffer_state.last_parse_count = parse_count;
                 buffer_state.last_selections_update_count = selections_update_count;
                 buffer_state.last_diagnostics_update_count = diagnostics_update_count;
+                buffer_state.last_file_update_count = file_update_count;
                 excerpts_to_edit.extend(
                     buffer_state
                         .excerpts
@@ -1105,6 +1148,88 @@ impl MultiBuffer {
 
         self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
     }
+
+    pub fn randomly_edit_excerpts(
+        &mut self,
+        rng: &mut impl rand::Rng,
+        mutation_count: usize,
+        cx: &mut ModelContext<Self>,
+    ) {
+        use rand::prelude::*;
+        use std::env;
+        use text::RandomCharIter;
+
+        let max_excerpts = env::var("MAX_EXCERPTS")
+            .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable"))
+            .unwrap_or(5);
+
+        let mut buffers = Vec::new();
+        for _ in 0..mutation_count {
+            let excerpt_ids = self
+                .buffers
+                .borrow()
+                .values()
+                .flat_map(|b| &b.excerpts)
+                .cloned()
+                .collect::<Vec<_>>();
+            if excerpt_ids.len() == 0 || (rng.gen() && excerpt_ids.len() < max_excerpts) {
+                let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
+                    let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
+                    buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx)));
+                    let buffer = buffers.last().unwrap();
+                    log::info!(
+                        "Creating new buffer {} with text: {:?}",
+                        buffer.id(),
+                        buffer.read(cx).text()
+                    );
+                    buffers.last().unwrap().clone()
+                } else {
+                    self.buffers
+                        .borrow()
+                        .values()
+                        .choose(rng)
+                        .unwrap()
+                        .buffer
+                        .clone()
+                };
+
+                let buffer = buffer_handle.read(cx);
+                let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
+                let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+                log::info!(
+                    "Inserting excerpt from buffer {} and range {:?}: {:?}",
+                    buffer_handle.id(),
+                    start_ix..end_ix,
+                    &buffer.text()[start_ix..end_ix]
+                );
+
+                let excerpt_id = self.push_excerpts(buffer_handle.clone(), [start_ix..end_ix], cx);
+                log::info!("Inserted with id: {:?}", excerpt_id);
+            } else {
+                let remove_count = rng.gen_range(1..=excerpt_ids.len());
+                let mut excerpts_to_remove = excerpt_ids
+                    .choose_multiple(rng, remove_count)
+                    .cloned()
+                    .collect::<Vec<_>>();
+                excerpts_to_remove.sort();
+                log::info!("Removing excerpts {:?}", excerpts_to_remove);
+                self.remove_excerpts(&excerpts_to_remove, cx);
+            }
+        }
+    }
+
+    pub fn randomly_mutate(
+        &mut self,
+        rng: &mut impl rand::Rng,
+        mutation_count: usize,
+        cx: &mut ModelContext<Self>,
+    ) {
+        if rng.gen_bool(0.7) || self.singleton {
+            self.randomly_edit(rng, mutation_count, cx);
+        } else {
+            self.randomly_edit_excerpts(rng, mutation_count, cx);
+        }
+    }
 }
 
 impl Entity for MultiBuffer {
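
randomly_edit_excerpts and randomly_mutate are test helpers: they interleave plain text edits with excerpt insertions and removals, with the number of excerpts capped by the MAX_EXCERPTS environment variable (default 5). A minimal sketch of how a randomized test might drive them (seed handling assumed):

    // Hedged test sketch: seed the RNG so failures are replayable, then apply a
    // burst of random mutations and verify invariants afterwards.
    use rand::prelude::*;
    let mut rng = StdRng::seed_from_u64(seed);
    multibuffer.update(cx, |multibuffer, cx| {
        multibuffer.randomly_mutate(&mut rng, 10, cx);
    });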
@@ -1360,6 +1485,48 @@ impl MultiBufferSnapshot {
         }
     }
 
+    pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+        if let Some(excerpt) = self.as_singleton() {
+            return excerpt.buffer.offset_to_point_utf16(offset);
+        }
+
+        let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>();
+        cursor.seek(&offset, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let (start_offset, start_point) = cursor.start();
+            let overshoot = offset - start_offset;
+            let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer);
+            let excerpt_start_point = excerpt.range.start.to_point_utf16(&excerpt.buffer);
+            let buffer_point = excerpt
+                .buffer
+                .offset_to_point_utf16(excerpt_start_offset + overshoot);
+            *start_point + (buffer_point - excerpt_start_point)
+        } else {
+            self.excerpts.summary().text.lines_utf16
+        }
+    }
+
+    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+        if let Some(excerpt) = self.as_singleton() {
+            return excerpt.buffer.point_to_point_utf16(point);
+        }
+
+        let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>();
+        cursor.seek(&point, Bias::Right, &());
+        if let Some(excerpt) = cursor.item() {
+            let (start_offset, start_point) = cursor.start();
+            let overshoot = point - start_offset;
+            let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer);
+            let excerpt_start_point_utf16 = excerpt.range.start.to_point_utf16(&excerpt.buffer);
+            let buffer_point = excerpt
+                .buffer
+                .point_to_point_utf16(excerpt_start_point + overshoot);
+            *start_point + (buffer_point - excerpt_start_point_utf16)
+        } else {
+            self.excerpts.summary().text.lines_utf16
+        }
+    }
+
     pub fn point_to_offset(&self, point: Point) -> usize {
         if let Some(excerpt) = self.as_singleton() {
             return excerpt.buffer.point_to_offset(point);
@@ -1523,7 +1690,7 @@ impl MultiBufferSnapshot {
 
         let mut position = D::from_text_summary(&cursor.start().text);
         if let Some(excerpt) = cursor.item() {
-            if excerpt.id == anchor.excerpt_id && excerpt.buffer_id == anchor.buffer_id {
+            if excerpt.id == anchor.excerpt_id && Some(excerpt.buffer_id) == anchor.buffer_id {
                 let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
                 let excerpt_buffer_end = excerpt.range.end.summary::<D>(&excerpt.buffer);
                 let buffer_position = cmp::min(
@@ -1572,7 +1739,7 @@ impl MultiBufferSnapshot {
 
             let position = D::from_text_summary(&cursor.start().text);
             if let Some(excerpt) = cursor.item() {
-                if excerpt.id == *excerpt_id && excerpt.buffer_id == buffer_id {
+                if excerpt.id == *excerpt_id && Some(excerpt.buffer_id) == buffer_id {
                     let excerpt_buffer_start = excerpt.range.start.summary::<D>(&excerpt.buffer);
                     let excerpt_buffer_end = excerpt.range.end.summary::<D>(&excerpt.buffer);
                     summaries.extend(
@@ -1665,7 +1832,7 @@ impl MultiBufferSnapshot {
                             text_anchor = excerpt.range.end.clone();
                         }
                         Anchor {
-                            buffer_id: excerpt.buffer_id,
+                            buffer_id: Some(excerpt.buffer_id),
                             excerpt_id: excerpt.id.clone(),
                             text_anchor,
                         }
@@ -1682,7 +1849,7 @@ impl MultiBufferSnapshot {
                             text_anchor = excerpt.range.start.clone();
                         }
                         Anchor {
-                            buffer_id: excerpt.buffer_id,
+                            buffer_id: Some(excerpt.buffer_id),
                             excerpt_id: excerpt.id.clone(),
                             text_anchor,
                         }
@@ -1712,7 +1879,7 @@ impl MultiBufferSnapshot {
         let offset = position.to_offset(self);
         if let Some(excerpt) = self.as_singleton() {
             return Anchor {
-                buffer_id: excerpt.buffer_id,
+                buffer_id: Some(excerpt.buffer_id),
                 excerpt_id: excerpt.id.clone(),
                 text_anchor: excerpt.buffer.anchor_at(offset, bias),
             };
@@ -1734,7 +1901,7 @@ impl MultiBufferSnapshot {
             let text_anchor =
                 excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias));
             Anchor {
-                buffer_id: excerpt.buffer_id,
+                buffer_id: Some(excerpt.buffer_id),
                 excerpt_id: excerpt.id.clone(),
                 text_anchor,
             }
@@ -1753,7 +1920,7 @@ impl MultiBufferSnapshot {
                 let text_anchor = excerpt.clip_anchor(text_anchor);
                 drop(cursor);
                 return Anchor {
-                    buffer_id: excerpt.buffer_id,
+                    buffer_id: Some(excerpt.buffer_id),
                     excerpt_id,
                     text_anchor,
                 };
@@ -1768,27 +1935,72 @@ impl MultiBufferSnapshot {
         } else if let Some((buffer_id, buffer_snapshot)) =
             self.buffer_snapshot_for_excerpt(&anchor.excerpt_id)
         {
-            anchor.buffer_id == buffer_id && buffer_snapshot.can_resolve(&anchor.text_anchor)
+            anchor.buffer_id == Some(buffer_id) && buffer_snapshot.can_resolve(&anchor.text_anchor)
         } else {
             false
         }
     }
 
-    pub fn range_contains_excerpt_boundary<T: ToOffset>(&self, range: Range<T>) -> bool {
-        let start = range.start.to_offset(self);
-        let end = range.end.to_offset(self);
-        let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>();
-        cursor.seek(&start, Bias::Right, &());
-        let start_id = cursor
-            .item()
-            .or_else(|| cursor.prev_item())
-            .map(|excerpt| &excerpt.id);
-        cursor.seek_forward(&end, Bias::Right, &());
-        let end_id = cursor
-            .item()
-            .or_else(|| cursor.prev_item())
-            .map(|excerpt| &excerpt.id);
-        start_id != end_id
+    pub fn excerpt_boundaries_in_range<'a, R, T>(
+        &'a self,
+        range: R,
+    ) -> impl Iterator<Item = ExcerptBoundary> + 'a
+    where
+        R: RangeBounds<T>,
+        T: ToOffset,
+    {
+        let start_offset;
+        let start = match range.start_bound() {
+            Bound::Included(start) => {
+                start_offset = start.to_offset(self);
+                Bound::Included(start_offset)
+            }
+            Bound::Excluded(start) => {
+                start_offset = start.to_offset(self);
+                Bound::Excluded(start_offset)
+            }
+            Bound::Unbounded => {
+                start_offset = 0;
+                Bound::Unbounded
+            }
+        };
+        let end = match range.end_bound() {
+            Bound::Included(end) => Bound::Included(end.to_offset(self)),
+            Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)),
+            Bound::Unbounded => Bound::Unbounded,
+        };
+        let bounds = (start, end);
+
+        let mut cursor = self.excerpts.cursor::<(usize, Point)>();
+        cursor.seek(&start_offset, Bias::Right, &());
+        if cursor.item().is_none() {
+            cursor.prev(&());
+        }
+        if !bounds.contains(&cursor.start().0) {
+            cursor.next(&());
+        }
+
+        let mut prev_buffer_id = cursor.prev_item().map(|excerpt| excerpt.buffer_id);
+        std::iter::from_fn(move || {
+            if self.singleton {
+                None
+            } else if bounds.contains(&cursor.start().0) {
+                let excerpt = cursor.item()?;
+                let starts_new_buffer = Some(excerpt.buffer_id) != prev_buffer_id;
+                let boundary = ExcerptBoundary {
+                    row: cursor.start().1.row,
+                    buffer: excerpt.buffer.clone(),
+                    range: excerpt.range.clone(),
+                    starts_new_buffer,
+                };
+
+                prev_buffer_id = Some(excerpt.buffer_id);
+                cursor.next(&());
+                Some(boundary)
+            } else {
+                None
+            }
+        })
     }
 
     pub fn parse_count(&self) -> usize {
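
The boolean range_contains_excerpt_boundary is generalized into excerpt_boundaries_in_range, which yields each boundary in the range together with its row, the source buffer, the excerpted range, and whether it starts a new buffer; singleton multibuffers yield nothing. A hedged usage sketch:

    // Sketch: walk the boundaries in a visible range and note where a new
    // underlying buffer begins (e.g. to render a header block for it).
    for boundary in snapshot.excerpt_boundaries_in_range(visible_start..visible_end) {
        if boundary.starts_new_buffer {
            log::info!("excerpt starting a new buffer at row {}", boundary.row);
        }
    }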

crates/editor/src/multi_buffer/anchor.rs šŸ”—

@@ -9,7 +9,7 @@ use text::{rope::TextDimension, Point};
 
 #[derive(Clone, Eq, PartialEq, Debug, Hash)]
 pub struct Anchor {
-    pub(crate) buffer_id: usize,
+    pub(crate) buffer_id: Option<usize>,
     pub(crate) excerpt_id: ExcerptId,
     pub(crate) text_anchor: text::Anchor,
 }
@@ -17,7 +17,7 @@ pub struct Anchor {
 impl Anchor {
     pub fn min() -> Self {
         Self {
-            buffer_id: 0,
+            buffer_id: None,
             excerpt_id: ExcerptId::min(),
             text_anchor: text::Anchor::min(),
         }
@@ -25,7 +25,7 @@ impl Anchor {
 
     pub fn max() -> Self {
         Self {
-            buffer_id: 0,
+            buffer_id: None,
             excerpt_id: ExcerptId::max(),
             text_anchor: text::Anchor::max(),
         }
@@ -46,11 +46,11 @@ impl Anchor {
                 // Even though the anchor refers to a valid excerpt the underlying buffer might have
                 // changed. In that case, treat the anchor as if it were at the start of that
                 // excerpt.
-                if self.buffer_id == buffer_id && other.buffer_id == buffer_id {
+                if self.buffer_id == Some(buffer_id) && other.buffer_id == Some(buffer_id) {
                     self.text_anchor.cmp(&other.text_anchor, buffer_snapshot)
-                } else if self.buffer_id == buffer_id {
+                } else if self.buffer_id == Some(buffer_id) {
                     Ok(Ordering::Greater)
-                } else if other.buffer_id == buffer_id {
+                } else if other.buffer_id == Some(buffer_id) {
                     Ok(Ordering::Less)
                 } else {
                     Ok(Ordering::Equal)
@@ -68,7 +68,7 @@ impl Anchor {
             if let Some((buffer_id, buffer_snapshot)) =
                 snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
             {
-                if self.buffer_id == buffer_id {
+                if self.buffer_id == Some(buffer_id) {
                     return Self {
                         buffer_id: self.buffer_id,
                         excerpt_id: self.excerpt_id.clone(),
@@ -85,7 +85,7 @@ impl Anchor {
             if let Some((buffer_id, buffer_snapshot)) =
                 snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id)
             {
-                if self.buffer_id == buffer_id {
+                if self.buffer_id == Some(buffer_id) {
                     return Self {
                         buffer_id: self.buffer_id,
                         excerpt_id: self.excerpt_id.clone(),

crates/find/src/find.rs šŸ”—

@@ -355,11 +355,8 @@ impl FindBar {
         if let Some(mut index) = self.active_match_index {
             if let Some(editor) = self.active_editor.as_ref() {
                 editor.update(cx, |editor, cx| {
-                    let newest_selection = editor.newest_anchor_selection().cloned();
-                    if let Some(((_, ranges), newest_selection)) = editor
-                        .highlighted_ranges_for_type::<Self>()
-                        .zip(newest_selection)
-                    {
+                    let newest_selection = editor.newest_anchor_selection().clone();
+                    if let Some((_, ranges)) = editor.highlighted_ranges_for_type::<Self>() {
                         let position = newest_selection.head();
                         let buffer = editor.buffer().read(cx).read(cx);
                         if ranges[index].start.cmp(&position, &buffer).unwrap().is_gt() {
@@ -467,7 +464,7 @@ impl FindBar {
                 self.pending_search = Some(cx.spawn(|this, mut cx| async move {
                     match ranges.await {
                         Ok(ranges) => {
-                            if let Some(editor) = cx.read(|cx| editor.upgrade(cx)) {
+                            if let Some(editor) = editor.upgrade(&cx) {
                                 this.update(&mut cx, |this, cx| {
                                     this.highlighted_editors.insert(editor.downgrade());
                                     editor.update(cx, |editor, cx| {
@@ -502,7 +499,7 @@ impl FindBar {
     fn active_match_index(&mut self, cx: &mut ViewContext<Self>) -> Option<usize> {
         let editor = self.active_editor.as_ref()?;
         let editor = editor.read(cx);
-        let position = editor.newest_anchor_selection()?.head();
+        let position = editor.newest_anchor_selection().head();
         let ranges = editor.highlighted_ranges_for_type::<Self>()?.1;
         if ranges.is_empty() {
             None
@@ -655,7 +652,7 @@ mod tests {
             )
         });
         let editor = cx.add_view(Default::default(), |cx| {
-            Editor::new(buffer.clone(), Arc::new(EditorSettings::test), cx)
+            Editor::new(buffer.clone(), Arc::new(EditorSettings::test), None, cx)
         });
 
         let find_bar = cx.add_view(Default::default(), |cx| {

crates/gpui/src/app.rs šŸ”—

@@ -80,8 +80,14 @@ pub trait UpdateModel {
 }
 
 pub trait UpgradeModelHandle {
-    fn upgrade_model_handle<T: Entity>(&self, handle: WeakModelHandle<T>)
-        -> Option<ModelHandle<T>>;
+    fn upgrade_model_handle<T: Entity>(
+        &self,
+        handle: &WeakModelHandle<T>,
+    ) -> Option<ModelHandle<T>>;
+}
+
+pub trait UpgradeViewHandle {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>>;
 }
 
 pub trait ReadView {
@@ -558,12 +564,18 @@ impl UpdateModel for AsyncAppContext {
 impl UpgradeModelHandle for AsyncAppContext {
     fn upgrade_model_handle<T: Entity>(
         &self,
-        handle: WeakModelHandle<T>,
+        handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
         self.0.borrow_mut().upgrade_model_handle(handle)
     }
 }
 
+impl UpgradeViewHandle for AsyncAppContext {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>> {
+        self.0.borrow_mut().upgrade_view_handle(handle)
+    }
+}
+
 impl ReadModelWith for AsyncAppContext {
     fn read_model_with<E: Entity, T>(
         &self,
@@ -831,6 +843,17 @@ impl MutableAppContext {
             .push(handler);
     }
 
+    pub fn add_async_action<A, V, F>(&mut self, mut handler: F)
+    where
+        A: Action,
+        V: View,
+        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> Option<Task<Result<()>>>,
+    {
+        self.add_action(move |view, action, cx| {
+            handler(view, action, cx).map(|task| task.detach_and_log_err(cx));
+        })
+    }
+
     pub fn add_global_action<A, F>(&mut self, mut handler: F)
     where
         A: Action,
@@ -1721,12 +1744,18 @@ impl UpdateModel for MutableAppContext {
 impl UpgradeModelHandle for MutableAppContext {
     fn upgrade_model_handle<T: Entity>(
         &self,
-        handle: WeakModelHandle<T>,
+        handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
         self.cx.upgrade_model_handle(handle)
     }
 }
 
+impl UpgradeViewHandle for MutableAppContext {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>> {
+        self.cx.upgrade_view_handle(handle)
+    }
+}
+
 impl ReadView for MutableAppContext {
     fn read_view<T: View>(&self, handle: &ViewHandle<T>) -> &T {
         if let Some(view) = self.cx.views.get(&(handle.window_id, handle.view_id)) {
@@ -1835,7 +1864,7 @@ impl ReadModel for AppContext {
 impl UpgradeModelHandle for AppContext {
     fn upgrade_model_handle<T: Entity>(
         &self,
-        handle: WeakModelHandle<T>,
+        handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
         if self.models.contains_key(&handle.model_id) {
             Some(ModelHandle::new(handle.model_id, &self.ref_counts))
@@ -1845,6 +1874,20 @@ impl UpgradeModelHandle for AppContext {
     }
 }
 
+impl UpgradeViewHandle for AppContext {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>> {
+        if self.ref_counts.lock().is_entity_alive(handle.view_id) {
+            Some(ViewHandle::new(
+                handle.window_id,
+                handle.view_id,
+                &self.ref_counts,
+            ))
+        } else {
+            None
+        }
+    }
+}
+
 impl ReadView for AppContext {
     fn read_view<T: View>(&self, handle: &ViewHandle<T>) -> &T {
         if let Some(view) = self.views.get(&(handle.window_id, handle.view_id)) {
@@ -2217,7 +2260,7 @@ impl<M> UpdateModel for ModelContext<'_, M> {
 impl<M> UpgradeModelHandle for ModelContext<'_, M> {
     fn upgrade_model_handle<T: Entity>(
         &self,
-        handle: WeakModelHandle<T>,
+        handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
         self.cx.upgrade_model_handle(handle)
     }
@@ -2547,12 +2590,18 @@ impl<V> ReadModel for ViewContext<'_, V> {
 impl<V> UpgradeModelHandle for ViewContext<'_, V> {
     fn upgrade_model_handle<T: Entity>(
         &self,
-        handle: WeakModelHandle<T>,
+        handle: &WeakModelHandle<T>,
     ) -> Option<ModelHandle<T>> {
         self.cx.upgrade_model_handle(handle)
     }
 }
 
+impl<V> UpgradeViewHandle for ViewContext<'_, V> {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>> {
+        self.cx.upgrade_view_handle(handle)
+    }
+}
+
 impl<V: View> UpdateModel for ViewContext<'_, V> {
     fn update_model<T: Entity, O>(
         &mut self,
@@ -2654,7 +2703,7 @@ impl<T: Entity> ModelHandle<T> {
         let (mut tx, mut rx) = mpsc::channel(1);
         let mut cx = cx.cx.borrow_mut();
         let subscription = cx.observe(self, move |_, _| {
-            tx.blocking_send(()).ok();
+            tx.try_send(()).ok();
         });
 
         let duration = if std::env::var("CI").is_ok() {
@@ -2850,7 +2899,7 @@ impl<T: Entity> WeakModelHandle<T> {
         self.model_id
     }
 
-    pub fn upgrade(self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<T>> {
+    pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<T>> {
         cx.upgrade_model_handle(self)
     }
 }
@@ -2958,7 +3007,7 @@ impl<T: View> ViewHandle<T> {
         let (mut tx, mut rx) = mpsc::channel(1);
         let mut cx = cx.cx.borrow_mut();
         let subscription = cx.observe(self, move |_, _| {
-            tx.blocking_send(()).ok();
+            tx.try_send(()).ok();
         });
 
         let duration = if std::env::var("CI").is_ok() {
@@ -3266,16 +3315,8 @@ impl<T: View> WeakViewHandle<T> {
         self.view_id
     }
 
-    pub fn upgrade(&self, cx: &AppContext) -> Option<ViewHandle<T>> {
-        if cx.ref_counts.lock().is_entity_alive(self.view_id) {
-            Some(ViewHandle::new(
-                self.window_id,
-                self.view_id,
-                &cx.ref_counts,
-            ))
-        } else {
-            None
-        }
+    pub fn upgrade(&self, cx: &impl UpgradeViewHandle) -> Option<ViewHandle<T>> {
+        cx.upgrade_view_handle(self)
     }
 }
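
Two related gpui additions here: add_async_action registers a handler that may return a task (Some(task) is detached with its error logged, None means the action was not handled), and UpgradeViewHandle lets weak view handles be upgraded from async and layout contexts, not just AppContext. A hedged sketch tying them together; Workspace, Rename, and active_editor are hypothetical names:

    cx.add_async_action(|workspace: &mut Workspace, _: &Rename, cx| {
        // Capture a weak handle, then upgrade it once the async work runs.
        let weak_editor = workspace.active_editor()?.downgrade();
        Some(cx.spawn(|_, cx| async move {
            if let Some(_editor) = weak_editor.upgrade(&cx) {
                // drive the async work against the still-alive editor
            }
            Ok(())
        }))
    });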
 

crates/gpui/src/executor.rs šŸ”—

@@ -550,8 +550,11 @@ impl Background {
     pub async fn simulate_random_delay(&self) {
         match self {
             Self::Deterministic { executor, .. } => {
-                if executor.state.lock().rng.gen_range(0..100) < 20 {
-                    yield_now().await;
+                if executor.state.lock().rng.gen_bool(0.2) {
+                    let yields = executor.state.lock().rng.gen_range(1..=10);
+                    for _ in 0..yields {
+                        yield_now().await;
+                    }
                 }
             }
             _ => panic!("this method can only be called on a deterministic executor"),
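
simulate_random_delay now yields between 1 and 10 times (still roughly 20% of the time), which shuffles task orderings more aggressively under the deterministic executor. A tiny sketch of the intended use in tests (context accessor assumed):

    // Hedged sketch: insert a simulated delay between async steps in a
    // deterministic test so different seeds exercise different interleavings.
    cx.background().simulate_random_delay().await;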

crates/gpui/src/platform/mac/window.rs šŸ”—

@@ -107,6 +107,10 @@ unsafe fn build_classes() {
             sel!(scrollWheel:),
             handle_view_event as extern "C" fn(&Object, Sel, id),
         );
+        decl.add_method(
+            sel!(cancelOperation:),
+            cancel_operation as extern "C" fn(&Object, Sel, id),
+        );
 
         decl.add_method(
             sel!(makeBackingLayer),
@@ -538,6 +542,34 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) {
     }
 }
 
+// Allows us to receive `cmd-.` (the shortcut for closing a dialog)
+// https://bugs.eclipse.org/bugs/show_bug.cgi?id=300620#c6
+extern "C" fn cancel_operation(this: &Object, _sel: Sel, _sender: id) {
+    let window_state = unsafe { get_window_state(this) };
+    let mut window_state_borrow = window_state.as_ref().borrow_mut();
+
+    let chars = ".".to_string();
+    let keystroke = Keystroke {
+        cmd: true,
+        ctrl: false,
+        alt: false,
+        shift: false,
+        key: chars.clone(),
+    };
+    let event = Event::KeyDown {
+        keystroke: keystroke.clone(),
+        chars: chars.clone(),
+        is_held: false,
+    };
+
+    window_state_borrow.last_fresh_keydown = Some((keystroke, chars));
+    if let Some(mut callback) = window_state_borrow.event_callback.take() {
+        drop(window_state_borrow);
+        callback(event);
+        window_state.borrow_mut().event_callback = Some(callback);
+    }
+}
+
 extern "C" fn send_event(this: &Object, _: Sel, native_event: id) {
     unsafe {
         let () = msg_send![super(this, class!(NSWindow)), sendEvent: native_event];
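
macOS delivers cmd-. via cancelOperation: rather than an ordinary keyDown, so the new handler synthesizes a regular keystroke event and records it as the last fresh keydown; from there it flows through the normal keymap. A hedged sketch of what that enables, assuming gpui's usual Binding/add_bindings pattern (action and context names hypothetical):

    // With the synthesized event, "cmd-." can be bound like any other key.
    cx.add_bindings(vec![Binding::new("cmd-.", Cancel, Some("Editor"))]);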

crates/gpui/src/presenter.rs šŸ”—

@@ -7,7 +7,8 @@ use crate::{
     platform::Event,
     text_layout::TextLayoutCache,
     Action, AnyAction, AnyViewHandle, AssetCache, ElementBox, Entity, FontSystem, ModelHandle,
-    ReadModel, ReadView, Scene, View, ViewHandle,
+    ReadModel, ReadView, Scene, UpgradeModelHandle, UpgradeViewHandle, View, ViewHandle,
+    WeakModelHandle, WeakViewHandle,
 };
 use pathfinder_geometry::vector::{vec2f, Vector2F};
 use serde_json::json;
@@ -270,6 +271,21 @@ impl<'a> ReadModel for LayoutContext<'a> {
     }
 }
 
+impl<'a> UpgradeModelHandle for LayoutContext<'a> {
+    fn upgrade_model_handle<T: Entity>(
+        &self,
+        handle: &WeakModelHandle<T>,
+    ) -> Option<ModelHandle<T>> {
+        self.app.upgrade_model_handle(handle)
+    }
+}
+
+impl<'a> UpgradeViewHandle for LayoutContext<'a> {
+    fn upgrade_view_handle<T: View>(&self, handle: &WeakViewHandle<T>) -> Option<ViewHandle<T>> {
+        self.app.upgrade_view_handle(handle)
+    }
+}
+
 pub struct PaintContext<'a> {
     rendered_views: &'a mut HashMap<usize, ElementBox>,
     pub scene: &'a mut Scene,

crates/language/src/buffer.rs šŸ”—

@@ -68,6 +68,7 @@ pub struct Buffer {
     remote_selections: TreeMap<ReplicaId, SelectionSet>,
     selections_update_count: usize,
     diagnostics_update_count: usize,
+    file_update_count: usize,
     language_server: Option<LanguageServerState>,
     completion_triggers: Vec<String>,
     deferred_ops: OperationQueue<Operation>,
@@ -78,8 +79,10 @@ pub struct Buffer {
 pub struct BufferSnapshot {
     text: text::BufferSnapshot,
     tree: Option<Tree>,
+    path: Option<Arc<Path>>,
     diagnostics: DiagnosticSet,
     diagnostics_update_count: usize,
+    file_update_count: usize,
     remote_selections: TreeMap<ReplicaId, SelectionSet>,
     selections_update_count: usize,
     is_parsing: bool,
@@ -111,16 +114,22 @@ pub struct Diagnostic {
 }
 
 #[derive(Clone, Debug)]
-pub struct Completion<T> {
-    pub old_range: Range<T>,
+pub struct Completion {
+    pub old_range: Range<Anchor>,
     pub new_text: String,
     pub label: CompletionLabel,
     pub lsp_completion: lsp::CompletionItem,
 }
 
+#[derive(Clone, Debug)]
+pub struct CodeAction {
+    pub range: Range<Anchor>,
+    pub lsp_action: lsp::CodeAction,
+}
+
 struct LanguageServerState {
     server: Arc<LanguageServer>,
-    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
+    latest_snapshot: watch::Sender<LanguageServerSnapshot>,
     pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
     next_version: usize,
     _maintain_server: Task<()>,
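
Completion loses its type parameter (its range is now always anchor-based), and the new CodeAction pairs the buffer range an action applies to with the raw lsp::CodeAction payload. A small hedged sketch of consuming one:

    // The LSP payload carries the human-readable title; the stored range says
    // where in the buffer the action applies.
    fn code_action_label(action: &CodeAction) -> String {
        action.lsp_action.title.clone()
    }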
@@ -141,12 +150,12 @@ pub enum Operation {
         lamport_timestamp: clock::Lamport,
     },
     UpdateSelections {
-        replica_id: ReplicaId,
         selections: Arc<[Selection<Anchor>]>,
         lamport_timestamp: clock::Lamport,
     },
     UpdateCompletionTriggers {
         triggers: Vec<String>,
+        lamport_timestamp: clock::Lamport,
     },
 }
 
@@ -192,24 +201,6 @@ pub trait File {
         cx: &mut MutableAppContext,
     ) -> Task<Result<(clock::Global, SystemTime)>>;
 
-    fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
-        -> Option<Task<Result<()>>>;
-
-    fn completions(
-        &self,
-        buffer_id: u64,
-        position: Anchor,
-        language: Option<Arc<Language>>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<Vec<Completion<Anchor>>>>;
-
-    fn apply_additional_edits_for_completion(
-        &self,
-        buffer_id: u64,
-        completion: Completion<Anchor>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<Vec<clock::Local>>>;
-
     fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
 
     fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
@@ -234,12 +225,21 @@ pub trait LocalFile: File {
     );
 }
 
-#[cfg(feature = "test-support")]
+#[cfg(any(test, feature = "test-support"))]
 pub struct FakeFile {
     pub path: Arc<Path>,
 }
 
-#[cfg(feature = "test-support")]
+#[cfg(any(test, feature = "test-support"))]
+impl FakeFile {
+    pub fn new(path: impl AsRef<Path>) -> Self {
+        Self {
+            path: path.as_ref().into(),
+        }
+    }
+}
+
+#[cfg(any(test, feature = "test-support"))]
 impl File for FakeFile {
     fn as_local(&self) -> Option<&dyn LocalFile> {
         Some(self)
@@ -275,29 +275,6 @@ impl File for FakeFile {
         cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
     }
 
-    fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option<Task<Result<()>>> {
-        None
-    }
-
-    fn completions(
-        &self,
-        _: u64,
-        _: Anchor,
-        _: Option<Arc<Language>>,
-        _: &mut MutableAppContext,
-    ) -> Task<Result<Vec<Completion<Anchor>>>> {
-        Task::ready(Ok(Default::default()))
-    }
-
-    fn apply_additional_edits_for_completion(
-        &self,
-        _: u64,
-        _: Completion<Anchor>,
-        _: &mut MutableAppContext,
-    ) -> Task<Result<Vec<clock::Local>>> {
-        Task::ready(Ok(Default::default()))
-    }
-
     fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
 
     fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
@@ -311,7 +288,7 @@ impl File for FakeFile {
     }
 }
 
-#[cfg(feature = "test-support")]
+#[cfg(any(test, feature = "test-support"))]
 impl LocalFile for FakeFile {
     fn abs_path(&self, _: &AppContext) -> PathBuf {
         self.path.to_path_buf()
@@ -378,6 +355,7 @@ pub(crate) struct Diff {
     base_version: clock::Global,
     new_text: Arc<str>,
     changes: Vec<(ChangeTag, usize)>,
+    start_offset: usize,
 }
 
 #[derive(Clone, Copy)]
@@ -425,27 +403,19 @@ impl Buffer {
         file: Option<Box<dyn File>>,
         cx: &mut ModelContext<Self>,
     ) -> Result<Self> {
-        let fragments_len = message.fragments.len();
-        let buffer = TextBuffer::from_parts(
+        let buffer = TextBuffer::new(
             replica_id,
             message.id,
-            &message.visible_text,
-            &message.deleted_text,
-            message
-                .undo_map
-                .into_iter()
-                .map(proto::deserialize_undo_map_entry),
-            message
-                .fragments
-                .into_iter()
-                .enumerate()
-                .map(|(i, fragment)| {
-                    proto::deserialize_buffer_fragment(fragment, i, fragments_len)
-                }),
-            message.lamport_timestamp,
-            From::from(message.version),
+            History::new(Arc::from(message.base_text)),
         );
         let mut this = Self::build(buffer, file);
+        let ops = message
+            .operations
+            .into_iter()
+            .map(proto::deserialize_operation)
+            .collect::<Result<Vec<_>>>()?;
+        this.apply_ops(ops, cx)?;
+
         for selection_set in message.selections {
             this.remote_selections.insert(
                 selection_set.replica_id as ReplicaId,
@@ -464,37 +434,24 @@ impl Buffer {
             DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
             cx,
         );
-
         this.completion_triggers = message.completion_triggers;
 
-        let deferred_ops = message
-            .deferred_operations
-            .into_iter()
-            .map(proto::deserialize_operation)
-            .collect::<Result<Vec<_>>>()?;
-        this.apply_ops(deferred_ops, cx)?;
-
         Ok(this)
     }
 
     pub fn to_proto(&self) -> proto::BufferState {
+        let mut operations = self
+            .text
+            .history()
+            .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone())))
+            .chain(self.deferred_ops.iter().map(proto::serialize_operation))
+            .collect::<Vec<_>>();
+        operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
         proto::BufferState {
             id: self.remote_id(),
             file: self.file.as_ref().map(|f| f.to_proto()),
-            visible_text: self.text.text(),
-            deleted_text: self.text.deleted_text(),
-            undo_map: self
-                .text
-                .undo_history()
-                .map(proto::serialize_undo_map_entry)
-                .collect(),
-            version: From::from(&self.version),
-            lamport_timestamp: self.lamport_clock.value,
-            fragments: self
-                .text
-                .fragments()
-                .map(proto::serialize_buffer_fragment)
-                .collect(),
+            base_text: self.base_text().to_string(),
+            operations,
             selections: self
                 .remote_selections
                 .iter()
@@ -505,16 +462,6 @@ impl Buffer {
                 })
                 .collect(),
             diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
-            deferred_operations: self
-                .deferred_ops
-                .iter()
-                .map(proto::serialize_operation)
-                .chain(
-                    self.text
-                        .deferred_ops()
-                        .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
-                )
-                .collect(),
             completion_triggers: self.completion_triggers.clone(),
         }
     }
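
Buffers are now replicated as their base text plus an operation log sorted by Lamport timestamp, instead of serialized fragment trees; from_proto simply replays the operations. A hedged round-trip sketch (argument order taken from the constructor above):

    let state = buffer.read(cx).to_proto();
    let replica = cx.add_model(|cx| {
        // Replay the operations on a new replica with id 1 and no file.
        Buffer::from_proto(1, state, None, cx).unwrap()
    });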
@@ -557,6 +504,7 @@ impl Buffer {
             selections_update_count: 0,
             diagnostics: Default::default(),
             diagnostics_update_count: 0,
+            file_update_count: 0,
             language_server: None,
             completion_triggers: Default::default(),
             deferred_ops: OperationQueue::new(),
@@ -569,9 +517,11 @@ impl Buffer {
         BufferSnapshot {
             text: self.text.snapshot(),
             tree: self.syntax_tree(),
+            path: self.file.as_ref().map(|f| f.path().clone()),
             remote_selections: self.remote_selections.clone(),
             diagnostics: self.diagnostics.clone(),
             diagnostics_update_count: self.diagnostics_update_count,
+            file_update_count: self.file_update_count,
             is_parsing: self.parsing_in_background,
             language: self.language.clone(),
             parse_count: self.parse_count,
@@ -583,52 +533,6 @@ impl Buffer {
         self.file.as_deref()
     }
 
-    pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let file = if let Some(file) = self.file.as_ref() {
-            file
-        } else {
-            return Task::ready(Err(anyhow!("buffer has no file")));
-        };
-
-        if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
-            let server = server.clone();
-            let abs_path = file.as_local().unwrap().abs_path(cx);
-            let version = self.version();
-            cx.spawn(|this, mut cx| async move {
-                let edits = server
-                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
-                        text_document: lsp::TextDocumentIdentifier::new(
-                            lsp::Url::from_file_path(&abs_path).unwrap(),
-                        ),
-                        options: Default::default(),
-                        work_done_progress_params: Default::default(),
-                    })
-                    .await?;
-
-                if let Some(edits) = edits {
-                    this.update(&mut cx, |this, cx| {
-                        if this.version == version {
-                            this.apply_lsp_edits(edits, cx)?;
-                            Ok(())
-                        } else {
-                            Err(anyhow!("buffer edited since starting to format"))
-                        }
-                    })
-                } else {
-                    Ok(())
-                }
-            })
-        } else {
-            let format = file.format_remote(self.remote_id(), cx.as_mut());
-            cx.spawn(|_, _| async move {
-                if let Some(format) = format {
-                    format.await?;
-                }
-                Ok(())
-            })
-        }
-    }
-
     pub fn save(
         &mut self,
         cx: &mut ModelContext<Self>,
@@ -660,76 +564,21 @@ impl Buffer {
         language_server: Option<Arc<lsp::LanguageServer>>,
         cx: &mut ModelContext<Self>,
     ) {
-        self.language_server = if let Some(server) = language_server {
+        self.language_server = if let Some((server, file)) =
+            language_server.zip(self.file.as_ref().and_then(|f| f.as_local()))
+        {
+            let initial_snapshot = LanguageServerSnapshot {
+                buffer_snapshot: self.text.snapshot(),
+                version: 0,
+                path: file.abs_path(cx).into(),
+            };
             let (latest_snapshot_tx, mut latest_snapshot_rx) =
-                watch::channel::<Option<LanguageServerSnapshot>>();
-
-            let maintain_changes = cx.background().spawn({
-                let server = server.clone();
-                async move {
-                    let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
-                    while let Some(snapshot) = latest_snapshot_rx.recv().await {
-                        if let Some(snapshot) = snapshot {
-                            let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
-                            if let Some(prev_snapshot) = prev_snapshot {
-                                let changes = lsp::DidChangeTextDocumentParams {
-                                    text_document: lsp::VersionedTextDocumentIdentifier::new(
-                                        uri,
-                                        snapshot.version as i32,
-                                    ),
-                                    content_changes: snapshot
-                                        .buffer_snapshot
-                                        .edits_since::<(PointUtf16, usize)>(
-                                            prev_snapshot.buffer_snapshot.version(),
-                                        )
-                                        .map(|edit| {
-                                            let edit_start = edit.new.start.0;
-                                            let edit_end =
-                                                edit_start + (edit.old.end.0 - edit.old.start.0);
-                                            let new_text = snapshot
-                                                .buffer_snapshot
-                                                .text_for_range(edit.new.start.1..edit.new.end.1)
-                                                .collect();
-                                            lsp::TextDocumentContentChangeEvent {
-                                                range: Some(lsp::Range::new(
-                                                    edit_start.to_lsp_position(),
-                                                    edit_end.to_lsp_position(),
-                                                )),
-                                                range_length: None,
-                                                text: new_text,
-                                            }
-                                        })
-                                        .collect(),
-                                };
-                                server
-                                    .notify::<lsp::notification::DidChangeTextDocument>(changes)
-                                    .await?;
-                            } else {
-                                server
-                                    .notify::<lsp::notification::DidOpenTextDocument>(
-                                        lsp::DidOpenTextDocumentParams {
-                                            text_document: lsp::TextDocumentItem::new(
-                                                uri,
-                                                Default::default(),
-                                                snapshot.version as i32,
-                                                snapshot.buffer_snapshot.text().to_string(),
-                                            ),
-                                        },
-                                    )
-                                    .await?;
-                            }
-
-                            prev_snapshot = Some(snapshot);
-                        }
-                    }
-                    Ok(())
-                }
-            });
+                watch::channel_with::<LanguageServerSnapshot>(initial_snapshot.clone());
 
             Some(LanguageServerState {
                 latest_snapshot: latest_snapshot_tx,
-                pending_snapshots: Default::default(),
-                next_version: 0,
+                pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]),
+                next_version: 1,
                 server: server.clone(),
                 _maintain_server: cx.spawn_weak(|this, mut cx| async move {
                     let mut capabilities = server.capabilities();
@@ -741,9 +590,13 @@ impl Buffer {
                                     .and_then(|c| c.trigger_characters)
                                     .unwrap_or_default();
                                 this.update(&mut cx, |this, cx| {
+                                    let lamport_timestamp = this.text.lamport_clock.tick();
                                     this.completion_triggers = triggers.clone();
                                     this.send_operation(
-                                        Operation::UpdateCompletionTriggers { triggers },
+                                        Operation::UpdateCompletionTriggers {
+                                            triggers,
+                                            lamport_timestamp,
+                                        },
                                         cx,
                                     );
                                     cx.notify();
@@ -756,14 +609,69 @@ impl Buffer {
                         }
                     }
 
+                    let maintain_changes = cx.background().spawn(async move {
+                        let initial_snapshot =
+                            latest_snapshot_rx.recv().await.ok_or_else(|| {
+                                anyhow!("buffer dropped before sending DidOpenTextDocument")
+                            })?;
+                        server
+                            .notify::<lsp::notification::DidOpenTextDocument>(
+                                lsp::DidOpenTextDocumentParams {
+                                    text_document: lsp::TextDocumentItem::new(
+                                        lsp::Url::from_file_path(initial_snapshot.path).unwrap(),
+                                        Default::default(),
+                                        initial_snapshot.version as i32,
+                                        initial_snapshot.buffer_snapshot.text(),
+                                    ),
+                                },
+                            )
+                            .await?;
+
+                        let mut prev_version = initial_snapshot.buffer_snapshot.version().clone();
+                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
+                            let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
+                            let buffer_snapshot = snapshot.buffer_snapshot.clone();
+                            let content_changes = buffer_snapshot
+                                .edits_since::<(PointUtf16, usize)>(&prev_version)
+                                .map(|edit| {
+                                    let edit_start = edit.new.start.0;
+                                    let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
+                                    let new_text = buffer_snapshot
+                                        .text_for_range(edit.new.start.1..edit.new.end.1)
+                                        .collect();
+                                    lsp::TextDocumentContentChangeEvent {
+                                        range: Some(lsp::Range::new(
+                                            edit_start.to_lsp_position(),
+                                            edit_end.to_lsp_position(),
+                                        )),
+                                        range_length: None,
+                                        text: new_text,
+                                    }
+                                })
+                                .collect();
+                            let changes = lsp::DidChangeTextDocumentParams {
+                                text_document: lsp::VersionedTextDocumentIdentifier::new(
+                                    uri,
+                                    snapshot.version as i32,
+                                ),
+                                content_changes,
+                            };
+                            server
+                                .notify::<lsp::notification::DidChangeTextDocument>(changes)
+                                .await?;
+
+                            prev_version = snapshot.buffer_snapshot.version().clone();
+                        }
+
+                        Ok::<_, anyhow::Error>(())
+                    });
+
                     maintain_changes.log_err().await;
                 }),
             })
         } else {
             None
         };
-
-        self.update_language_server(cx);
     }
 
     pub fn did_save(
@@ -777,6 +685,7 @@ impl Buffer {
         self.saved_version = version;
         if let Some(new_file) = new_file {
             self.file = Some(new_file);
+            self.file_update_count += 1;
         }
         if let Some((state, local_file)) = &self
             .language_server
@@ -799,6 +708,7 @@ impl Buffer {
                 .detach()
         }
         cx.emit(Event::Saved);
+        cx.notify();
     }
 
     pub fn did_reload(
@@ -874,7 +784,9 @@ impl Buffer {
         }
 
         if file_changed {
+            self.file_update_count += 1;
             cx.emit(Event::FileHandleChanged);
+            cx.notify();
         }
         self.file = Some(new_file);
         task
@@ -904,6 +816,10 @@ impl Buffer {
         self.diagnostics_update_count
     }
 
+    pub fn file_update_count(&self) -> usize {
+        self.file_update_count
+    }
+
     pub(crate) fn syntax_tree(&self) -> Option<Tree> {
         if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
             self.interpolate_tree(syntax_tree);
@@ -1001,8 +917,8 @@ impl Buffer {
 
     pub fn update_diagnostics<T>(
         &mut self,
-        version: Option<i32>,
         mut diagnostics: Vec<DiagnosticEntry<T>>,
+        version: Option<i32>,
         cx: &mut ModelContext<Self>,
     ) -> Result<()>
     where
@@ -1019,14 +935,7 @@ impl Buffer {
         let version = version.map(|version| version as usize);
         let content =
             if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
-                language_server
-                    .pending_snapshots
-                    .retain(|&v, _| v >= version);
-                let snapshot = language_server
-                    .pending_snapshots
-                    .get(&version)
-                    .ok_or_else(|| anyhow!("missing snapshot"))?;
-                &snapshot.buffer_snapshot
+                language_server.snapshot_for_version(version)?
             } else {
                 self.deref()
             };
@@ -1278,6 +1187,7 @@ impl Buffer {
                 base_version,
                 new_text,
                 changes,
+                start_offset: 0,
             }
         })
     }
@@ -1285,16 +1195,21 @@ impl Buffer {
     pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
         if self.version == diff.base_version {
             self.start_transaction();
-            let mut offset = 0;
+            let mut offset = diff.start_offset;
             for (tag, len) in diff.changes {
                 let range = offset..(offset + len);
                 match tag {
                     ChangeTag::Equal => offset += len,
                     ChangeTag::Delete => {
-                        self.edit(Some(range), "", cx);
+                        self.edit([range], "", cx);
                     }
                     ChangeTag::Insert => {
-                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
+                        self.edit(
+                            [offset..offset],
+                            &diff.new_text
+                                [range.start - diff.start_offset..range.end - diff.start_offset],
+                            cx,
+                        );
                         offset += len;
                     }
                 }
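
apply_diff now honors diff.start_offset: change offsets are absolute buffer offsets, while new_text covers only the diffed region, so inserted slices are rebased before indexing. A tiny hedged illustration:

    // start_offset = 10; an Insert covering buffer range 12..15 pulls its text
    // from new_text[2..5].
    let (start_offset, range) = (10usize, 12usize..15usize);
    let new_text_range = (range.start - start_offset)..(range.end - start_offset);
    assert_eq!(new_text_range, 2..5);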
@@ -1349,8 +1264,23 @@ impl Buffer {
         }
     }
 
-    pub fn avoid_grouping_next_transaction(&mut self) {
-        self.text.avoid_grouping_next_transaction();
+    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        self.text.push_transaction(transaction, now);
+    }
+
+    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.text.finalize_last_transaction()
+    }
+
+    pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
+        self.text.forget_transaction(transaction_id);
+    }
+
+    pub fn wait_for_edits(
+        &mut self,
+        edit_ids: impl IntoIterator<Item = clock::Local>,
+    ) -> impl Future<Output = ()> {
+        self.text.wait_for_edits(edit_ids)
     }
 
     pub fn set_active_selections(
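
The buffer also grows explicit transaction plumbing (push_transaction, finalize_last_transaction, forget_transaction) and wait_for_edits, which lets callers that apply edits produced elsewhere await specific edit ids before depending on them. A hedged sketch, with edit_ids assumed to come from, e.g., an RPC response:

    let wait = buffer.update(cx, |buffer, _| buffer.wait_for_edits(edit_ids));
    wait.await;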
@@ -1368,7 +1298,6 @@ impl Buffer {
         );
         self.send_operation(
             Operation::UpdateSelections {
-                replica_id: self.text.replica_id(),
                 selections,
                 lamport_timestamp,
             },
@@ -1386,24 +1315,22 @@ impl Buffer {
         } else {
             return;
         };
-        let abs_path = self
-            .file
-            .as_ref()
-            .and_then(|f| f.as_local())
-            .map_or(Path::new("/").to_path_buf(), |file| file.abs_path(cx));
+        let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
+            file
+        } else {
+            return;
+        };
 
         let version = post_inc(&mut language_server.next_version);
         let snapshot = LanguageServerSnapshot {
             buffer_snapshot: self.text.snapshot(),
             version,
-            path: Arc::from(abs_path),
+            path: Arc::from(file.abs_path(cx)),
         };
         language_server
             .pending_snapshots
             .insert(version, snapshot.clone());
-        let _ = language_server
-            .latest_snapshot
-            .blocking_send(Some(snapshot));
+        let _ = language_server.latest_snapshot.blocking_send(snapshot);
     }
 
     pub fn edit<I, S, T>(
@@ -1492,7 +1419,7 @@ impl Buffer {
         let new_text_len = new_text.len();
 
         let edit = self.text.edit(ranges.iter().cloned(), new_text);
-        let edit_id = edit.timestamp.local();
+        let edit_id = edit.local_timestamp();
 
         if let Some((before_edit, edited)) = autoindent_request {
             let mut inserted = None;
@@ -1521,34 +1448,117 @@ impl Buffer {
         }
 
         self.end_transaction(cx);
-        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
+        self.send_operation(Operation::Buffer(edit), cx);
         Some(edit_id)
     }
 
-    fn apply_lsp_edits(
+    pub fn edits_from_lsp(
         &mut self,
-        edits: Vec<lsp::TextEdit>,
+        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
+        version: Option<i32>,
         cx: &mut ModelContext<Self>,
-    ) -> Result<Vec<clock::Local>> {
-        for edit in &edits {
-            let range = range_from_lsp(edit.range);
-            if self.clip_point_utf16(range.start, Bias::Left) != range.start
-                || self.clip_point_utf16(range.end, Bias::Left) != range.end
-            {
-                return Err(anyhow!(
-                    "invalid formatting edits received from language server"
-                ));
+    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
+        let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) {
+            state
+                .snapshot_for_version(version as usize)
+                .map(Clone::clone)
+        } else {
+            Ok(TextBuffer::deref(self).clone())
+        };
+
+        cx.background().spawn(async move {
+            let snapshot = snapshot?;
+            let mut lsp_edits = lsp_edits
+                .into_iter()
+                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
+                .peekable();
+
+            let mut edits = Vec::new();
+            while let Some((mut range, mut new_text)) = lsp_edits.next() {
+                // Combine any LSP edits that are adjacent.
+                //
+                // Also, combine LSP edits that are separated from each other by only
+                // a newline. This is important because for some code actions,
+                // Rust-analyzer rewrites the entire buffer via a series of edits that
+                // are separated by unchanged newline characters.
+                //
+                // In order for the diffing logic below to work properly, any edits that
+                // cancel each other out must be combined into one.
+                while let Some((next_range, next_text)) = lsp_edits.peek() {
+                    if next_range.start > range.end {
+                        if next_range.start.row > range.end.row + 1
+                            || next_range.start.column > 0
+                            || snapshot.clip_point_utf16(
+                                PointUtf16::new(range.end.row, u32::MAX),
+                                Bias::Left,
+                            ) > range.end
+                        {
+                            break;
+                        }
+                        new_text.push('\n');
+                    }
+                    range.end = next_range.end;
+                    new_text.push_str(&next_text);
+                    lsp_edits.next();
+                }
+
+                if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
+                    || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
+                {
+                    return Err(anyhow!("invalid edits received from language server"));
+                }
+
+                // For multiline edits, perform a diff of the old and new text so that
+                // we can identify the changes more precisely, preserving the locations
+                // of any anchors positioned in the unchanged regions.
+                if range.end.row > range.start.row {
+                    let mut offset = range.start.to_offset(&snapshot);
+                    let old_text = snapshot.text_for_range(range).collect::<String>();
+
+                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
+                    let mut moved_since_edit = true;
+                    for change in diff.iter_all_changes() {
+                        let tag = change.tag();
+                        let value = change.value();
+                        match tag {
+                            ChangeTag::Equal => {
+                                offset += value.len();
+                                moved_since_edit = true;
+                            }
+                            ChangeTag::Delete => {
+                                let start = snapshot.anchor_after(offset);
+                                let end = snapshot.anchor_before(offset + value.len());
+                                if moved_since_edit {
+                                    edits.push((start..end, String::new()));
+                                } else {
+                                    edits.last_mut().unwrap().0.end = end;
+                                }
+                                offset += value.len();
+                                moved_since_edit = false;
+                            }
+                            ChangeTag::Insert => {
+                                if moved_since_edit {
+                                    let anchor = snapshot.anchor_after(offset);
+                                    edits.push((anchor.clone()..anchor, value.to_string()));
+                                } else {
+                                    edits.last_mut().unwrap().1.push_str(value);
+                                }
+                                moved_since_edit = false;
+                            }
+                        }
+                    }
+                } else if range.end == range.start {
+                    let anchor = snapshot.anchor_after(range.start);
+                    edits.push((anchor.clone()..anchor, new_text));
+                } else {
+                    let edit_start = snapshot.anchor_after(range.start);
+                    let edit_end = snapshot.anchor_before(range.end);
+                    edits.push((edit_start..edit_end, new_text));
+                }
             }
-        }
 
-        self.start_transaction();
-        let edit_ids = edits
-            .into_iter()
-            .rev()
-            .filter_map(|edit| self.edit([range_from_lsp(edit.range)], edit.new_text, cx))
-            .collect();
-        self.end_transaction(cx);
-        Ok(edit_ids)
+            Ok(edits)
+        })
     }
 
     fn did_edit(
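
The multiline branch above is the heart of the change: instead of applying a server's wholesale replacement verbatim, the old and new text are diffed line by line so that only the regions that actually changed get edited, and anchors (selections, diagnostics, and so on) in the untouched regions keep their positions. A self-contained sketch of that reduction over plain byte offsets, using the same `similar` API; `minimal_edits` is an illustrative name, and the real code operates on buffer snapshots and returns anchor ranges:

    use similar::{ChangeTag, TextDiff};
    use std::ops::Range;

    /// Reduce a whole-text replacement to minimal (old_byte_range, new_text)
    /// edits, mirroring the Equal/Delete/Insert walk in `edits_from_lsp`.
    /// Illustrative only; not code from this PR.
    fn minimal_edits(old: &str, new: &str) -> Vec<(Range<usize>, String)> {
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut offset = 0; // byte offset into `old`
        let mut moved_since_edit = true;
        for change in TextDiff::from_lines(old, new).iter_all_changes() {
            let value = change.value();
            match change.tag() {
                ChangeTag::Equal => {
                    offset += value.len();
                    moved_since_edit = true;
                }
                ChangeTag::Delete => {
                    if moved_since_edit {
                        edits.push((offset..offset + value.len(), String::new()));
                    } else {
                        edits.last_mut().unwrap().0.end += value.len();
                    }
                    offset += value.len();
                    moved_since_edit = false;
                }
                ChangeTag::Insert => {
                    if moved_since_edit {
                        edits.push((offset..offset, value.to_string()));
                    } else {
                        edits.last_mut().unwrap().1.push_str(value);
                    }
                    moved_since_edit = false;
                }
            }
        }
        edits
    }

    fn main() {
        // A whole-file rewrite collapses to a single local edit; text outside
        // the changed lines is untouched, so anchors there stay put.
        let old = "use a::b;\nuse a::c;\n\nfn f() {}\n";
        let new = "use a::{b, c};\n\nfn f() {}\n";
        assert_eq!(
            minimal_edits(old, new),
            vec![(0..20, "use a::{b, c};\n".to_string())]
        );
    }

The `moved_since_edit` flag is what merges an adjacent delete/insert pair into a single replacement, matching the buffer code above.
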
@@ -1655,18 +1665,17 @@ impl Buffer {
                 );
             }
             Operation::UpdateSelections {
-                replica_id,
                 selections,
                 lamport_timestamp,
             } => {
-                if let Some(set) = self.remote_selections.get(&replica_id) {
+                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
                     if set.lamport_timestamp > lamport_timestamp {
                         return;
                     }
                 }
 
                 self.remote_selections.insert(
-                    replica_id,
+                    lamport_timestamp.replica_id,
                     SelectionSet {
                         selections,
                         lamport_timestamp,
@@ -1675,8 +1684,12 @@ impl Buffer {
                 self.text.lamport_clock.observe(lamport_timestamp);
                 self.selections_update_count += 1;
             }
-            Operation::UpdateCompletionTriggers { triggers } => {
+            Operation::UpdateCompletionTriggers {
+                triggers,
+                lamport_timestamp,
+            } => {
                 self.completion_triggers = triggers;
+                self.text.lamport_clock.observe(lamport_timestamp);
             }
         }
     }
@@ -1718,7 +1731,7 @@ impl Buffer {
         }
     }
 
-    pub fn undo_transaction(
+    pub fn undo_to_transaction(
         &mut self,
         transaction_id: TransactionId,
         cx: &mut ModelContext<Self>,
@@ -1726,13 +1739,15 @@ impl Buffer {
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
 
-        if let Some(operation) = self.text.undo_transaction(transaction_id) {
+        let operations = self.text.undo_to_transaction(transaction_id);
+        let undone = !operations.is_empty();
+        for operation in operations {
             self.send_operation(Operation::Buffer(operation), cx);
-            self.did_edit(&old_version, was_dirty, cx);
-            true
-        } else {
-            false
         }
+        if undone {
+            self.did_edit(&old_version, was_dirty, cx)
+        }
+        undone
     }
 
     pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
@@ -1748,7 +1763,7 @@ impl Buffer {
         }
     }
 
-    pub fn redo_transaction(
+    pub fn redo_to_transaction(
         &mut self,
         transaction_id: TransactionId,
         cx: &mut ModelContext<Self>,
@@ -1756,158 +1771,15 @@ impl Buffer {
         let was_dirty = self.is_dirty();
         let old_version = self.version.clone();
 
-        if let Some(operation) = self.text.redo_transaction(transaction_id) {
+        let operations = self.text.redo_to_transaction(transaction_id);
+        let redone = !operations.is_empty();
+        for operation in operations {
             self.send_operation(Operation::Buffer(operation), cx);
-            self.did_edit(&old_version, was_dirty, cx);
-            true
-        } else {
-            false
         }
-    }
-
-    pub fn completions<T>(
-        &self,
-        position: T,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Completion<Anchor>>>>
-    where
-        T: ToOffset,
-    {
-        let file = if let Some(file) = self.file.as_ref() {
-            file
-        } else {
-            return Task::ready(Ok(Default::default()));
-        };
-        let language = self.language.clone();
-
-        if let Some(file) = file.as_local() {
-            let server = if let Some(language_server) = self.language_server.as_ref() {
-                language_server.server.clone()
-            } else {
-                return Task::ready(Ok(Default::default()));
-            };
-            let abs_path = file.abs_path(cx);
-            let position = self.offset_to_point_utf16(position.to_offset(self));
-
-            cx.spawn(|this, cx| async move {
-                let completions = server
-                    .request::<lsp::request::Completion>(lsp::CompletionParams {
-                        text_document_position: lsp::TextDocumentPositionParams::new(
-                            lsp::TextDocumentIdentifier::new(
-                                lsp::Url::from_file_path(abs_path).unwrap(),
-                            ),
-                            position.to_lsp_position(),
-                        ),
-                        context: Default::default(),
-                        work_done_progress_params: Default::default(),
-                        partial_result_params: Default::default(),
-                    })
-                    .await?;
-
-                let completions = if let Some(completions) = completions {
-                    match completions {
-                        lsp::CompletionResponse::Array(completions) => completions,
-                        lsp::CompletionResponse::List(list) => list.items,
-                    }
-                } else {
-                    Default::default()
-                };
-
-                this.read_with(&cx, |this, _| {
-                    Ok(completions.into_iter().filter_map(|lsp_completion| {
-                        let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
-                            lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
-                            lsp::CompletionTextEdit::InsertAndReplace(_) => {
-                                log::info!("received an insert and replace completion but we don't yet support that");
-                                return None
-                            },
-                        };
-
-                        let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
-                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ;
-                        if clipped_start == old_range.start && clipped_end == old_range.end {
-                            Some(Completion {
-                                old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
-                                new_text,
-                                label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
-                                lsp_completion,
-                            })
-                        } else {
-                            None
-                        }
-                    }).collect())
-                })
-            })
-        } else {
-            file.completions(
-                self.remote_id(),
-                self.anchor_before(position),
-                language,
-                cx.as_mut(),
-            )
-        }
-    }
-
-    pub fn apply_additional_edits_for_completion(
-        &mut self,
-        completion: Completion<Anchor>,
-        push_to_history: bool,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<clock::Local>>> {
-        let file = if let Some(file) = self.file.as_ref() {
-            file
-        } else {
-            return Task::ready(Ok(Default::default()));
-        };
-
-        if file.is_local() {
-            let server = if let Some(lang) = self.language_server.as_ref() {
-                lang.server.clone()
-            } else {
-                return Task::ready(Ok(Default::default()));
-            };
-
-            cx.spawn(|this, mut cx| async move {
-                let resolved_completion = server
-                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
-                    .await?;
-                if let Some(additional_edits) = resolved_completion.additional_text_edits {
-                    this.update(&mut cx, |this, cx| {
-                        if !push_to_history {
-                            this.avoid_grouping_next_transaction();
-                        }
-                        this.start_transaction();
-                        let edit_ids = this.apply_lsp_edits(additional_edits, cx);
-                        if let Some(transaction_id) = this.end_transaction(cx) {
-                            if !push_to_history {
-                                this.text.forget_transaction(transaction_id);
-                            }
-                        }
-                        edit_ids
-                    })
-                } else {
-                    Ok(Default::default())
-                }
-            })
-        } else {
-            let apply_edits = file.apply_additional_edits_for_completion(
-                self.remote_id(),
-                completion,
-                cx.as_mut(),
-            );
-            cx.spawn(|this, mut cx| async move {
-                let edit_ids = apply_edits.await?;
-                this.update(&mut cx, |this, _| this.text.wait_for_edits(&edit_ids))
-                    .await;
-                if push_to_history {
-                    this.update(&mut cx, |this, _| {
-                        this.text
-                            .push_transaction(edit_ids.iter().copied(), Instant::now());
-                    });
-                }
-                Ok(edit_ids)
-            })
+        if redone {
+            self.did_edit(&old_version, was_dirty, cx)
         }
+        redone
     }
 
     pub fn completion_triggers(&self) -> &[String] {

crates/language/src/language.rs šŸ”—

@@ -7,19 +7,20 @@ pub mod proto;
 mod tests;
 
 use anyhow::{anyhow, Result};
-pub use buffer::Operation;
-pub use buffer::*;
 use collections::HashSet;
-pub use diagnostic_set::DiagnosticEntry;
 use gpui::AppContext;
 use highlight_map::HighlightMap;
 use lazy_static::lazy_static;
-pub use outline::{Outline, OutlineItem};
 use parking_lot::Mutex;
 use serde::Deserialize;
 use std::{cell::RefCell, ops::Range, path::Path, str, sync::Arc};
 use theme::SyntaxTheme;
 use tree_sitter::{self, Query};
+
+pub use buffer::Operation;
+pub use buffer::*;
+pub use diagnostic_set::DiagnosticEntry;
+pub use outline::{Outline, OutlineItem};
 pub use tree_sitter::{Parser, Tree};
 
 thread_local! {
@@ -39,10 +40,6 @@ lazy_static! {
     ));
 }
 
-pub trait ToPointUtf16 {
-    fn to_point_utf16(self) -> PointUtf16;
-}
-
 pub trait ToLspPosition {
     fn to_lsp_position(self) -> lsp::Position;
 }
@@ -360,18 +357,15 @@ impl CompletionLabel {
 
 #[cfg(any(test, feature = "test-support"))]
 impl LanguageServerConfig {
-    pub async fn fake(
-        executor: Arc<gpui::executor::Background>,
-    ) -> (Self, lsp::FakeLanguageServer) {
-        Self::fake_with_capabilities(Default::default(), executor).await
+    pub async fn fake(cx: &gpui::TestAppContext) -> (Self, lsp::FakeLanguageServer) {
+        Self::fake_with_capabilities(Default::default(), cx).await
     }
 
     pub async fn fake_with_capabilities(
         capabilites: lsp::ServerCapabilities,
-        executor: Arc<gpui::executor::Background>,
+        cx: &gpui::TestAppContext,
     ) -> (Self, lsp::FakeLanguageServer) {
-        let (server, fake) =
-            lsp::LanguageServer::fake_with_capabilities(capabilites, executor).await;
+        let (server, fake) = lsp::LanguageServer::fake_with_capabilities(capabilites, cx).await;
         fake.started
             .store(false, std::sync::atomic::Ordering::SeqCst);
         let started = fake.started.clone();
@@ -386,18 +380,16 @@ impl LanguageServerConfig {
     }
 }
 
-impl ToPointUtf16 for lsp::Position {
-    fn to_point_utf16(self) -> PointUtf16 {
-        PointUtf16::new(self.line, self.character)
-    }
-}
-
 impl ToLspPosition for PointUtf16 {
     fn to_lsp_position(self) -> lsp::Position {
         lsp::Position::new(self.row, self.column)
     }
 }
 
+pub fn point_from_lsp(point: lsp::Position) -> PointUtf16 {
+    PointUtf16::new(point.line, point.character)
+}
+
 pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
     let start = PointUtf16::new(range.start.line, range.start.character);
     let end = PointUtf16::new(range.end.line, range.end.character);
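
`point_from_lsp` replaces the old `ToPointUtf16` trait with a plain function, and together with `ToLspPosition` it captures the unit convention on the wire: LSP positions are zero-based (line, character) with `character` counted in UTF-16 code units, which is why the buffer-side type is `PointUtf16` rather than a byte offset. A tiny usage sketch, assuming the usual workspace crate paths and `PartialEq` derives:

    use language::{point_from_lsp, range_from_lsp, ToLspPosition};
    use text::PointUtf16;

    fn main() {
        // The mapping is field-for-field; the types only differ in name and
        // in the UTF-16 counting convention they both share.
        let lsp_position = lsp::Position::new(0, 2);
        let point = point_from_lsp(lsp_position);
        assert_eq!(point, PointUtf16::new(0, 2));
        assert_eq!(point.to_lsp_position(), lsp_position);

        let range = range_from_lsp(lsp::Range::new(
            lsp::Position::new(1, 0),
            lsp::Position::new(2, 4),
        ));
        assert_eq!(range, PointUtf16::new(1, 0)..PointUtf16::new(2, 4));
    }
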

crates/language/src/proto.rs šŸ”—

@@ -1,12 +1,13 @@
 use crate::{
-    diagnostic_set::DiagnosticEntry, Completion, CompletionLabel, Diagnostic, Language, Operation,
+    diagnostic_set::DiagnosticEntry, CodeAction, Completion, CompletionLabel, Diagnostic, Language,
+    Operation,
 };
 use anyhow::{anyhow, Result};
 use clock::ReplicaId;
 use collections::HashSet;
 use lsp::DiagnosticSeverity;
 use rpc::proto;
-use std::sync::Arc;
+use std::{ops::Range, sync::Arc};
 use text::*;
 
 pub use proto::{Buffer, BufferState, SelectionSet};
@@ -24,14 +25,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 replica_id: undo.id.replica_id as u32,
                 local_timestamp: undo.id.value,
                 lamport_timestamp: lamport_timestamp.value,
-                ranges: undo
-                    .ranges
-                    .iter()
-                    .map(|r| proto::Range {
-                        start: r.start.0 as u64,
-                        end: r.end.0 as u64,
-                    })
-                    .collect(),
+                ranges: undo.ranges.iter().map(serialize_range).collect(),
                 counts: undo
                     .counts
                     .iter()
@@ -44,11 +38,10 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 version: From::from(&undo.version),
             }),
             Operation::UpdateSelections {
-                replica_id,
                 selections,
                 lamport_timestamp,
             } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections {
-                replica_id: *replica_id as u32,
+                replica_id: lamport_timestamp.replica_id as u32,
                 lamport_timestamp: lamport_timestamp.value,
                 selections: serialize_selections(selections),
             }),
@@ -60,32 +53,27 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation {
                 lamport_timestamp: lamport_timestamp.value,
                 diagnostics: serialize_diagnostics(diagnostics.iter()),
             }),
-            Operation::UpdateCompletionTriggers { triggers } => {
-                proto::operation::Variant::UpdateCompletionTriggers(
-                    proto::operation::UpdateCompletionTriggers {
-                        triggers: triggers.clone(),
-                    },
-                )
-            }
+            Operation::UpdateCompletionTriggers {
+                triggers,
+                lamport_timestamp,
+            } => proto::operation::Variant::UpdateCompletionTriggers(
+                proto::operation::UpdateCompletionTriggers {
+                    replica_id: lamport_timestamp.replica_id as u32,
+                    lamport_timestamp: lamport_timestamp.value,
+                    triggers: triggers.clone(),
+                },
+            ),
         }),
     }
 }
 
 pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit {
-    let ranges = operation
-        .ranges
-        .iter()
-        .map(|range| proto::Range {
-            start: range.start.0 as u64,
-            end: range.end.0 as u64,
-        })
-        .collect();
     proto::operation::Edit {
         replica_id: operation.timestamp.replica_id as u32,
         local_timestamp: operation.timestamp.local,
         lamport_timestamp: operation.timestamp.lamport,
         version: From::from(&operation.version),
-        ranges,
+        ranges: operation.ranges.iter().map(serialize_range).collect(),
         new_text: operation.new_text.clone(),
     }
 }
@@ -208,11 +196,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
                             )
                         })
                         .collect(),
-                    ranges: undo
-                        .ranges
-                        .into_iter()
-                        .map(|r| FullOffset(r.start as usize)..FullOffset(r.end as usize))
-                        .collect(),
+                    ranges: undo.ranges.into_iter().map(deserialize_range).collect(),
                     version: undo.version.into(),
                 },
             }),
@@ -232,7 +216,6 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
                     .collect::<Vec<_>>();
 
                 Operation::UpdateSelections {
-                    replica_id: message.replica_id as ReplicaId,
                     lamport_timestamp: clock::Lamport {
                         replica_id: message.replica_id as ReplicaId,
                         value: message.lamport_timestamp,
@@ -250,6 +233,10 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
             proto::operation::Variant::UpdateCompletionTriggers(message) => {
                 Operation::UpdateCompletionTriggers {
                     triggers: message.triggers,
+                    lamport_timestamp: clock::Lamport {
+                        replica_id: message.replica_id as ReplicaId,
+                        value: message.lamport_timestamp,
+                    },
                 }
             }
         },
@@ -257,11 +244,6 @@ pub fn deserialize_operation(message: proto::Operation) -> Result<Operation> {
 }
 
 pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation {
-    let ranges = edit
-        .ranges
-        .into_iter()
-        .map(|range| FullOffset(range.start as usize)..FullOffset(range.end as usize))
-        .collect();
     EditOperation {
         timestamp: InsertionTimestamp {
             replica_id: edit.replica_id as ReplicaId,
@@ -269,7 +251,7 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation
             lamport: edit.lamport_timestamp,
         },
         version: edit.version.into(),
-        ranges,
+        ranges: edit.ranges.into_iter().map(deserialize_range).collect(),
         new_text: edit.new_text,
     }
 }
@@ -380,7 +362,39 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
     })
 }
 
-pub fn serialize_completion(completion: &Completion<Anchor>) -> proto::Completion {
+pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option<clock::Lamport> {
+    let replica_id;
+    let value;
+    match operation.variant.as_ref()? {
+        proto::operation::Variant::Edit(op) => {
+            replica_id = op.replica_id;
+            value = op.lamport_timestamp;
+        }
+        proto::operation::Variant::Undo(op) => {
+            replica_id = op.replica_id;
+            value = op.lamport_timestamp;
+        }
+        proto::operation::Variant::UpdateDiagnostics(op) => {
+            replica_id = op.replica_id;
+            value = op.lamport_timestamp;
+        }
+        proto::operation::Variant::UpdateSelections(op) => {
+            replica_id = op.replica_id;
+            value = op.lamport_timestamp;
+        }
+        proto::operation::Variant::UpdateCompletionTriggers(op) => {
+            replica_id = op.replica_id;
+            value = op.lamport_timestamp;
+        }
+    }
+
+    Some(clock::Lamport {
+        replica_id: replica_id as ReplicaId,
+        value,
+    })
+}
+
+pub fn serialize_completion(completion: &Completion) -> proto::Completion {
     proto::Completion {
         old_start: Some(serialize_anchor(&completion.old_range.start)),
         old_end: Some(serialize_anchor(&completion.old_range.end)),
@@ -392,7 +406,7 @@ pub fn serialize_completion(completion: &Completion<Anchor>) -> proto::Completio
 pub fn deserialize_completion(
     completion: proto::Completion,
     language: Option<&Arc<Language>>,
-) -> Result<Completion<Anchor>> {
+) -> Result<Completion> {
     let old_start = completion
         .old_start
         .and_then(deserialize_anchor)
@@ -411,3 +425,89 @@ pub fn deserialize_completion(
         lsp_completion,
     })
 }
+
+pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction {
+    proto::CodeAction {
+        start: Some(serialize_anchor(&action.range.start)),
+        end: Some(serialize_anchor(&action.range.end)),
+        lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(),
+    }
+}
+
+pub fn deserialize_code_action(action: proto::CodeAction) -> Result<CodeAction> {
+    let start = action
+        .start
+        .and_then(deserialize_anchor)
+        .ok_or_else(|| anyhow!("invalid start"))?;
+    let end = action
+        .end
+        .and_then(deserialize_anchor)
+        .ok_or_else(|| anyhow!("invalid end"))?;
+    let lsp_action = serde_json::from_slice(&action.lsp_action)?;
+    Ok(CodeAction {
+        range: start..end,
+        lsp_action,
+    })
+}
+
+pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction {
+    proto::Transaction {
+        id: Some(serialize_local_timestamp(transaction.id)),
+        edit_ids: transaction
+            .edit_ids
+            .iter()
+            .copied()
+            .map(serialize_local_timestamp)
+            .collect(),
+        start: (&transaction.start).into(),
+        end: (&transaction.end).into(),
+        ranges: transaction.ranges.iter().map(serialize_range).collect(),
+    }
+}
+
+pub fn deserialize_transaction(transaction: proto::Transaction) -> Result<Transaction> {
+    Ok(Transaction {
+        id: deserialize_local_timestamp(
+            transaction
+                .id
+                .ok_or_else(|| anyhow!("missing transaction id"))?,
+        ),
+        edit_ids: transaction
+            .edit_ids
+            .into_iter()
+            .map(deserialize_local_timestamp)
+            .collect(),
+        start: transaction.start.into(),
+        end: transaction.end.into(),
+        ranges: transaction
+            .ranges
+            .into_iter()
+            .map(deserialize_range)
+            .collect(),
+    })
+}
+
+pub fn serialize_local_timestamp(timestamp: clock::Local) -> proto::LocalTimestamp {
+    proto::LocalTimestamp {
+        replica_id: timestamp.replica_id as u32,
+        value: timestamp.value,
+    }
+}
+
+pub fn deserialize_local_timestamp(timestamp: proto::LocalTimestamp) -> clock::Local {
+    clock::Local {
+        replica_id: timestamp.replica_id as ReplicaId,
+        value: timestamp.value,
+    }
+}
+
+pub fn serialize_range(range: &Range<FullOffset>) -> proto::Range {
+    proto::Range {
+        start: range.start.0 as u64,
+        end: range.end.0 as u64,
+    }
+}
+
+pub fn deserialize_range(range: proto::Range) -> Range<FullOffset> {
+    FullOffset(range.start as usize)..FullOffset(range.end as usize)
+}
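
The new helpers at the bottom keep the wire format dumb: ranges, local timestamps, and transactions serialize field-for-field, so round trips are lossless, and `lamport_timestamp_for_operation` extracts a timestamp from any proto variant, presumably so the receiving side can order incoming operations without fully deserializing them. A small round-trip sketch, assuming the usual `Copy` and `PartialEq` derives on `clock::Local` and `FullOffset` (the `.copied()` call above implies at least the former):

    use language::proto::{
        deserialize_local_timestamp, deserialize_range, serialize_local_timestamp, serialize_range,
    };
    use text::FullOffset;

    fn main() {
        // Assumes clock::Local and FullOffset derive Copy/PartialEq.
        let timestamp = clock::Local { replica_id: 1, value: 42 };
        assert_eq!(
            deserialize_local_timestamp(serialize_local_timestamp(timestamp)),
            timestamp
        );

        let range = FullOffset(3)..FullOffset(9);
        assert_eq!(deserialize_range(serialize_range(&range)), range);
    }
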

crates/language/src/tests.rs šŸ”—

@@ -557,7 +557,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte
 
 #[gpui::test]
 async fn test_diagnostics(mut cx: gpui::TestAppContext) {
-    let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await;
+    let (language_server, mut fake) = lsp::LanguageServer::fake(&cx).await;
     let mut rust_lang = rust_lang();
     rust_lang.config.language_server = Some(LanguageServerConfig {
         disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]),
@@ -572,7 +572,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     .unindent();
 
     let buffer = cx.add_model(|cx| {
-        Buffer::new(0, text, cx)
+        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
             .with_language(Arc::new(rust_lang), cx)
             .with_language_server(language_server, cx)
     });
@@ -592,7 +592,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         // Receive diagnostics for an earlier version of the buffer.
         buffer
             .update_diagnostics(
-                Some(open_notification.text_document.version),
                 vec![
                     DiagnosticEntry {
                         range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
@@ -628,6 +627,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
                         },
                     },
                 ],
+                Some(open_notification.text_document.version),
                 cx,
             )
             .unwrap();
@@ -687,7 +687,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
         // Ensure overlapping diagnostics are highlighted correctly.
         buffer
             .update_diagnostics(
-                Some(open_notification.text_document.version),
                 vec![
                     DiagnosticEntry {
                         range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10),
@@ -711,6 +710,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
                         },
                     },
                 ],
+                Some(open_notification.text_document.version),
                 cx,
             )
             .unwrap();
@@ -777,7 +777,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     buffer.update(&mut cx, |buffer, cx| {
         buffer
             .update_diagnostics(
-                Some(change_notification_2.text_document.version),
                 vec![
                     DiagnosticEntry {
                         range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11),
@@ -802,6 +801,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
                         },
                     },
                 ],
+                Some(change_notification_2.text_document.version),
                 cx,
             )
             .unwrap();
@@ -838,6 +838,223 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) {
     });
 }
 
+#[gpui::test]
+async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) {
+    let (language_server, mut fake) = lsp::LanguageServer::fake(&cx).await;
+
+    let text = "
+        fn a() {
+            f1();
+        }
+        fn b() {
+            f2();
+        }
+        fn c() {
+            f3();
+        }
+    "
+    .unindent();
+
+    let buffer = cx.add_model(|cx| {
+        Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx)
+            .with_language(Arc::new(rust_lang()), cx)
+            .with_language_server(language_server, cx)
+    });
+
+    let lsp_document_version = fake
+        .receive_notification::<lsp::notification::DidOpenTextDocument>()
+        .await
+        .text_document
+        .version;
+
+    // Simulate editing the buffer after the language server computes some edits.
+    buffer.update(&mut cx, |buffer, cx| {
+        buffer.edit(
+            [Point::new(0, 0)..Point::new(0, 0)],
+            "// above first function\n",
+            cx,
+        );
+        buffer.edit(
+            [Point::new(2, 0)..Point::new(2, 0)],
+            "    // inside first function\n",
+            cx,
+        );
+        buffer.edit(
+            [Point::new(6, 4)..Point::new(6, 4)],
+            "// inside second function ",
+            cx,
+        );
+
+        assert_eq!(
+            buffer.text(),
+            "
+                // above first function
+                fn a() {
+                    // inside first function
+                    f1();
+                }
+                fn b() {
+                    // inside second function f2();
+                }
+                fn c() {
+                    f3();
+                }
+            "
+            .unindent()
+        );
+    });
+
+    let edits = buffer
+        .update(&mut cx, |buffer, cx| {
+            buffer.edits_from_lsp(
+                vec![
+                    // replace body of first function
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
+                        new_text: "
+                            fn a() {
+                                f10();
+                            }
+                        "
+                        .unindent(),
+                    },
+                    // edit inside second function
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
+                        new_text: "00".into(),
+                    },
+                    // edit inside third function via two distinct edits
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
+                        new_text: "4000".into(),
+                    },
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
+                        new_text: "".into(),
+                    },
+                ],
+                Some(lsp_document_version),
+                cx,
+            )
+        })
+        .await
+        .unwrap();
+
+    buffer.update(&mut cx, |buffer, cx| {
+        for (range, new_text) in edits {
+            buffer.edit([range], new_text, cx);
+        }
+        assert_eq!(
+            buffer.text(),
+            "
+                // above first function
+                fn a() {
+                    // inside first function
+                    f10();
+                }
+                fn b() {
+                    // inside second function f200();
+                }
+                fn c() {
+                    f4000();
+                }
+            "
+            .unindent()
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) {
+    let text = "
+        use a::b;
+        use a::c;
+
+        fn f() {
+            b();
+            c();
+        }
+    "
+    .unindent();
+
+    let buffer = cx.add_model(|cx| Buffer::new(0, text, cx));
+
+    // Simulate the language server sending us a small edit in the form of a very large diff.
+    // Rust-analyzer does this when performing a merge-imports code action.
+    let edits = buffer
+        .update(&mut cx, |buffer, cx| {
+            buffer.edits_from_lsp(
+                [
+                    // Replace the first use statement without editing the semicolon.
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
+                        new_text: "a::{b, c}".into(),
+                    },
+                    // Reinsert the remainder of the file between the semicolon and the final
+                    // newline of the file.
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
+                        new_text: "\n\n".into(),
+                    },
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
+                        new_text: "
+                            fn f() {
+                                b();
+                                c();
+                            }"
+                        .unindent(),
+                    },
+                    // Delete everything after the first newline of the file.
+                    lsp::TextEdit {
+                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
+                        new_text: "".into(),
+                    },
+                ],
+                None,
+                cx,
+            )
+        })
+        .await
+        .unwrap();
+
+    buffer.update(&mut cx, |buffer, cx| {
+        let edits = edits
+            .into_iter()
+            .map(|(range, text)| {
+                (
+                    range.start.to_point(&buffer)..range.end.to_point(&buffer),
+                    text,
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            edits,
+            [
+                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+                (Point::new(1, 0)..Point::new(2, 0), "".into())
+            ]
+        );
+
+        for (range, new_text) in edits {
+            buffer.edit([range], new_text, cx);
+        }
+        assert_eq!(
+            buffer.text(),
+            "
+                use a::{b, c};
+
+                fn f() {
+                    b();
+                    c();
+                }
+            "
+            .unindent()
+        );
+    });
+}
+
 #[gpui::test]
 async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
     cx.add_model(|cx| {
@@ -851,7 +1068,6 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
         buffer.set_language(Some(Arc::new(rust_lang())), cx);
         buffer
             .update_diagnostics(
-                None,
                 vec![
                     DiagnosticEntry {
                         range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
@@ -870,6 +1086,7 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) {
                         },
                     },
                 ],
+                None,
                 cx,
             )
             .unwrap();
@@ -1073,15 +1290,15 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) {
 
     for buffer in &buffers {
         let buffer = buffer.read(cx).snapshot();
+        let actual_remote_selections = buffer
+            .remote_selections_in_range(Anchor::min()..Anchor::max())
+            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
+            .collect::<Vec<_>>();
         let expected_remote_selections = active_selections
             .iter()
             .filter(|(replica_id, _)| **replica_id != buffer.replica_id())
             .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::<Vec<_>>()))
             .collect::<Vec<_>>();
-        let actual_remote_selections = buffer
-            .remote_selections_in_range(Anchor::min()..Anchor::max())
-            .map(|(replica_id, selections)| (replica_id, selections.collect::<Vec<_>>()))
-            .collect::<Vec<_>>();
         assert_eq!(actual_remote_selections, expected_remote_selections);
     }
 }

crates/lsp/Cargo.toml šŸ”—

@@ -27,5 +27,6 @@ smol = "1.2"
 gpui = { path = "../gpui", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
 async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" }
-simplelog = "0.9"
+ctor = "0.1"
+env_logger = "0.8"
 unindent = "0.1.7"

crates/lsp/src/lsp.rs šŸ”—

@@ -56,6 +56,18 @@ struct Request<'a, T> {
     params: T,
 }
 
+#[cfg(any(test, feature = "test-support"))]
+#[derive(Deserialize)]
+struct AnyRequest<'a> {
+    id: usize,
+    #[serde(borrow)]
+    jsonrpc: &'a str,
+    #[serde(borrow)]
+    method: &'a str,
+    #[serde(borrow)]
+    params: &'a RawValue,
+}
+
 #[derive(Serialize, Deserialize)]
 struct AnyResponse<'a> {
     id: usize,
@@ -238,6 +250,21 @@ impl LanguageServer {
                         link_support: Some(true),
                         ..Default::default()
                     }),
+                    code_action: Some(CodeActionClientCapabilities {
+                        code_action_literal_support: Some(CodeActionLiteralSupport {
+                            code_action_kind: CodeActionKindLiteralSupport {
+                                value_set: vec![
+                                    CodeActionKind::REFACTOR.as_str().into(),
+                                    CodeActionKind::QUICKFIX.as_str().into(),
+                                ],
+                            },
+                        }),
+                        data_support: Some(true),
+                        resolve_support: Some(CodeActionCapabilityResolveSupport {
+                            properties: vec!["edit".to_string()],
+                        }),
+                        ..Default::default()
+                    }),
                     completion: Some(CompletionClientCapabilities {
                         completion_item: Some(CompletionItemCapability {
                             snippet_support: Some(true),
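
Advertising `code_action_literal_support` plus `resolve_support` for the `edit` property tells servers they may return `CodeAction` literals without a pre-computed `edit`, and that the client will fetch it lazily via `codeAction/resolve` when the action is actually applied. A hedged sketch of that follow-up request using the `request` helper shown elsewhere in this diff; the surrounding async plumbing is elided and the snippet is not code from this PR:

    // `action` is an lsp::CodeAction previously returned by the server.
    let action = if action.edit.is_none() && action.data.is_some() {
        // Only resolve when the server deferred the edit.
        server
            .request::<lsp::request::CodeActionResolveRequest>(action)
            .await?
    } else {
        action
    };
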
@@ -454,48 +481,41 @@ impl Drop for Subscription {
 
 #[cfg(any(test, feature = "test-support"))]
 pub struct FakeLanguageServer {
-    buffer: Vec<u8>,
-    stdin: smol::io::BufReader<async_pipe::PipeReader>,
-    stdout: smol::io::BufWriter<async_pipe::PipeWriter>,
+    handlers: Arc<
+        Mutex<
+            HashMap<
+                &'static str,
+                Box<dyn Send + FnOnce(usize, &[u8]) -> (Vec<u8>, barrier::Sender)>,
+            >,
+        >,
+    >,
+    outgoing_tx: channel::Sender<Vec<u8>>,
+    incoming_rx: channel::Receiver<Vec<u8>>,
     pub started: Arc<std::sync::atomic::AtomicBool>,
 }
 
-#[cfg(any(test, feature = "test-support"))]
-pub struct RequestId<T> {
-    id: usize,
-    _type: std::marker::PhantomData<T>,
-}
-
 #[cfg(any(test, feature = "test-support"))]
 impl LanguageServer {
-    pub async fn fake(executor: Arc<executor::Background>) -> (Arc<Self>, FakeLanguageServer) {
-        Self::fake_with_capabilities(Default::default(), executor).await
+    pub async fn fake(cx: &gpui::TestAppContext) -> (Arc<Self>, FakeLanguageServer) {
+        Self::fake_with_capabilities(Default::default(), cx).await
     }
 
     pub async fn fake_with_capabilities(
         capabilities: ServerCapabilities,
-        executor: Arc<executor::Background>,
+        cx: &gpui::TestAppContext,
     ) -> (Arc<Self>, FakeLanguageServer) {
-        let stdin = async_pipe::pipe();
-        let stdout = async_pipe::pipe();
-        let mut fake = FakeLanguageServer {
-            stdin: smol::io::BufReader::new(stdin.1),
-            stdout: smol::io::BufWriter::new(stdout.0),
-            buffer: Vec::new(),
-            started: Arc::new(std::sync::atomic::AtomicBool::new(true)),
-        };
+        let (stdin_writer, stdin_reader) = async_pipe::pipe();
+        let (stdout_writer, stdout_reader) = async_pipe::pipe();
 
-        let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap();
+        let mut fake = FakeLanguageServer::new(cx, stdin_reader, stdout_writer);
+        fake.handle_request::<request::Initialize, _>(move |_| InitializeResult {
+            capabilities,
+            ..Default::default()
+        });
 
-        let (init_id, _) = fake.receive_request::<request::Initialize>().await;
-        fake.respond(
-            init_id,
-            InitializeResult {
-                capabilities,
-                ..Default::default()
-            },
-        )
-        .await;
+        let server =
+            Self::new_internal(stdin_writer, stdout_reader, Path::new("/"), cx.background())
+                .unwrap();
         fake.receive_notification::<notification::Initialized>()
             .await;
 
@@ -505,6 +525,75 @@ impl LanguageServer {
 
 #[cfg(any(test, feature = "test-support"))]
 impl FakeLanguageServer {
+    fn new(
+        cx: &gpui::TestAppContext,
+        stdin: async_pipe::PipeReader,
+        stdout: async_pipe::PipeWriter,
+    ) -> Self {
+        use futures::StreamExt as _;
+
+        let (incoming_tx, incoming_rx) = channel::unbounded();
+        let (outgoing_tx, mut outgoing_rx) = channel::unbounded();
+        let this = Self {
+            outgoing_tx: outgoing_tx.clone(),
+            incoming_rx,
+            handlers: Default::default(),
+            started: Arc::new(std::sync::atomic::AtomicBool::new(true)),
+        };
+
+        // Receive incoming messages
+        let handlers = this.handlers.clone();
+        cx.background()
+            .spawn(async move {
+                let mut buffer = Vec::new();
+                let mut stdin = smol::io::BufReader::new(stdin);
+                while Self::receive(&mut stdin, &mut buffer).await.is_ok() {
+                    if let Ok(request) = serde_json::from_slice::<AnyRequest>(&mut buffer) {
+                        assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
+
+                        let handler = handlers.lock().remove(request.method);
+                        if let Some(handler) = handler {
+                            let (response, sent) =
+                                handler(request.id, request.params.get().as_bytes());
+                            log::debug!("handled lsp request. method:{}", request.method);
+                            outgoing_tx.send(response).await.unwrap();
+                            drop(sent);
+                        } else {
+                            log::debug!("unhandled lsp request. method:{}", request.method);
+                            outgoing_tx
+                                .send(
+                                    serde_json::to_vec(&AnyResponse {
+                                        id: request.id,
+                                        error: Some(Error {
+                                            message: "no handler".to_string(),
+                                        }),
+                                        result: None,
+                                    })
+                                    .unwrap(),
+                                )
+                                .await
+                                .unwrap();
+                        }
+                    } else {
+                        incoming_tx.send(buffer.clone()).await.unwrap();
+                    }
+                }
+            })
+            .detach();
+
+        // Send outgoing messages
+        cx.background()
+            .spawn(async move {
+                let mut stdout = smol::io::BufWriter::new(stdout);
+                while let Some(notification) = outgoing_rx.next().await {
+                    Self::send(&mut stdout, &notification).await;
+                }
+            })
+            .detach();
+
+        this
+    }
+
     pub async fn notify<T: notification::Notification>(&mut self, params: T::Params) {
         if !self.started.load(std::sync::atomic::Ordering::SeqCst) {
             panic!("can't simulate an LSP notification before the server has been started");
@@ -515,51 +604,53 @@ impl FakeLanguageServer {
             params,
         })
         .unwrap();
-        self.send(message).await;
+        self.outgoing_tx.send(message).await.unwrap();
     }
 
-    pub async fn respond<'a, T: request::Request>(
-        &mut self,
-        request_id: RequestId<T>,
-        result: T::Result,
-    ) {
-        let result = serde_json::to_string(&result).unwrap();
-        let message = serde_json::to_vec(&AnyResponse {
-            id: request_id.id,
-            error: None,
-            result: Some(&RawValue::from_string(result).unwrap()),
-        })
-        .unwrap();
-        self.send(message).await;
-    }
+    pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
+        use futures::StreamExt as _;
 
-    pub async fn receive_request<T: request::Request>(&mut self) -> (RequestId<T>, T::Params) {
         loop {
-            self.receive().await;
-            if let Ok(request) = serde_json::from_slice::<Request<T::Params>>(&self.buffer) {
-                assert_eq!(request.method, T::METHOD);
-                assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
-                return (
-                    RequestId {
-                        id: request.id,
-                        _type: std::marker::PhantomData,
-                    },
-                    request.params,
-                );
+            let bytes = self.incoming_rx.next().await.unwrap();
+            if let Ok(notification) = serde_json::from_slice::<Notification<T::Params>>(&bytes) {
+                assert_eq!(notification.method, T::METHOD);
+                return notification.params;
             } else {
                 log::info!(
                     "skipping message in fake language server {:?}",
-                    std::str::from_utf8(&self.buffer)
+                    std::str::from_utf8(&bytes)
                 );
             }
         }
     }
 
-    pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
-        self.receive().await;
-        let notification = serde_json::from_slice::<Notification<T::Params>>(&self.buffer).unwrap();
-        assert_eq!(notification.method, T::METHOD);
-        notification.params
+    pub fn handle_request<T, F>(&mut self, handler: F) -> barrier::Receiver
+    where
+        T: 'static + request::Request,
+        F: 'static + Send + FnOnce(T::Params) -> T::Result,
+    {
+        let (responded_tx, responded_rx) = barrier::channel();
+        let prev_handler = self.handlers.lock().insert(
+            T::METHOD,
+            Box::new(|id, params| {
+                let result = handler(serde_json::from_slice::<T::Params>(params).unwrap());
+                let result = serde_json::to_string(&result).unwrap();
+                let result = serde_json::from_str::<&RawValue>(&result).unwrap();
+                let response = AnyResponse {
+                    id,
+                    error: None,
+                    result: Some(result),
+                };
+                (serde_json::to_vec(&response).unwrap(), responded_tx)
+            }),
+        );
+        if prev_handler.is_some() {
+            panic!(
+                "registered a new handler for LSP method '{}' before the previous handler was called",
+                T::METHOD
+            );
+        }
+        responded_rx
     }
 
     pub async fn start_progress(&mut self, token: impl Into<String>) {
@@ -578,39 +669,37 @@ impl FakeLanguageServer {
         .await;
     }
 
-    async fn send(&mut self, message: Vec<u8>) {
-        self.stdout
+    async fn send(stdout: &mut smol::io::BufWriter<async_pipe::PipeWriter>, message: &[u8]) {
+        stdout
             .write_all(CONTENT_LEN_HEADER.as_bytes())
             .await
             .unwrap();
-        self.stdout
+        stdout
             .write_all((format!("{}", message.len())).as_bytes())
             .await
             .unwrap();
-        self.stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
-        self.stdout.write_all(&message).await.unwrap();
-        self.stdout.flush().await.unwrap();
+        stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap();
+        stdout.write_all(&message).await.unwrap();
+        stdout.flush().await.unwrap();
     }
 
-    async fn receive(&mut self) {
-        self.buffer.clear();
-        self.stdin
-            .read_until(b'\n', &mut self.buffer)
-            .await
-            .unwrap();
-        self.stdin
-            .read_until(b'\n', &mut self.buffer)
-            .await
-            .unwrap();
-        let message_len: usize = std::str::from_utf8(&self.buffer)
+    async fn receive(
+        stdin: &mut smol::io::BufReader<async_pipe::PipeReader>,
+        buffer: &mut Vec<u8>,
+    ) -> Result<()> {
+        buffer.clear();
+        stdin.read_until(b'\n', buffer).await?;
+        stdin.read_until(b'\n', buffer).await?;
+        let message_len: usize = std::str::from_utf8(buffer)
             .unwrap()
             .strip_prefix(CONTENT_LEN_HEADER)
             .unwrap()
             .trim_end()
             .parse()
             .unwrap();
-        self.buffer.resize(message_len, 0);
-        self.stdin.read_exact(&mut self.buffer).await.unwrap();
+        buffer.resize(message_len, 0);
+        stdin.read_exact(buffer).await?;
+        Ok(())
     }
 }
 
@@ -618,10 +707,16 @@ impl FakeLanguageServer {
 mod tests {
     use super::*;
     use gpui::TestAppContext;
-    use simplelog::SimpleLogger;
     use unindent::Unindent;
     use util::test::temp_tree;
 
+    #[ctor::ctor]
+    fn init_logger() {
+        if std::env::var("RUST_LOG").is_ok() {
+            env_logger::init();
+        }
+    }
+
     #[gpui::test]
     async fn test_rust_analyzer(cx: TestAppContext) {
         let lib_source = r#"
@@ -643,14 +738,9 @@ mod tests {
         }));
         let lib_file_uri = Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap();
 
-        let server = cx.read(|cx| {
-            LanguageServer::new(
-                Path::new("rust-analyzer"),
-                root_dir.path(),
-                cx.background().clone(),
-            )
-            .unwrap()
-        });
+        let server =
+            LanguageServer::new(Path::new("rust-analyzer"), root_dir.path(), cx.background())
+                .unwrap();
         server.next_idle_notification().await;
 
         server
@@ -687,9 +777,7 @@ mod tests {
 
     #[gpui::test]
     async fn test_fake(cx: TestAppContext) {
-        SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap();
-
-        let (server, mut fake) = LanguageServer::fake(cx.background()).await;
+        let (server, mut fake) = LanguageServer::fake(&cx).await;
 
         let (message_tx, message_rx) = channel::unbounded();
         let (diagnostics_tx, diagnostics_rx) = channel::unbounded();
@@ -741,9 +829,9 @@ mod tests {
             "file://b/c"
         );
 
+        fake.handle_request::<request::Shutdown, _>(|_| ());
+
         drop(server);
-        let (shutdown_request, _) = fake.receive_request::<request::Shutdown>().await;
-        fake.respond(shutdown_request, ()).await;
         fake.receive_notification::<notification::Exit>().await;
     }
 

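Note on the test_fake changes above: instead of awaiting each incoming request explicitly (receive_request / respond), a test now registers a response closure before the request arrives. A minimal sketch of that pattern, assuming handle_request maps the request's params to its result value exactly as the Shutdown handler above does:

    // Sketch only; written against the fake-server API as it appears in this diff.
    let (server, mut fake) = LanguageServer::fake(&cx).await;

    // The closure runs whenever the real client sends a Shutdown request and
    // its return value becomes the response (here, Shutdown's unit result).
    fake.handle_request::<request::Shutdown, _>(|_| ());

    // Dropping the client-side handle triggers shutdown; the canned handler
    // answers it, and the Exit notification can then be observed.
    drop(server);
    fake.receive_notification::<notification::Exit>().await;
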
crates/project/Cargo.toml šŸ”—

@@ -7,7 +7,11 @@ edition = "2021"
 path = "src/project.rs"
 
 [features]
-test-support = ["language/test-support", "text/test-support"]
+test-support = [
+    "client/test-support",
+    "language/test-support",
+    "text/test-support",
+]
 
 [dependencies]
 text = { path = "../text" }
@@ -45,6 +49,5 @@ lsp = { path = "../lsp", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
 rpc = { path = "../rpc", features = ["test-support"] }
 rand = "0.8.3"
-simplelog = "0.9"
 tempdir = { version = "0.3.7" }
 unindent = "0.1.7"

crates/project/src/fs.rs šŸ”—

@@ -13,6 +13,11 @@ use text::Rope;
 
 #[async_trait::async_trait]
 pub trait Fs: Send + Sync {
+    async fn create_dir(&self, path: &Path) -> Result<()>;
+    async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()>;
+    async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()>;
+    async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>;
+    async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>;
     async fn load(&self, path: &Path) -> Result<String>;
     async fn save(&self, path: &Path, text: &Rope) -> Result<()>;
     async fn canonicalize(&self, path: &Path) -> Result<PathBuf>;
@@ -32,6 +37,24 @@ pub trait Fs: Send + Sync {
     fn as_fake(&self) -> &FakeFs;
 }
 
+#[derive(Copy, Clone, Default)]
+pub struct CreateOptions {
+    pub overwrite: bool,
+    pub ignore_if_exists: bool,
+}
+
+#[derive(Copy, Clone, Default)]
+pub struct RenameOptions {
+    pub overwrite: bool,
+    pub ignore_if_exists: bool,
+}
+
+#[derive(Copy, Clone, Default)]
+pub struct RemoveOptions {
+    pub recursive: bool,
+    pub ignore_if_not_exists: bool,
+}
+
 #[derive(Clone, Debug)]
 pub struct Metadata {
     pub inode: u64,
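The Into conversions used later in this PR (op.options.map(Into::into).unwrap_or_default() in project.rs) are not part of this hunk. A plausible sketch of one of them, assuming the optional booleans on lsp_types' CreateFileOptions (overwrite, ignore_if_exists) map directly onto the fields above:

    // Hypothetical conversion; the real impls live elsewhere in this change.
    impl From<lsp::CreateFileOptions> for CreateOptions {
        fn from(options: lsp::CreateFileOptions) -> Self {
            Self {
                overwrite: options.overwrite.unwrap_or(false),
                ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
            }
        }
    }
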
@@ -44,6 +67,60 @@ pub struct RealFs;
 
 #[async_trait::async_trait]
 impl Fs for RealFs {
+    async fn create_dir(&self, path: &Path) -> Result<()> {
+        Ok(smol::fs::create_dir_all(path).await?)
+    }
+
+    async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> {
+        let mut open_options = smol::fs::OpenOptions::new();
+        open_options.write(true).create(true);
+        if options.overwrite {
+            open_options.truncate(true);
+        } else if !options.ignore_if_exists {
+            open_options.create_new(true);
+        }
+        open_options.open(path).await?;
+        Ok(())
+    }
+
+    async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> {
+        if !options.overwrite && smol::fs::metadata(target).await.is_ok() {
+            if options.ignore_if_exists {
+                return Ok(());
+            } else {
+                return Err(anyhow!("{target:?} already exists"));
+            }
+        }
+
+        smol::fs::rename(source, target).await?;
+        Ok(())
+    }
+
+    async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        let result = if options.recursive {
+            smol::fs::remove_dir_all(path).await
+        } else {
+            smol::fs::remove_dir(path).await
+        };
+        match result {
+            Ok(()) => Ok(()),
+            Err(err) if err.kind() == io::ErrorKind::NotFound && options.ignore_if_not_exists => {
+                Ok(())
+            }
+            Err(err) => Err(err)?,
+        }
+    }
+
+    async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        match smol::fs::remove_file(path).await {
+            Ok(()) => Ok(()),
+            Err(err) if err.kind() == io::ErrorKind::NotFound && options.ignore_if_not_exists => {
+                Ok(())
+            }
+            Err(err) => Err(err)?,
+        }
+    }
+
     async fn load(&self, path: &Path) -> Result<String> {
         let mut file = smol::fs::File::open(path).await?;
         let mut text = String::new();
@@ -162,15 +239,19 @@ impl FakeFsState {
         }
     }
 
-    async fn emit_event(&mut self, paths: &[&Path]) {
+    async fn emit_event<I, T>(&mut self, paths: I)
+    where
+        I: IntoIterator<Item = T>,
+        T: Into<PathBuf>,
+    {
         use postage::prelude::Sink as _;
 
         let events = paths
-            .iter()
+            .into_iter()
             .map(|path| fsevent::Event {
                 event_id: 0,
                 flags: fsevent::StreamFlags::empty(),
-                path: path.to_path_buf(),
+                path: path.into(),
             })
             .collect();
 
@@ -292,46 +373,163 @@ impl FakeFs {
         }
         .boxed()
     }
+}
 
-    pub async fn remove(&self, path: &Path) -> Result<()> {
+#[cfg(any(test, feature = "test-support"))]
+#[async_trait::async_trait]
+impl Fs for FakeFs {
+    async fn create_dir(&self, path: &Path) -> Result<()> {
+        self.executor.simulate_random_delay().await;
+        let state = &mut *self.state.lock().await;
+        let mut ancestor_path = PathBuf::new();
+        let mut created_dir_paths = Vec::new();
+        for component in path.components() {
+            ancestor_path.push(component);
+            let entry = state
+                .entries
+                .entry(ancestor_path.clone())
+                .or_insert_with(|| {
+                    let inode = state.next_inode;
+                    state.next_inode += 1;
+                    created_dir_paths.push(ancestor_path.clone());
+                    FakeFsEntry {
+                        metadata: Metadata {
+                            inode,
+                            mtime: SystemTime::now(),
+                            is_dir: true,
+                            is_symlink: false,
+                        },
+                        content: None,
+                    }
+                });
+            if !entry.metadata.is_dir {
+                return Err(anyhow!(
+                    "cannot create directory because {:?} is a file",
+                    ancestor_path
+                ));
+            }
+        }
+        state.emit_event(&created_dir_paths).await;
+
+        Ok(())
+    }
+
+    async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> {
+        self.executor.simulate_random_delay().await;
         let mut state = self.state.lock().await;
         state.validate_path(path)?;
-        state.entries.retain(|path, _| !path.starts_with(path));
+        if let Some(entry) = state.entries.get_mut(path) {
+            if entry.metadata.is_dir || entry.metadata.is_symlink {
+                return Err(anyhow!(
+                    "cannot create file because {:?} is a dir or a symlink",
+                    path
+                ));
+            }
+
+            if options.overwrite {
+                entry.metadata.mtime = SystemTime::now();
+                entry.content = Some(Default::default());
+            } else if !options.ignore_if_exists {
+                return Err(anyhow!(
+                    "cannot create file because {:?} already exists",
+                    path
+                ));
+            }
+        } else {
+            let inode = state.next_inode;
+            state.next_inode += 1;
+            let entry = FakeFsEntry {
+                metadata: Metadata {
+                    inode,
+                    mtime: SystemTime::now(),
+                    is_dir: false,
+                    is_symlink: false,
+                },
+                content: Some(Default::default()),
+            };
+            state.entries.insert(path.to_path_buf(), entry);
+        }
         state.emit_event(&[path]).await;
+
         Ok(())
     }
 
-    pub async fn rename(&self, source: &Path, target: &Path) -> Result<()> {
+    async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> {
         let mut state = self.state.lock().await;
         state.validate_path(source)?;
         state.validate_path(target)?;
-        if state.entries.contains_key(target) {
-            Err(anyhow!("target path already exists"))
-        } else {
-            let mut removed = Vec::new();
-            state.entries.retain(|path, entry| {
-                if let Ok(relative_path) = path.strip_prefix(source) {
-                    removed.push((relative_path.to_path_buf(), entry.clone()));
-                    false
-                } else {
-                    true
+
+        if !options.overwrite && state.entries.contains_key(target) {
+            if options.ignore_if_exists {
+                return Ok(());
+            } else {
+                return Err(anyhow!("{target:?} already exists"));
+            }
+        }
+
+        let mut removed = Vec::new();
+        state.entries.retain(|path, entry| {
+            if let Ok(relative_path) = path.strip_prefix(source) {
+                removed.push((relative_path.to_path_buf(), entry.clone()));
+                false
+            } else {
+                true
+            }
+        });
+
+        for (relative_path, entry) in removed {
+            let new_path = target.join(relative_path);
+            state.entries.insert(new_path, entry);
+        }
+
+        state.emit_event(&[source, target]).await;
+        Ok(())
+    }
+
+    async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        let mut state = self.state.lock().await;
+        state.validate_path(path)?;
+        if let Some(entry) = state.entries.get(path) {
+            if !entry.metadata.is_dir {
+                return Err(anyhow!("cannot remove {path:?} because it is not a dir"));
+            }
+
+            if !options.recursive {
+                let descendants = state
+                    .entries
+                    .keys()
+                    .filter(|existing_path| existing_path.starts_with(path))
+                    .count();
+                if descendants > 1 {
+                    return Err(anyhow!("{path:?} is not empty"));
                 }
-            });
+            }
+
+            state.entries.retain(|existing_path, _| !existing_path.starts_with(path));
+            state.emit_event(&[path]).await;
+        } else if !options.ignore_if_not_exists {
+            return Err(anyhow!("{path:?} does not exist"));
+        }
 
-            for (relative_path, entry) in removed {
-                let new_path = target.join(relative_path);
-                state.entries.insert(new_path, entry);
+        Ok(())
+    }
+
+    async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> {
+        let mut state = self.state.lock().await;
+        state.validate_path(path)?;
+        if let Some(entry) = state.entries.get(path) {
+            if entry.metadata.is_dir {
+                return Err(anyhow!("cannot remove {path:?} because it is not a file"));
             }
 
-            state.emit_event(&[source, target]).await;
-            Ok(())
+            state.entries.remove(path);
+            state.emit_event(&[path]).await;
+        } else if !options.ignore_if_not_exists {
+            return Err(anyhow!("{path:?} does not exist"));
         }
+        Ok(())
     }
-}
 
-#[cfg(any(test, feature = "test-support"))]
-#[async_trait::async_trait]
-impl Fs for FakeFs {
     async fn load(&self, path: &Path) -> Result<String> {
         self.executor.simulate_random_delay().await;
         let state = self.state.lock().await;

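Taken together, the new Fs methods give code-action resource operations (create/rename/delete) a single async surface that both RealFs and the test-only FakeFs implement. A short usage sketch; the paths and the helper function are illustrative only:

    // Illustrative helper showing the option structs in use.
    async fn scratch_project(fs: &dyn Fs) -> anyhow::Result<()> {
        fs.create_dir(Path::new("/tmp/project/src")).await?;
        fs.create_file(
            Path::new("/tmp/project/src/lib.rs"),
            CreateOptions { overwrite: false, ignore_if_exists: true },
        )
        .await?;
        fs.rename(
            Path::new("/tmp/project/src/lib.rs"),
            Path::new("/tmp/project/src/main.rs"),
            RenameOptions { overwrite: true, ..Default::default() },
        )
        .await?;
        fs.remove_dir(
            Path::new("/tmp/project"),
            RemoveOptions { recursive: true, ignore_if_not_exists: true },
        )
        .await
    }
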
crates/project/src/project.rs šŸ”—

@@ -13,9 +13,11 @@ use gpui::{
     WeakModelHandle,
 };
 use language::{
+    point_from_lsp,
     proto::{deserialize_anchor, serialize_anchor},
-    range_from_lsp, Bias, Buffer, Diagnostic, DiagnosticEntry, File as _, Language,
-    LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16,
+    range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, CompletionLabel,
+    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, PointUtf16, ToLspPosition,
+    ToOffset, ToPointUtf16, Transaction,
 };
 use lsp::{DiagnosticSeverity, LanguageServer};
 use postage::{prelude::Stream, watch};
@@ -25,6 +27,7 @@ use std::{
     ops::Range,
     path::{Path, PathBuf},
     sync::{atomic::AtomicBool, Arc},
+    time::Instant,
 };
 use util::{post_inc, ResultExt, TryFutureExt as _};
 
@@ -107,6 +110,9 @@ pub struct Definition {
     pub target_range: Range<language::Anchor>,
 }
 
+#[derive(Default)]
+pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
+
 impl DiagnosticSummary {
     fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
         let mut this = Self {
@@ -274,31 +280,43 @@ impl Project {
                 user_store,
                 fs,
                 subscriptions: vec![
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
-                    client.subscribe_to_entity(
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
+                    client.add_entity_message_handler(
+                        remote_id,
+                        cx,
+                        Self::handle_remove_collaborator,
+                    ),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree),
+                    client.add_entity_message_handler(
+                        remote_id,
+                        cx,
+                        Self::handle_unregister_worktree,
+                    ),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
+                    client.add_entity_message_handler(
                         remote_id,
                         cx,
                         Self::handle_update_diagnostic_summary,
                     ),
-                    client.subscribe_to_entity(
+                    client.add_entity_message_handler(
                         remote_id,
                         cx,
                         Self::handle_disk_based_diagnostics_updating,
                     ),
-                    client.subscribe_to_entity(
+                    client.add_entity_message_handler(
                         remote_id,
                         cx,
                         Self::handle_disk_based_diagnostics_updated,
                     ),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer_file),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_reloaded),
-                    client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
+                    client.add_entity_message_handler(
+                        remote_id,
+                        cx,
+                        Self::handle_update_buffer_file,
+                    ),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded),
+                    client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
                 ],
                 client,
                 client_state: ProjectClientState::Remote {
@@ -316,6 +334,15 @@ impl Project {
         }))
     }
 
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
+        let languages = Arc::new(LanguageRegistry::new());
+        let http_client = client::test::FakeHttpClient::with_404_response();
+        let client = client::Client::new(http_client.clone());
+        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
+    }
+
     fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
         if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
             *remote_id_tx.borrow_mut() = remote_id;
@@ -325,22 +352,24 @@ impl Project {
         if let Some(remote_id) = remote_id {
             let client = &self.client;
             self.subscriptions.extend([
-                client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions),
-                client.subscribe_to_entity(
+                client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer),
+                client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions),
+                client.add_entity_request_handler(
                     remote_id,
                     cx,
                     Self::handle_apply_additional_edits_for_completion,
                 ),
-                client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action),
+                client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition),
             ]);
         }
     }
@@ -596,9 +625,44 @@ impl Project {
                 })
                 .await?;
             let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
-            this.update(&mut cx, |this, cx| {
-                this.deserialize_remote_buffer(buffer, cx)
-            })
+            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
+        })
+    }
+
+    fn open_local_buffer_from_lsp_path(
+        &mut self,
+        abs_path: lsp::Url,
+        lang_name: String,
+        lang_server: Arc<LanguageServer>,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<ModelHandle<Buffer>>> {
+        cx.spawn(|this, mut cx| async move {
+            let abs_path = abs_path
+                .to_file_path()
+                .map_err(|_| anyhow!("can't convert URI to path"))?;
+            let (worktree, relative_path) = if let Some(result) =
+                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
+            {
+                result
+            } else {
+                let worktree = this
+                    .update(&mut cx, |this, cx| {
+                        this.create_local_worktree(&abs_path, true, cx)
+                    })
+                    .await?;
+                this.update(&mut cx, |this, cx| {
+                    this.language_servers
+                        .insert((worktree.read(cx).id(), lang_name), lang_server);
+                });
+                (worktree, PathBuf::new())
+            };
+
+            let project_path = ProjectPath {
+                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
+                path: relative_path.into(),
+            };
+            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
+                .await
         })
     }
 
@@ -730,7 +794,7 @@ impl Project {
         if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
             if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                 buffer.update(cx, |buffer, cx| {
-                    buffer.update_diagnostics(None, diagnostics, cx).log_err();
+                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                 });
             }
         }
@@ -815,7 +879,7 @@ impl Project {
         // Process all the LSP events.
         cx.spawn_weak(|this, mut cx| async move {
             while let Ok(message) = diagnostics_rx.recv().await {
-                let this = cx.read(|cx| this.upgrade(cx))?;
+                let this = this.upgrade(&cx)?;
                 match message {
                     LspEvent::DiagnosticsStart => {
                         this.update(&mut cx, |this, cx| {
@@ -975,7 +1039,7 @@ impl Project {
                     .map_or(false, |file| *file.path() == project_path.path)
                 {
                     buffer.update(cx, |buffer, cx| {
-                        buffer.update_diagnostics(version, diagnostics.clone(), cx)
+                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                     })?;
                     break;
                 }
@@ -991,7 +1055,107 @@ impl Project {
         Ok(())
     }
 
-    pub fn definition<T: ToOffset>(
+    pub fn format(
+        &self,
+        buffers: HashSet<ModelHandle<Buffer>>,
+        push_to_history: bool,
+        cx: &mut ModelContext<Project>,
+    ) -> Task<Result<ProjectTransaction>> {
+        let mut local_buffers = Vec::new();
+        let mut remote_buffers = None;
+        for buffer_handle in buffers {
+            let buffer = buffer_handle.read(cx);
+            let worktree;
+            if let Some(file) = File::from_dyn(buffer.file()) {
+                worktree = file.worktree.clone();
+                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
+                    let lang_server;
+                    if let Some(lang) = buffer.language() {
+                        if let Some(server) = self
+                            .language_servers
+                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
+                        {
+                            lang_server = server.clone();
+                        } else {
+                            return Task::ready(Ok(Default::default()));
+                        };
+                    } else {
+                        return Task::ready(Ok(Default::default()));
+                    }
+
+                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
+                } else {
+                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
+                }
+            } else {
+                return Task::ready(Ok(Default::default()));
+            }
+        }
+
+        let remote_buffers = self.remote_id().zip(remote_buffers);
+        let client = self.client.clone();
+
+        cx.spawn(|this, mut cx| async move {
+            let mut project_transaction = ProjectTransaction::default();
+
+            if let Some((project_id, remote_buffers)) = remote_buffers {
+                let response = client
+                    .request(proto::FormatBuffers {
+                        project_id,
+                        buffer_ids: remote_buffers
+                            .iter()
+                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
+                            .collect(),
+                    })
+                    .await?
+                    .transaction
+                    .ok_or_else(|| anyhow!("missing transaction"))?;
+                project_transaction = this
+                    .update(&mut cx, |this, cx| {
+                        this.deserialize_project_transaction(response, push_to_history, cx)
+                    })
+                    .await?;
+            }
+
+            for (buffer, buffer_abs_path, lang_server) in local_buffers {
+                let lsp_edits = lang_server
+                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
+                        text_document: lsp::TextDocumentIdentifier::new(
+                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
+                        ),
+                        options: Default::default(),
+                        work_done_progress_params: Default::default(),
+                    })
+                    .await?;
+
+                if let Some(lsp_edits) = lsp_edits {
+                    let edits = buffer
+                        .update(&mut cx, |buffer, cx| {
+                            buffer.edits_from_lsp(lsp_edits, None, cx)
+                        })
+                        .await?;
+                    buffer.update(&mut cx, |buffer, cx| {
+                        buffer.finalize_last_transaction();
+                        buffer.start_transaction();
+                        for (range, text) in edits {
+                            buffer.edit([range], text, cx);
+                        }
+                        if buffer.end_transaction(cx).is_some() {
+                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
+                            if !push_to_history {
+                                buffer.forget_transaction(transaction.id);
+                            }
+                            project_transaction.0.insert(cx.handle(), transaction);
+                        }
+                    });
+                }
+            }
+
+            Ok(project_transaction)
+        })
+    }
+
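Project::format above batches local and remote buffers: local ones go through each buffer's language server (textDocument/formatting), remote ones through a single FormatBuffers RPC whose response is deserialized into a ProjectTransaction. A hypothetical caller, e.g. a format-on-save path; the surrounding update/await plumbing is illustrative:

    // Format one buffer and keep the applied edits in its undo history.
    let format = project.update(cx, |project, cx| {
        project.format(HashSet::from_iter([buffer.clone()]), true, cx)
    });
    let project_transaction = format.await?;
    // project_transaction.0 maps every buffer that was edited to the
    // Transaction that the formatting produced for it.
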
+    pub fn definition<T: ToPointUtf16>(
         &self,
         source_buffer_handle: &ModelHandle<Buffer>,
         position: T,
@@ -1005,11 +1169,12 @@ impl Project {
             worktree = file.worktree.clone();
             buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
         } else {
-            return Task::ready(Err(anyhow!("buffer does not belong to any worktree")));
+            return Task::ready(Ok(Default::default()));
         };
 
+        let position = position.to_point_utf16(source_buffer);
+
         if worktree.read(cx).as_local().is_some() {
-            let point = source_buffer.offset_to_point_utf16(position.to_offset(source_buffer));
             let buffer_abs_path = buffer_abs_path.unwrap();
             let lang_name;
             let lang_server;
@@ -1021,10 +1186,10 @@ impl Project {
                 {
                     lang_server = server.clone();
                 } else {
-                    return Task::ready(Err(anyhow!("buffer does not have a language server")));
+                    return Task::ready(Ok(Default::default()));
                 };
             } else {
-                return Task::ready(Err(anyhow!("buffer does not have a language")));
+                return Task::ready(Ok(Default::default()));
             }
 
             cx.spawn(|this, mut cx| async move {
@@ -1034,7 +1199,7 @@ impl Project {
                             text_document: lsp::TextDocumentIdentifier::new(
                                 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                             ),
-                            position: lsp::Position::new(point.row, point.column),
+                            position: lsp::Position::new(position.row, position.column),
                         },
                         work_done_progress_params: Default::default(),
                         partial_result_params: Default::default(),
@@ -1061,42 +1226,23 @@ impl Project {
                     }
 
                     for (target_uri, target_range) in unresolved_locations {
-                        let abs_path = target_uri
-                            .to_file_path()
-                            .map_err(|_| anyhow!("invalid target path"))?;
-
-                        let (worktree, relative_path) = if let Some(result) =
-                            this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
-                        {
-                            result
-                        } else {
-                            let worktree = this
-                                .update(&mut cx, |this, cx| {
-                                    this.create_local_worktree(&abs_path, true, cx)
-                                })
-                                .await?;
-                            this.update(&mut cx, |this, cx| {
-                                this.language_servers.insert(
-                                    (worktree.read(cx).id(), lang_name.clone()),
-                                    lang_server.clone(),
-                                );
-                            });
-                            (worktree, PathBuf::new())
-                        };
-
-                        let project_path = ProjectPath {
-                            worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
-                            path: relative_path.into(),
-                        };
                         let target_buffer_handle = this
-                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
+                            .update(&mut cx, |this, cx| {
+                                this.open_local_buffer_from_lsp_path(
+                                    target_uri,
+                                    lang_name.clone(),
+                                    lang_server.clone(),
+                                    cx,
+                                )
+                            })
                             .await?;
+
                         cx.read(|cx| {
                             let target_buffer = target_buffer_handle.read(cx);
                             let target_start = target_buffer
-                                .clip_point_utf16(target_range.start.to_point_utf16(), Bias::Left);
+                                .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                             let target_end = target_buffer
-                                .clip_point_utf16(target_range.end.to_point_utf16(), Bias::Left);
+                                .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
                             definitions.push(Definition {
                                 target_buffer: target_buffer_handle,
                                 target_range: target_buffer.anchor_after(target_start)
@@ -1120,7 +1266,7 @@ impl Project {
                 this.update(&mut cx, |this, cx| {
                     let mut definitions = Vec::new();
                     for definition in response.definitions {
-                        let target_buffer = this.deserialize_remote_buffer(
+                        let target_buffer = this.deserialize_buffer(
                             definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?,
                             cx,
                         )?;
@@ -1141,6 +1287,479 @@ impl Project {
                     Ok(definitions)
                 })
             })
+        } else {
+            Task::ready(Ok(Default::default()))
+        }
+    }
+
+    pub fn completions<T: ToPointUtf16>(
+        &self,
+        source_buffer_handle: &ModelHandle<Buffer>,
+        position: T,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<Vec<Completion>>> {
+        let source_buffer_handle = source_buffer_handle.clone();
+        let source_buffer = source_buffer_handle.read(cx);
+        let buffer_id = source_buffer.remote_id();
+        let language = source_buffer.language().cloned();
+        let worktree;
+        let buffer_abs_path;
+        if let Some(file) = File::from_dyn(source_buffer.file()) {
+            worktree = file.worktree.clone();
+            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
+        } else {
+            return Task::ready(Ok(Default::default()));
+        };
+
+        let position = position.to_point_utf16(source_buffer);
+        let anchor = source_buffer.anchor_after(position);
+
+        if worktree.read(cx).as_local().is_some() {
+            let buffer_abs_path = buffer_abs_path.unwrap();
+            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
+                server
+            } else {
+                return Task::ready(Ok(Default::default()));
+            };
+
+            cx.spawn(|_, cx| async move {
+                let completions = lang_server
+                .request::<lsp::request::Completion>(lsp::CompletionParams {
+                    text_document_position: lsp::TextDocumentPositionParams::new(
+                        lsp::TextDocumentIdentifier::new(
+                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
+                        ),
+                        position.to_lsp_position(),
+                    ),
+                    context: Default::default(),
+                    work_done_progress_params: Default::default(),
+                    partial_result_params: Default::default(),
+                })
+                .await?;
+
+                let completions = if let Some(completions) = completions {
+                    match completions {
+                        lsp::CompletionResponse::Array(completions) => completions,
+                        lsp::CompletionResponse::List(list) => list.items,
+                    }
+                } else {
+                    Default::default()
+                };
+
+                source_buffer_handle.read_with(&cx, |this, _| {
+                    Ok(completions.into_iter().filter_map(|lsp_completion| {
+                        let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
+                            lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
+                            lsp::CompletionTextEdit::InsertAndReplace(_) => {
+                                log::info!("received an insert and replace completion but we don't yet support that");
+                                return None
+                            },
+                        };
+
+                        let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
+                        let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
+                        if clipped_start == old_range.start && clipped_end == old_range.end {
+                            Some(Completion {
+                                old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
+                                new_text,
+                                label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)),
+                                lsp_completion,
+                            })
+                        } else {
+                            None
+                        }
+                    }).collect())
+                })
+
+            })
+        } else if let Some(project_id) = self.remote_id() {
+            let rpc = self.client.clone();
+            cx.foreground().spawn(async move {
+                let response = rpc
+                    .request(proto::GetCompletions {
+                        project_id,
+                        buffer_id,
+                        position: Some(language::proto::serialize_anchor(&anchor)),
+                    })
+                    .await?;
+                response
+                    .completions
+                    .into_iter()
+                    .map(|completion| {
+                        language::proto::deserialize_completion(completion, language.as_ref())
+                    })
+                    .collect()
+            })
+        } else {
+            Task::ready(Ok(Default::default()))
+        }
+    }
+
+    pub fn apply_additional_edits_for_completion(
+        &self,
+        buffer_handle: ModelHandle<Buffer>,
+        completion: Completion,
+        push_to_history: bool,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<Option<Transaction>>> {
+        let buffer = buffer_handle.read(cx);
+        let buffer_id = buffer.remote_id();
+
+        if self.is_local() {
+            let lang_server = if let Some(language_server) = buffer.language_server() {
+                language_server.clone()
+            } else {
+                return Task::ready(Err(anyhow!("buffer does not have a language server")));
+            };
+
+            cx.spawn(|_, mut cx| async move {
+                let resolved_completion = lang_server
+                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
+                    .await?;
+                if let Some(edits) = resolved_completion.additional_text_edits {
+                    let edits = buffer_handle
+                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
+                        .await?;
+                    buffer_handle.update(&mut cx, |buffer, cx| {
+                        buffer.finalize_last_transaction();
+                        buffer.start_transaction();
+                        for (range, text) in edits {
+                            buffer.edit([range], text, cx);
+                        }
+                        let transaction = if buffer.end_transaction(cx).is_some() {
+                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
+                            if !push_to_history {
+                                buffer.forget_transaction(transaction.id);
+                            }
+                            Some(transaction)
+                        } else {
+                            None
+                        };
+                        Ok(transaction)
+                    })
+                } else {
+                    Ok(None)
+                }
+            })
+        } else if let Some(project_id) = self.remote_id() {
+            let client = self.client.clone();
+            cx.spawn(|_, mut cx| async move {
+                let response = client
+                    .request(proto::ApplyCompletionAdditionalEdits {
+                        project_id,
+                        buffer_id,
+                        completion: Some(language::proto::serialize_completion(&completion)),
+                    })
+                    .await?;
+
+                if let Some(transaction) = response.transaction {
+                    let transaction = language::proto::deserialize_transaction(transaction)?;
+                    buffer_handle
+                        .update(&mut cx, |buffer, _| {
+                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
+                        })
+                        .await;
+                    if push_to_history {
+                        buffer_handle.update(&mut cx, |buffer, _| {
+                            buffer.push_transaction(transaction.clone(), Instant::now());
+                        });
+                    }
+                    Ok(Some(transaction))
+                } else {
+                    Ok(None)
+                }
+            })
+        } else {
+            Task::ready(Err(anyhow!("project does not have a remote id")))
+        }
+    }
+
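The two methods above split completion work in half: completions queries the language server (or the GetCompletions RPC) and converts each item into an anchored Completion, while apply_additional_edits_for_completion resolves the chosen item and applies only its additional_text_edits, returning the transaction so the caller can decide whether it joins the undo history. A hypothetical flow wiring them together; the primary new_text insertion is assumed to happen in the editor itself:

    let completions = project
        .update(cx, |project, cx| project.completions(&buffer, cursor, cx))
        .await?;
    if let Some(completion) = completions.into_iter().next() {
        // After the editor has inserted completion.new_text, apply whatever
        // extra edits the server attaches when the item is resolved.
        project
            .update(cx, |project, cx| {
                project.apply_additional_edits_for_completion(
                    buffer.clone(),
                    completion,
                    true, // push_to_history
                    cx,
                )
            })
            .await?;
    }
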
+    pub fn code_actions<T: ToOffset>(
+        &self,
+        buffer_handle: &ModelHandle<Buffer>,
+        range: Range<T>,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<Vec<CodeAction>>> {
+        let buffer_handle = buffer_handle.clone();
+        let buffer = buffer_handle.read(cx);
+        let buffer_id = buffer.remote_id();
+        let worktree;
+        let buffer_abs_path;
+        if let Some(file) = File::from_dyn(buffer.file()) {
+            worktree = file.worktree.clone();
+            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
+        } else {
+            return Task::ready(Ok(Default::default()));
+        };
+        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
+
+        if worktree.read(cx).as_local().is_some() {
+            let buffer_abs_path = buffer_abs_path.unwrap();
+            let lang_name;
+            let lang_server;
+            if let Some(lang) = buffer.language() {
+                lang_name = lang.name().to_string();
+                if let Some(server) = self
+                    .language_servers
+                    .get(&(worktree.read(cx).id(), lang_name.clone()))
+                {
+                    lang_server = server.clone();
+                } else {
+                    return Task::ready(Ok(Default::default()));
+                };
+            } else {
+                return Task::ready(Ok(Default::default()));
+            }
+
+            let actions =
+                lang_server.request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
+                    text_document: lsp::TextDocumentIdentifier::new(
+                        lsp::Url::from_file_path(buffer_abs_path).unwrap(),
+                    ),
+                    range: lsp::Range::new(
+                        range.start.to_point_utf16(buffer).to_lsp_position(),
+                        range.end.to_point_utf16(buffer).to_lsp_position(),
+                    ),
+                    work_done_progress_params: Default::default(),
+                    partial_result_params: Default::default(),
+                    context: lsp::CodeActionContext {
+                        diagnostics: Default::default(),
+                        only: Some(vec![
+                            lsp::CodeActionKind::QUICKFIX,
+                            lsp::CodeActionKind::REFACTOR,
+                            lsp::CodeActionKind::REFACTOR_EXTRACT,
+                        ]),
+                    },
+                });
+            cx.foreground().spawn(async move {
+                Ok(actions
+                    .await?
+                    .unwrap_or_default()
+                    .into_iter()
+                    .filter_map(|entry| {
+                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
+                            Some(CodeAction {
+                                range: range.clone(),
+                                lsp_action,
+                            })
+                        } else {
+                            None
+                        }
+                    })
+                    .collect())
+            })
+        } else if let Some(project_id) = self.remote_id() {
+            let rpc = self.client.clone();
+            cx.foreground().spawn(async move {
+                let response = rpc
+                    .request(proto::GetCodeActions {
+                        project_id,
+                        buffer_id,
+                        start: Some(language::proto::serialize_anchor(&range.start)),
+                        end: Some(language::proto::serialize_anchor(&range.end)),
+                    })
+                    .await?;
+                response
+                    .actions
+                    .into_iter()
+                    .map(language::proto::deserialize_code_action)
+                    .collect()
+            })
+        } else {
+            Task::ready(Ok(Default::default()))
+        }
+    }
+
+    pub fn apply_code_action(
+        &self,
+        buffer_handle: ModelHandle<Buffer>,
+        mut action: CodeAction,
+        push_to_history: bool,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<ProjectTransaction>> {
+        if self.is_local() {
+            let buffer = buffer_handle.read(cx);
+            let lang_name = if let Some(lang) = buffer.language() {
+                lang.name().to_string()
+            } else {
+                return Task::ready(Ok(Default::default()));
+            };
+            let lang_server = if let Some(language_server) = buffer.language_server() {
+                language_server.clone()
+            } else {
+                return Task::ready(Err(anyhow!("buffer does not have a language server")));
+            };
+            let range = action.range.to_point_utf16(buffer);
+            let fs = self.fs.clone();
+
+            cx.spawn(|this, mut cx| async move {
+                if let Some(lsp_range) = action
+                    .lsp_action
+                    .data
+                    .as_mut()
+                    .and_then(|d| d.get_mut("codeActionParams"))
+                    .and_then(|d| d.get_mut("range"))
+                {
+                    *lsp_range = serde_json::to_value(&lsp::Range::new(
+                        range.start.to_lsp_position(),
+                        range.end.to_lsp_position(),
+                    ))
+                    .unwrap();
+                    action.lsp_action = lang_server
+                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
+                        .await?;
+                } else {
+                    let actions = this
+                        .update(&mut cx, |this, cx| {
+                            this.code_actions(&buffer_handle, action.range, cx)
+                        })
+                        .await?;
+                    action.lsp_action = actions
+                        .into_iter()
+                        .find(|a| a.lsp_action.title == action.lsp_action.title)
+                        .ok_or_else(|| anyhow!("code action is outdated"))?
+                        .lsp_action;
+                }
+
+                let mut operations = Vec::new();
+                if let Some(edit) = action.lsp_action.edit {
+                    if let Some(document_changes) = edit.document_changes {
+                        match document_changes {
+                            lsp::DocumentChanges::Edits(edits) => operations
+                                .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)),
+                            lsp::DocumentChanges::Operations(ops) => operations = ops,
+                        }
+                    } else if let Some(changes) = edit.changes {
+                        operations.extend(changes.into_iter().map(|(uri, edits)| {
+                            lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
+                                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
+                                    uri,
+                                    version: None,
+                                },
+                                edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
+                            })
+                        }));
+                    }
+                }
+
+                let mut project_transaction = ProjectTransaction::default();
+                for operation in operations {
+                    match operation {
+                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
+                            let abs_path = op
+                                .uri
+                                .to_file_path()
+                                .map_err(|_| anyhow!("can't convert URI to path"))?;
+
+                            if let Some(parent_path) = abs_path.parent() {
+                                fs.create_dir(parent_path).await?;
+                            }
+                            if abs_path.ends_with("/") {
+                                fs.create_dir(&abs_path).await?;
+                            } else {
+                                fs.create_file(
+                                    &abs_path,
+                                    op.options.map(Into::into).unwrap_or_default(),
+                                )
+                                .await?;
+                            }
+                        }
+                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
+                            let source_abs_path = op
+                                .old_uri
+                                .to_file_path()
+                                .map_err(|_| anyhow!("can't convert URI to path"))?;
+                            let target_abs_path = op
+                                .new_uri
+                                .to_file_path()
+                                .map_err(|_| anyhow!("can't convert URI to path"))?;
+                            fs.rename(
+                                &source_abs_path,
+                                &target_abs_path,
+                                op.options.map(Into::into).unwrap_or_default(),
+                            )
+                            .await?;
+                        }
+                        lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
+                            let abs_path = op
+                                .uri
+                                .to_file_path()
+                                .map_err(|_| anyhow!("can't convert URI to path"))?;
+                            let options = op.options.map(Into::into).unwrap_or_default();
+                            if abs_path.ends_with("/") {
+                                fs.remove_dir(&abs_path, options).await?;
+                            } else {
+                                fs.remove_file(&abs_path, options).await?;
+                            }
+                        }
+                        lsp::DocumentChangeOperation::Edit(op) => {
+                            let buffer_to_edit = this
+                                .update(&mut cx, |this, cx| {
+                                    this.open_local_buffer_from_lsp_path(
+                                        op.text_document.uri,
+                                        lang_name.clone(),
+                                        lang_server.clone(),
+                                        cx,
+                                    )
+                                })
+                                .await?;
+
+                            let edits = buffer_to_edit
+                                .update(&mut cx, |buffer, cx| {
+                                    let edits = op.edits.into_iter().map(|edit| match edit {
+                                        lsp::OneOf::Left(edit) => edit,
+                                        lsp::OneOf::Right(edit) => edit.text_edit,
+                                    });
+                                    buffer.edits_from_lsp(edits, op.text_document.version, cx)
+                                })
+                                .await?;
+
+                            let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| {
+                                buffer.finalize_last_transaction();
+                                buffer.start_transaction();
+                                for (range, text) in edits {
+                                    buffer.edit([range], text, cx);
+                                }
+                                let transaction = if buffer.end_transaction(cx).is_some() {
+                                    let transaction =
+                                        buffer.finalize_last_transaction().unwrap().clone();
+                                    if !push_to_history {
+                                        buffer.forget_transaction(transaction.id);
+                                    }
+                                    Some(transaction)
+                                } else {
+                                    None
+                                };
+
+                                transaction
+                            });
+                            if let Some(transaction) = transaction {
+                                project_transaction.0.insert(buffer_to_edit, transaction);
+                            }
+                        }
+                    }
+                }
+
+                Ok(project_transaction)
+            })
+        } else if let Some(project_id) = self.remote_id() {
+            let client = self.client.clone();
+            let request = proto::ApplyCodeAction {
+                project_id,
+                buffer_id: buffer_handle.read(cx).remote_id(),
+                action: Some(language::proto::serialize_code_action(&action)),
+            };
+            cx.spawn(|this, mut cx| async move {
+                let response = client
+                    .request(request)
+                    .await?
+                    .transaction
+                    .ok_or_else(|| anyhow!("missing transaction"))?;
+                this.update(&mut cx, |this, cx| {
+                    this.deserialize_project_transaction(response, push_to_history, cx)
+                })
+                .await
+            })
         } else {
             Task::ready(Err(anyhow!("project does not have a remote id")))
         }
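apply_code_action resolves the chosen action (re-requesting the actions when the server attached no data to resolve), then replays the resulting workspace edit: resource operations go through the Fs trait and text edits are grouped per buffer into a ProjectTransaction. A hypothetical end-to-end flow; selection_range and the surrounding async context are illustrative:

    // List the quickfix/refactor actions for the current selection...
    let actions = project
        .update(cx, |project, cx| {
            project.code_actions(&buffer, selection_range.clone(), cx)
        })
        .await?;
    // ...and apply the first one across the project.
    if let Some(action) = actions.into_iter().next() {
        let project_transaction = project
            .update(cx, |project, cx| {
                project.apply_code_action(buffer.clone(), action, true, cx)
            })
            .await?;
        // The transaction may touch buffers other than the one the action was
        // requested on (file creations, renames, multi-file edits).
    }
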
@@ -1392,469 +2011,541 @@ impl Project {
 
     // RPC message handlers
 
-    fn handle_unshare_project(
-        &mut self,
+    async fn handle_unshare_project(
+        this: ModelHandle<Self>,
         _: TypedEnvelope<proto::UnshareProject>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        if let ProjectClientState::Remote {
-            sharing_has_stopped,
-            ..
-        } = &mut self.client_state
-        {
-            *sharing_has_stopped = true;
-            self.collaborators.clear();
-            cx.notify();
-            Ok(())
-        } else {
-            unreachable!()
-        }
+        this.update(&mut cx, |this, cx| {
+            if let ProjectClientState::Remote {
+                sharing_has_stopped,
+                ..
+            } = &mut this.client_state
+            {
+                *sharing_has_stopped = true;
+                this.collaborators.clear();
+                cx.notify();
+            } else {
+                unreachable!()
+            }
+        });
+
+        Ok(())
     }
 
-    fn handle_add_collaborator(
-        &mut self,
+    async fn handle_add_collaborator(
+        this: ModelHandle<Self>,
         mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let user_store = self.user_store.clone();
+        let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
         let collaborator = envelope
             .payload
             .collaborator
             .take()
             .ok_or_else(|| anyhow!("empty collaborator"))?;
 
-        cx.spawn(|this, mut cx| {
-            async move {
-                let collaborator =
-                    Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
-                this.update(&mut cx, |this, cx| {
-                    this.collaborators
-                        .insert(collaborator.peer_id, collaborator);
-                    cx.notify();
-                });
-                Ok(())
-            }
-            .log_err()
-        })
-        .detach();
+        let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
+        this.update(&mut cx, |this, cx| {
+            this.collaborators
+                .insert(collaborator.peer_id, collaborator);
+            cx.notify();
+        });
 
         Ok(())
     }
 
-    fn handle_remove_collaborator(
-        &mut self,
+    async fn handle_remove_collaborator(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let peer_id = PeerId(envelope.payload.peer_id);
-        let replica_id = self
-            .collaborators
-            .remove(&peer_id)
-            .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
-            .replica_id;
-        self.shared_buffers.remove(&peer_id);
-        for (_, buffer) in &self.open_buffers {
-            if let Some(buffer) = buffer.upgrade(cx) {
-                buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
+        this.update(&mut cx, |this, cx| {
+            let peer_id = PeerId(envelope.payload.peer_id);
+            let replica_id = this
+                .collaborators
+                .remove(&peer_id)
+                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
+                .replica_id;
+            this.shared_buffers.remove(&peer_id);
+            for (_, buffer) in &this.open_buffers {
+                if let Some(buffer) = buffer.upgrade(cx) {
+                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
+                }
             }
-        }
-        cx.notify();
-        Ok(())
+            cx.notify();
+            Ok(())
+        })
     }
 
-    fn handle_share_worktree(
-        &mut self,
+    async fn handle_share_worktree(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::ShareWorktree>,
         client: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
-        let replica_id = self.replica_id();
-        let worktree = envelope
-            .payload
-            .worktree
-            .ok_or_else(|| anyhow!("invalid worktree"))?;
-        let (worktree, load_task) = Worktree::remote(remote_id, replica_id, worktree, client, cx);
-        self.add_worktree(&worktree, cx);
-        load_task.detach();
-        Ok(())
+        this.update(&mut cx, |this, cx| {
+            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
+            let replica_id = this.replica_id();
+            let worktree = envelope
+                .payload
+                .worktree
+                .ok_or_else(|| anyhow!("invalid worktree"))?;
+            let (worktree, load_task) =
+                Worktree::remote(remote_id, replica_id, worktree, client, cx);
+            this.add_worktree(&worktree, cx);
+            load_task.detach();
+            Ok(())
+        })
     }
 
-    fn handle_unregister_worktree(
-        &mut self,
+    async fn handle_unregister_worktree(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::UnregisterWorktree>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
-        self.remove_worktree(worktree_id, cx);
-        Ok(())
+        this.update(&mut cx, |this, cx| {
+            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+            this.remove_worktree(worktree_id, cx);
+            Ok(())
+        })
     }
 
-    fn handle_update_worktree(
-        &mut self,
+    async fn handle_update_worktree(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::UpdateWorktree>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
-        if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
-            worktree.update(cx, |worktree, cx| {
-                let worktree = worktree.as_remote_mut().unwrap();
-                worktree.update_from_remote(envelope, cx)
-            })?;
-        }
-        Ok(())
+        this.update(&mut cx, |this, cx| {
+            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
+                worktree.update(cx, |worktree, cx| {
+                    let worktree = worktree.as_remote_mut().unwrap();
+                    worktree.update_from_remote(envelope, cx)
+                })?;
+            }
+            Ok(())
+        })
     }
 
-    fn handle_update_diagnostic_summary(
-        &mut self,
+    async fn handle_update_diagnostic_summary(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
-        if let Some(worktree) = self.worktree_for_id(worktree_id, cx) {
-            if let Some(summary) = envelope.payload.summary {
-                let project_path = ProjectPath {
-                    worktree_id,
-                    path: Path::new(&summary.path).into(),
-                };
-                worktree.update(cx, |worktree, _| {
-                    worktree
-                        .as_remote_mut()
-                        .unwrap()
-                        .update_diagnostic_summary(project_path.path.clone(), &summary);
-                });
-                cx.emit(Event::DiagnosticsUpdated(project_path));
-            }
-        }
-        Ok(())
+        this.update(&mut cx, |this, cx| {
+            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
+                if let Some(summary) = envelope.payload.summary {
+                    let project_path = ProjectPath {
+                        worktree_id,
+                        path: Path::new(&summary.path).into(),
+                    };
+                    worktree.update(cx, |worktree, _| {
+                        worktree
+                            .as_remote_mut()
+                            .unwrap()
+                            .update_diagnostic_summary(project_path.path.clone(), &summary);
+                    });
+                    cx.emit(Event::DiagnosticsUpdated(project_path));
+                }
+            }
+            Ok(())
+        })
     }
 
-    fn handle_disk_based_diagnostics_updating(
-        &mut self,
+    async fn handle_disk_based_diagnostics_updating(
+        this: ModelHandle<Self>,
         _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        self.disk_based_diagnostics_started(cx);
+        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
         Ok(())
     }
 
-    fn handle_disk_based_diagnostics_updated(
-        &mut self,
+    async fn handle_disk_based_diagnostics_updated(
+        this: ModelHandle<Self>,
         _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        self.disk_based_diagnostics_finished(cx);
+        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
         Ok(())
     }
 
-    pub fn handle_update_buffer(
-        &mut self,
+    async fn handle_update_buffer(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::UpdateBuffer>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let payload = envelope.payload.clone();
-        let buffer_id = payload.buffer_id as usize;
-        let ops = payload
-            .operations
-            .into_iter()
-            .map(|op| language::proto::deserialize_operation(op))
-            .collect::<Result<Vec<_>, _>>()?;
-        if let Some(buffer) = self.open_buffers.get_mut(&buffer_id) {
-            if let Some(buffer) = buffer.upgrade(cx) {
-                buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
+        this.update(&mut cx, |this, cx| {
+            let payload = envelope.payload.clone();
+            let buffer_id = payload.buffer_id as usize;
+            let ops = payload
+                .operations
+                .into_iter()
+                .map(|op| language::proto::deserialize_operation(op))
+                .collect::<Result<Vec<_>, _>>()?;
+            if let Some(buffer) = this.open_buffers.get_mut(&buffer_id) {
+                if let Some(buffer) = buffer.upgrade(cx) {
+                    buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
+                }
             }
-        }
-        Ok(())
+            Ok(())
+        })
     }
 
-    pub fn handle_update_buffer_file(
-        &mut self,
+    async fn handle_update_buffer_file(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::UpdateBufferFile>,
         _: Arc<Client>,
-        cx: &mut ModelContext<Self>,
+        mut cx: AsyncAppContext,
     ) -> Result<()> {
-        let payload = envelope.payload.clone();
-        let buffer_id = payload.buffer_id as usize;
-        let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
-        let worktree = self
-            .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
-            .ok_or_else(|| anyhow!("no such worktree"))?;
-        let file = File::from_proto(file, worktree.clone(), cx)?;
-        let buffer = self
-            .open_buffers
-            .get_mut(&buffer_id)
-            .and_then(|b| b.upgrade(cx))
-            .ok_or_else(|| anyhow!("no such buffer"))?;
-        buffer.update(cx, |buffer, cx| {
-            buffer.file_updated(Box::new(file), cx).detach();
-        });
-
-        Ok(())
+        this.update(&mut cx, |this, cx| {
+            let payload = envelope.payload.clone();
+            let buffer_id = payload.buffer_id as usize;
+            let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
+            let worktree = this
+                .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
+                .ok_or_else(|| anyhow!("no such worktree"))?;
+            let file = File::from_proto(file, worktree.clone(), cx)?;
+            let buffer = this
+                .open_buffers
+                .get_mut(&buffer_id)
+                .and_then(|b| b.upgrade(cx))
+                .ok_or_else(|| anyhow!("no such buffer"))?;
+            buffer.update(cx, |buffer, cx| {
+                buffer.file_updated(Box::new(file), cx).detach();
+            });
+            Ok(())
+        })
     }
 
-    pub fn handle_save_buffer(
-        &mut self,
+    async fn handle_save_buffer(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::SaveBuffer>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let sender_id = envelope.original_sender_id()?;
-        let project_id = self.remote_id().ok_or_else(|| anyhow!("not connected"))?;
-        let buffer = self
-            .shared_buffers
-            .get(&sender_id)
-            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
-            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
-        let receipt = envelope.receipt();
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::BufferSaved> {
         let buffer_id = envelope.payload.buffer_id;
-        let save = cx.spawn(|_, mut cx| async move {
-            buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await
-        });
-
-        cx.background()
-            .spawn(
-                async move {
-                    let (version, mtime) = save.await?;
-
-                    rpc.respond(
-                        receipt,
-                        proto::BufferSaved {
-                            project_id,
-                            buffer_id,
-                            version: (&version).into(),
-                            mtime: Some(mtime.into()),
-                        },
-                    )?;
+        let sender_id = envelope.original_sender_id()?;
+        let (project_id, save) = this.update(&mut cx, |this, cx| {
+            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
+            let buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
+            Ok::<_, anyhow::Error>((project_id, buffer.update(cx, |buffer, cx| buffer.save(cx))))
+        })?;
 
-                    Ok(())
-                }
-                .log_err(),
-            )
-            .detach();
-        Ok(())
+        let (version, mtime) = save.await?;
+        Ok(proto::BufferSaved {
+            project_id,
+            buffer_id,
+            version: (&version).into(),
+            mtime: Some(mtime.into()),
+        })
     }
 
-    pub fn handle_format_buffer(
-        &mut self,
-        envelope: TypedEnvelope<proto::FormatBuffer>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let receipt = envelope.receipt();
+    async fn handle_format_buffers(
+        this: ModelHandle<Self>,
+        envelope: TypedEnvelope<proto::FormatBuffers>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::FormatBuffersResponse> {
         let sender_id = envelope.original_sender_id()?;
-        let buffer = self
-            .shared_buffers
-            .get(&sender_id)
-            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
-            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
-        cx.spawn(|_, mut cx| async move {
-            let format = buffer.update(&mut cx, |buffer, cx| buffer.format(cx)).await;
-            // We spawn here in order to enqueue the sending of `Ack` *after* transmission of edits
-            // associated with formatting.
-            cx.spawn(|_| async move {
-                match format {
-                    Ok(()) => rpc.respond(receipt, proto::Ack {})?,
-                    Err(error) => rpc.respond_with_error(
-                        receipt,
-                        proto::Error {
-                            message: error.to_string(),
-                        },
-                    )?,
-                }
-                Ok::<_, anyhow::Error>(())
-            })
-            .await
-            .log_err();
+        let format = this.update(&mut cx, |this, cx| {
+            let shared_buffers = this
+                .shared_buffers
+                .get(&sender_id)
+                .ok_or_else(|| anyhow!("peer has no buffers"))?;
+            let mut buffers = HashSet::default();
+            for buffer_id in &envelope.payload.buffer_ids {
+                buffers.insert(
+                    shared_buffers
+                        .get(buffer_id)
+                        .cloned()
+                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
+                );
+            }
+            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
+        })?;
+
+        let project_transaction = format.await?;
+        let project_transaction = this.update(&mut cx, |this, cx| {
+            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
+        });
+        Ok(proto::FormatBuffersResponse {
+            transaction: Some(project_transaction),
         })
-        .detach();
-        Ok(())
     }
 
-    fn handle_get_completions(
-        &mut self,
+    async fn handle_get_completions(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::GetCompletions>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let receipt = envelope.receipt();
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::GetCompletionsResponse> {
         let sender_id = envelope.original_sender_id()?;
-        let buffer = self
-            .shared_buffers
-            .get(&sender_id)
-            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
-            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
         let position = envelope
             .payload
             .position
             .and_then(language::proto::deserialize_anchor)
             .ok_or_else(|| anyhow!("invalid position"))?;
-        cx.spawn(|_, mut cx| async move {
-            match buffer
-                .update(&mut cx, |buffer, cx| buffer.completions(position, cx))
-                .await
-            {
-                Ok(completions) => rpc.respond(
-                    receipt,
-                    proto::GetCompletionsResponse {
-                        completions: completions
-                            .iter()
-                            .map(language::proto::serialize_completion)
-                            .collect(),
-                    },
-                ),
-                Err(error) => rpc.respond_with_error(
-                    receipt,
-                    proto::Error {
-                        message: error.to_string(),
-                    },
-                ),
-            }
+        let completions = this.update(&mut cx, |this, cx| {
+            let buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+            Ok::<_, anyhow::Error>(this.completions(&buffer, position, cx))
+        })?;
+
+        Ok(proto::GetCompletionsResponse {
+            completions: completions
+                .await?
+                .iter()
+                .map(language::proto::serialize_completion)
+                .collect(),
         })
-        .detach_and_log_err(cx);
-        Ok(())
     }
 
-    fn handle_apply_additional_edits_for_completion(
-        &mut self,
+    async fn handle_apply_additional_edits_for_completion(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let receipt = envelope.receipt();
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
+        let sender_id = envelope.original_sender_id()?;
+        let apply_additional_edits = this.update(&mut cx, |this, cx| {
+            let buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+            let language = buffer.read(cx).language();
+            let completion = language::proto::deserialize_completion(
+                envelope
+                    .payload
+                    .completion
+                    .ok_or_else(|| anyhow!("invalid completion"))?,
+                language,
+            )?;
+            Ok::<_, anyhow::Error>(
+                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
+            )
+        })?;
+
+        Ok(proto::ApplyCompletionAdditionalEditsResponse {
+            transaction: apply_additional_edits
+                .await?
+                .as_ref()
+                .map(language::proto::serialize_transaction),
+        })
+    }
+
+    async fn handle_get_code_actions(
+        this: ModelHandle<Self>,
+        envelope: TypedEnvelope<proto::GetCodeActions>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::GetCodeActionsResponse> {
+        let sender_id = envelope.original_sender_id()?;
+        let start = envelope
+            .payload
+            .start
+            .and_then(language::proto::deserialize_anchor)
+            .ok_or_else(|| anyhow!("invalid start"))?;
+        let end = envelope
+            .payload
+            .end
+            .and_then(language::proto::deserialize_anchor)
+            .ok_or_else(|| anyhow!("invalid end"))?;
+        let code_actions = this.update(&mut cx, |this, cx| {
+            let buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
+        })?;
+
+        Ok(proto::GetCodeActionsResponse {
+            actions: code_actions
+                .await?
+                .iter()
+                .map(language::proto::serialize_code_action)
+                .collect(),
+        })
+    }
+
+    async fn handle_apply_code_action(
+        this: ModelHandle<Self>,
+        envelope: TypedEnvelope<proto::ApplyCodeAction>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::ApplyCodeActionResponse> {
         let sender_id = envelope.original_sender_id()?;
-        let buffer = self
-            .shared_buffers
-            .get(&sender_id)
-            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
-            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
-        let language = buffer.read(cx).language();
-        let completion = language::proto::deserialize_completion(
+        let action = language::proto::deserialize_code_action(
             envelope
                 .payload
-                .completion
-                .ok_or_else(|| anyhow!("invalid position"))?,
-            language,
+                .action
+                .ok_or_else(|| anyhow!("invalid action"))?,
         )?;
-        cx.spawn(|_, mut cx| async move {
-            match buffer
-                .update(&mut cx, |buffer, cx| {
-                    buffer.apply_additional_edits_for_completion(completion, false, cx)
-                })
-                .await
-            {
-                Ok(edit_ids) => rpc.respond(
-                    receipt,
-                    proto::ApplyCompletionAdditionalEditsResponse {
-                        additional_edits: edit_ids
-                            .into_iter()
-                            .map(|edit_id| proto::AdditionalEdit {
-                                replica_id: edit_id.replica_id as u32,
-                                local_timestamp: edit_id.value,
-                            })
-                            .collect(),
-                    },
-                ),
-                Err(error) => rpc.respond_with_error(
-                    receipt,
-                    proto::Error {
-                        message: error.to_string(),
-                    },
-                ),
-            }
+        let apply_code_action = this.update(&mut cx, |this, cx| {
+            let buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
+        })?;
+
+        let project_transaction = apply_code_action.await?;
+        let project_transaction = this.update(&mut cx, |this, cx| {
+            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
+        });
+        Ok(proto::ApplyCodeActionResponse {
+            transaction: Some(project_transaction),
         })
-        .detach_and_log_err(cx);
-        Ok(())
     }
 
-    pub fn handle_get_definition(
-        &mut self,
+    async fn handle_get_definition(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::GetDefinition>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
-        let receipt = envelope.receipt();
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<proto::GetDefinitionResponse> {
         let sender_id = envelope.original_sender_id()?;
-        let source_buffer = self
-            .shared_buffers
-            .get(&sender_id)
-            .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
-            .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
         let position = envelope
             .payload
             .position
             .and_then(deserialize_anchor)
             .ok_or_else(|| anyhow!("invalid position"))?;
-        if !source_buffer.read(cx).can_resolve(&position) {
-            return Err(anyhow!("cannot resolve position"));
-        }
+        let definitions = this.update(&mut cx, |this, cx| {
+            let source_buffer = this
+                .shared_buffers
+                .get(&sender_id)
+                .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned())
+                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+            if source_buffer.read(cx).can_resolve(&position) {
+                Ok(this.definition(&source_buffer, position, cx))
+            } else {
+                Err(anyhow!("cannot resolve position"))
+            }
+        })?;
 
-        let definitions = self.definition(&source_buffer, position, cx);
-        cx.spawn(|this, mut cx| async move {
-            let definitions = definitions.await?;
+        let definitions = definitions.await?;
+
+        this.update(&mut cx, |this, cx| {
             let mut response = proto::GetDefinitionResponse {
                 definitions: Default::default(),
             };
-            this.update(&mut cx, |this, cx| {
-                for definition in definitions {
-                    let buffer =
-                        this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
-                    response.definitions.push(proto::Definition {
-                        target_start: Some(serialize_anchor(&definition.target_range.start)),
-                        target_end: Some(serialize_anchor(&definition.target_range.end)),
-                        buffer: Some(buffer),
-                    });
-                }
-            });
-            rpc.respond(receipt, response)?;
-            Ok::<_, anyhow::Error>(())
+            for definition in definitions {
+                let buffer =
+                    this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx);
+                response.definitions.push(proto::Definition {
+                    target_start: Some(serialize_anchor(&definition.target_range.start)),
+                    target_end: Some(serialize_anchor(&definition.target_range.end)),
+                    buffer: Some(buffer),
+                });
+            }
+            Ok(response)
         })
-        .detach_and_log_err(cx);
-
-        Ok(())
     }
 
-    pub fn handle_open_buffer(
-        &mut self,
+    async fn handle_open_buffer(
+        this: ModelHandle<Self>,
         envelope: TypedEnvelope<proto::OpenBuffer>,
-        rpc: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> anyhow::Result<()> {
-        let receipt = envelope.receipt();
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> anyhow::Result<proto::OpenBufferResponse> {
         let peer_id = envelope.original_sender_id()?;
         let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
-        let open_buffer = self.open_buffer(
-            ProjectPath {
-                worktree_id,
-                path: PathBuf::from(envelope.payload.path).into(),
-            },
-            cx,
-        );
-        cx.spawn(|this, mut cx| {
-            async move {
-                let buffer = open_buffer.await?;
-                let buffer = this.update(&mut cx, |this, cx| {
-                    this.serialize_buffer_for_peer(&buffer, peer_id, cx)
-                });
-                rpc.respond(
-                    receipt,
-                    proto::OpenBufferResponse {
-                        buffer: Some(buffer),
-                    },
-                )
+        let open_buffer = this.update(&mut cx, |this, cx| {
+            this.open_buffer(
+                ProjectPath {
+                    worktree_id,
+                    path: PathBuf::from(envelope.payload.path).into(),
+                },
+                cx,
+            )
+        });
+
+        let buffer = open_buffer.await?;
+        this.update(&mut cx, |this, cx| {
+            Ok(proto::OpenBufferResponse {
+                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
+            })
+        })
+    }
+
+    fn serialize_project_transaction_for_peer(
+        &mut self,
+        project_transaction: ProjectTransaction,
+        peer_id: PeerId,
+        cx: &AppContext,
+    ) -> proto::ProjectTransaction {
+        let mut serialized_transaction = proto::ProjectTransaction {
+            buffers: Default::default(),
+            transactions: Default::default(),
+        };
+        for (buffer, transaction) in project_transaction.0 {
+            serialized_transaction
+                .buffers
+                .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
+            serialized_transaction
+                .transactions
+                .push(language::proto::serialize_transaction(&transaction));
+        }
+        serialized_transaction
+    }
+
+    fn deserialize_project_transaction(
+        &mut self,
+        message: proto::ProjectTransaction,
+        push_to_history: bool,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<ProjectTransaction>> {
+        let mut project_transaction = ProjectTransaction::default();
+        for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
+            let buffer = match self.deserialize_buffer(buffer, cx) {
+                Ok(buffer) => buffer,
+                Err(error) => return Task::ready(Err(error)),
+            };
+            let transaction = match language::proto::deserialize_transaction(transaction) {
+                Ok(transaction) => transaction,
+                Err(error) => return Task::ready(Err(error)),
+            };
+            project_transaction.0.insert(buffer, transaction);
+        }
+
+        cx.spawn_weak(|_, mut cx| async move {
+            for (buffer, transaction) in &project_transaction.0 {
+                buffer
+                    .update(&mut cx, |buffer, _| {
+                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
+                    })
+                    .await;
+
+                if push_to_history {
+                    buffer.update(&mut cx, |buffer, _| {
+                        buffer.push_transaction(transaction.clone(), Instant::now());
+                    });
+                }
             }
-            .log_err()
+
+            Ok(project_transaction)
         })
-        .detach();
-        Ok(())
     }
 
     fn serialize_buffer_for_peer(
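
The handler refactor above follows one pattern throughout: what used to be &mut self methods taking a ModelContext and replying via rpc.respond(receipt, ...) are now free-standing async fns over a ModelHandle<Self> and an AsyncAppContext, and request handlers return the response message directly. A minimal sketch of the new shape, with proto::Ping/proto::Ack as stand-ins for a real message pair:

    async fn handle_ping(
        this: ModelHandle<Project>,
        _envelope: TypedEnvelope<proto::Ping>,
        _client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        // Synchronous model access is bracketed by `update`; anything that
        // must be awaited (saves, LSP requests) happens outside of it, and the
        // returned message is sent back to the peer by the registration code.
        this.update(&mut cx, |_project, cx| cx.notify());
        Ok(proto::Ack {})
    }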

crates/project/src/worktree.rs šŸ”—

@@ -14,9 +14,7 @@ use gpui::{
     executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
     Task,
 };
-use language::{
-    Anchor, Buffer, Completion, DiagnosticEntry, Language, Operation, PointUtf16, Rope,
-};
+use language::{Buffer, DiagnosticEntry, Operation, PointUtf16, Rope};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use postage::{
@@ -293,7 +291,7 @@ impl Worktree {
                     let this = worktree_handle.downgrade();
                     cx.spawn(|mut cx| async move {
                         while let Some(_) = snapshot_rx.recv().await {
-                            if let Some(this) = cx.read(|cx| this.upgrade(cx)) {
+                            if let Some(this) = this.upgrade(&cx) {
                                 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
                             } else {
                                 break;
@@ -518,7 +516,7 @@ impl LocalWorktree {
 
             cx.spawn_weak(|this, mut cx| async move {
                 while let Ok(scan_state) = scan_states_rx.recv().await {
-                    if let Some(handle) = cx.read(|cx| this.upgrade(cx)) {
+                    if let Some(handle) = this.upgrade(&cx) {
                         let to_send = handle.update(&mut cx, |this, cx| {
                             last_scan_state_tx.blocking_send(scan_state).ok();
                             this.poll_snapshot(cx);
@@ -820,7 +818,7 @@ impl RemoteWorktree {
     ) -> Result<()> {
         let mut tx = self.updates_tx.clone();
         let payload = envelope.payload.clone();
-        cx.background()
+        cx.foreground()
             .spawn(async move {
                 tx.send(payload).await.expect("receiver runs to completion");
             })
@@ -1387,96 +1385,6 @@ impl language::File for File {
         })
     }
 
-    fn format_remote(
-        &self,
-        buffer_id: u64,
-        cx: &mut MutableAppContext,
-    ) -> Option<Task<Result<()>>> {
-        let worktree = self.worktree.read(cx);
-        let worktree = worktree.as_remote()?;
-        let rpc = worktree.client.clone();
-        let project_id = worktree.project_id;
-        Some(cx.foreground().spawn(async move {
-            rpc.request(proto::FormatBuffer {
-                project_id,
-                buffer_id,
-            })
-            .await?;
-            Ok(())
-        }))
-    }
-
-    fn completions(
-        &self,
-        buffer_id: u64,
-        position: Anchor,
-        language: Option<Arc<Language>>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<Vec<Completion<Anchor>>>> {
-        let worktree = self.worktree.read(cx);
-        let worktree = if let Some(worktree) = worktree.as_remote() {
-            worktree
-        } else {
-            return Task::ready(Err(anyhow!(
-                "remote completions requested on a local worktree"
-            )));
-        };
-        let rpc = worktree.client.clone();
-        let project_id = worktree.project_id;
-        cx.foreground().spawn(async move {
-            let response = rpc
-                .request(proto::GetCompletions {
-                    project_id,
-                    buffer_id,
-                    position: Some(language::proto::serialize_anchor(&position)),
-                })
-                .await?;
-            response
-                .completions
-                .into_iter()
-                .map(|completion| {
-                    language::proto::deserialize_completion(completion, language.as_ref())
-                })
-                .collect()
-        })
-    }
-
-    fn apply_additional_edits_for_completion(
-        &self,
-        buffer_id: u64,
-        completion: Completion<Anchor>,
-        cx: &mut MutableAppContext,
-    ) -> Task<Result<Vec<clock::Local>>> {
-        let worktree = self.worktree.read(cx);
-        let worktree = if let Some(worktree) = worktree.as_remote() {
-            worktree
-        } else {
-            return Task::ready(Err(anyhow!(
-                "remote additional edits application requested on a local worktree"
-            )));
-        };
-        let rpc = worktree.client.clone();
-        let project_id = worktree.project_id;
-        cx.foreground().spawn(async move {
-            let response = rpc
-                .request(proto::ApplyCompletionAdditionalEdits {
-                    project_id,
-                    buffer_id,
-                    completion: Some(language::proto::serialize_completion(&completion)),
-                })
-                .await?;
-
-            Ok(response
-                .additional_edits
-                .into_iter()
-                .map(|edit| clock::Local {
-                    replica_id: edit.replica_id as ReplicaId,
-                    value: edit.local_timestamp,
-                })
-                .collect())
-        })
-    }
-
     fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) {
         self.worktree.update(cx, |worktree, cx| {
             worktree.send_buffer_update(buffer_id, operation, cx);
@@ -2216,7 +2124,7 @@ struct UpdateIgnoreStatusJob {
 }
 
 pub trait WorktreeHandle {
-    #[cfg(test)]
+    #[cfg(any(test, feature = "test-support"))]
     fn flush_fs_events<'a>(
         &self,
         cx: &'a gpui::TestAppContext,
@@ -2230,7 +2138,7 @@ impl WorktreeHandle for ModelHandle<Worktree> {
     //
     // This function mutates the worktree's directory and waits for those mutations to be picked up,
     // to ensure that all redundant FS events have already been processed.
-    #[cfg(test)]
+    #[cfg(any(test, feature = "test-support"))]
     fn flush_fs_events<'a>(
         &self,
         cx: &'a gpui::TestAppContext,
@@ -2238,14 +2146,22 @@ impl WorktreeHandle for ModelHandle<Worktree> {
         use smol::future::FutureExt;
 
         let filename = "fs-event-sentinel";
-        let root_path = cx.read(|cx| self.read(cx).as_local().unwrap().abs_path().clone());
         let tree = self.clone();
+        let (fs, root_path) = self.read_with(cx, |tree, _| {
+            let tree = tree.as_local().unwrap();
+            (tree.fs.clone(), tree.abs_path().clone())
+        });
+
         async move {
-            std::fs::write(root_path.join(filename), "").unwrap();
+            fs.create_file(&root_path.join(filename), Default::default())
+                .await
+                .unwrap();
             tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_some())
                 .await;
 
-            std::fs::remove_file(root_path.join(filename)).unwrap();
+            fs.remove_file(&root_path.join(filename), Default::default())
+                .await
+                .unwrap();
             tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_none())
                 .await;
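
With completions, formatting, and additional-edit application hoisted into Project, language::File is left with buffer bookkeeping only, and remote worktree updates are now applied on the foreground executor. On a guest, LSP-style requests therefore go through the project handle; a hedged sketch (the call site and position value are placeholders):

    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, position, cx)
    });
    // Fetched over RPC from the host when the project is remote, or from the
    // local language server otherwise.
    let completions = completions.await?;

The flush_fs_events change points the same way: the test helper now creates and removes its sentinel file through the Fs trait, so it also works against a fake filesystem under the test-support feature.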
 

crates/rpc/proto/zed.proto šŸ”—

@@ -39,27 +39,32 @@ message Envelope {
         SaveBuffer save_buffer = 31;
         BufferSaved buffer_saved = 32;
         BufferReloaded buffer_reloaded = 33;
-        FormatBuffer format_buffer = 34;
-        GetCompletions get_completions = 35;
-        GetCompletionsResponse get_completions_response = 36;
-        ApplyCompletionAdditionalEdits apply_completion_additional_edits = 37;
-        ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 38;
-
-        GetChannels get_channels = 39;
-        GetChannelsResponse get_channels_response = 40;
-        JoinChannel join_channel = 41;
-        JoinChannelResponse join_channel_response = 42;
-        LeaveChannel leave_channel = 43;
-        SendChannelMessage send_channel_message = 44;
-        SendChannelMessageResponse send_channel_message_response = 45;
-        ChannelMessageSent channel_message_sent = 46;
-        GetChannelMessages get_channel_messages = 47;
-        GetChannelMessagesResponse get_channel_messages_response = 48;
-
-        UpdateContacts update_contacts = 49;
-
-        GetUsers get_users = 50;
-        GetUsersResponse get_users_response = 51;
+        FormatBuffers format_buffers = 34;
+        FormatBuffersResponse format_buffers_response = 35;
+        GetCompletions get_completions = 36;
+        GetCompletionsResponse get_completions_response = 37;
+        ApplyCompletionAdditionalEdits apply_completion_additional_edits = 38;
+        ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 39;
+        GetCodeActions get_code_actions = 40;
+        GetCodeActionsResponse get_code_actions_response = 41;
+        ApplyCodeAction apply_code_action = 42;
+        ApplyCodeActionResponse apply_code_action_response = 43;
+
+        GetChannels get_channels = 44;
+        GetChannelsResponse get_channels_response = 45;
+        JoinChannel join_channel = 46;
+        JoinChannelResponse join_channel_response = 47;
+        LeaveChannel leave_channel = 48;
+        SendChannelMessage send_channel_message = 49;
+        SendChannelMessageResponse send_channel_message_response = 50;
+        ChannelMessageSent channel_message_sent = 51;
+        GetChannelMessages get_channel_messages = 52;
+        GetChannelMessagesResponse get_channel_messages_response = 53;
+
+        UpdateContacts update_contacts = 54;
+
+        GetUsers get_users = 55;
+        GetUsersResponse get_users_response = 56;
     }
 }
 
@@ -202,9 +207,13 @@ message BufferReloaded {
     Timestamp mtime = 4;
 }
 
-message FormatBuffer {
+message FormatBuffers {
     uint64 project_id = 1;
-    uint64 buffer_id = 2;
+    repeated uint64 buffer_ids = 2;
+}
+
+message FormatBuffersResponse {
+    ProjectTransaction transaction = 1;
 }
 
 message GetCompletions {
@@ -224,12 +233,7 @@ message ApplyCompletionAdditionalEdits {
 }
 
 message ApplyCompletionAdditionalEditsResponse {
-    repeated AdditionalEdit additional_edits = 1;
-}
-
-message AdditionalEdit {
-    uint32 replica_id = 1;
-    uint32 local_timestamp = 2;
+    Transaction transaction = 1;
 }
 
 message Completion {
@@ -239,6 +243,51 @@ message Completion {
     bytes lsp_completion = 4;
 }
 
+message GetCodeActions {
+    uint64 project_id = 1;
+    uint64 buffer_id = 2;
+    Anchor start = 3;
+    Anchor end = 4;
+}
+
+message GetCodeActionsResponse {
+    repeated CodeAction actions = 1;
+}
+
+message ApplyCodeAction {
+    uint64 project_id = 1;
+    uint64 buffer_id = 2;
+    CodeAction action = 3;
+}
+
+message ApplyCodeActionResponse {
+    ProjectTransaction transaction = 1;
+}
+
+message CodeAction {
+    Anchor start = 1;
+    Anchor end = 2;
+    bytes lsp_action = 3;
+}
+
+message ProjectTransaction {
+    repeated Buffer buffers = 1;
+    repeated Transaction transactions = 2;
+}
+
+message Transaction {
+    LocalTimestamp id = 1;
+    repeated LocalTimestamp edit_ids = 2;
+    repeated VectorClockEntry start = 3;
+    repeated VectorClockEntry end = 4;
+    repeated Range ranges = 5;
+}
+
+message LocalTimestamp {
+    uint32 replica_id = 1;
+    uint32 value = 2;
+}
+
 message UpdateDiagnosticSummary {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
@@ -366,16 +415,11 @@ message Buffer {
 message BufferState {
     uint64 id = 1;
     optional File file = 2;
-    string visible_text = 3;
-    string deleted_text = 4;
-    repeated BufferFragment fragments = 5;
-    repeated UndoMapEntry undo_map = 6;
-    repeated VectorClockEntry version = 7;
-    repeated SelectionSet selections = 8;
-    repeated Diagnostic diagnostics = 9;
-    uint32 lamport_timestamp = 10;
-    repeated Operation deferred_operations = 11;
-    repeated string completion_triggers = 12;
+    string base_text = 3;
+    repeated Operation operations = 4;
+    repeated SelectionSet selections = 5;
+    repeated Diagnostic diagnostics = 6;
+    repeated string completion_triggers = 7;
 }
 
 message BufferFragment {
@@ -474,7 +518,9 @@ message Operation {
     }
 
     message UpdateCompletionTriggers {
-        repeated string triggers = 1;
+        uint32 replica_id = 1;
+        uint32 lamport_timestamp = 2;
+        repeated string triggers = 3;
     }
 }
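
The protocol gains the four code-action messages plus ProjectTransaction/Transaction, and BufferState is slimmed down to a base text plus replayable operations. A guest-side sketch of the new request, assuming a local range of anchors and in-scope client, project_id, and buffer_id values:

    let response = client
        .request(proto::GetCodeActions {
            project_id,
            buffer_id,
            start: Some(language::proto::serialize_anchor(&range.start)),
            end: Some(language::proto::serialize_anchor(&range.end)),
        })
        .await?;
    // response.actions is a Vec<proto::CodeAction>; applying one goes back
    // through ApplyCodeAction and yields a ProjectTransaction.
    let actions = response.actions;

Because the Envelope variants from 34 onward are renumbered, this is a wire-breaking change, which is why PROTOCOL_VERSION moves from 5 to 6 below.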
 

crates/rpc/src/peer.rs šŸ”—

@@ -179,7 +179,16 @@ impl Peer {
                     let channel = response_channels.lock().as_mut()?.remove(&responding_to);
                     if let Some(mut tx) = channel {
                         let mut requester_resumed = barrier::channel();
-                        tx.send((incoming, requester_resumed.0)).await.ok();
+                        if let Err(error) = tx.send((incoming, requester_resumed.0)).await {
+                            log::debug!(
+                                "received RPC but request future was dropped {:?}",
+                                error.0 .0
+                            );
+                        }
+                        // Drop response channel before awaiting on the barrier. This allows the
+                        // barrier to get dropped even if the request's future is dropped before it
+                        // has a chance to observe the response.
+                        drop(tx);
                         requester_resumed.1.recv().await;
                     } else {
                         log::warn!("received RPC response to unknown request {}", responding_to);
@@ -337,7 +346,7 @@ mod tests {
     use async_tungstenite::tungstenite::Message as WebSocketMessage;
     use gpui::TestAppContext;
 
-    #[gpui::test(iterations = 10)]
+    #[gpui::test(iterations = 50)]
     async fn test_request_response(cx: TestAppContext) {
         let executor = cx.foreground();
 
@@ -478,7 +487,7 @@ mod tests {
         }
     }
 
-    #[gpui::test(iterations = 10)]
+    #[gpui::test(iterations = 50)]
     async fn test_order_of_response_and_incoming(cx: TestAppContext) {
         let executor = cx.foreground();
         let server = Peer::new();
@@ -576,7 +585,119 @@ mod tests {
         );
     }
 
-    #[gpui::test(iterations = 10)]
+    #[gpui::test(iterations = 50)]
+    async fn test_dropping_request_before_completion(cx: TestAppContext) {
+        let executor = cx.foreground();
+        let server = Peer::new();
+        let client = Peer::new();
+
+        let (client_to_server_conn, server_to_client_conn, _) =
+            Connection::in_memory(cx.background());
+        let (client_to_server_conn_id, io_task1, mut client_incoming) =
+            client.add_connection(client_to_server_conn).await;
+        let (server_to_client_conn_id, io_task2, mut server_incoming) =
+            server.add_connection(server_to_client_conn).await;
+
+        executor.spawn(io_task1).detach();
+        executor.spawn(io_task2).detach();
+
+        executor
+            .spawn(async move {
+                let request1 = server_incoming
+                    .next()
+                    .await
+                    .unwrap()
+                    .into_any()
+                    .downcast::<TypedEnvelope<proto::Ping>>()
+                    .unwrap();
+                let request2 = server_incoming
+                    .next()
+                    .await
+                    .unwrap()
+                    .into_any()
+                    .downcast::<TypedEnvelope<proto::Ping>>()
+                    .unwrap();
+
+                server
+                    .send(
+                        server_to_client_conn_id,
+                        proto::Error {
+                            message: "message 1".to_string(),
+                        },
+                    )
+                    .unwrap();
+                server
+                    .send(
+                        server_to_client_conn_id,
+                        proto::Error {
+                            message: "message 2".to_string(),
+                        },
+                    )
+                    .unwrap();
+                server.respond(request1.receipt(), proto::Ack {}).unwrap();
+                server.respond(request2.receipt(), proto::Ack {}).unwrap();
+
+                // Prevent the connection from being dropped
+                server_incoming.next().await;
+            })
+            .detach();
+
+        let events = Arc::new(Mutex::new(Vec::new()));
+
+        let request1 = client.request(client_to_server_conn_id, proto::Ping {});
+        let request1_task = executor.spawn(request1);
+        let request2 = client.request(client_to_server_conn_id, proto::Ping {});
+        let request2_task = executor.spawn({
+            let events = events.clone();
+            async move {
+                request2.await.unwrap();
+                events.lock().push("response 2".to_string());
+            }
+        });
+
+        executor
+            .spawn({
+                let events = events.clone();
+                async move {
+                    let incoming1 = client_incoming
+                        .next()
+                        .await
+                        .unwrap()
+                        .into_any()
+                        .downcast::<TypedEnvelope<proto::Error>>()
+                        .unwrap();
+                    events.lock().push(incoming1.payload.message);
+                    let incoming2 = client_incoming
+                        .next()
+                        .await
+                        .unwrap()
+                        .into_any()
+                        .downcast::<TypedEnvelope<proto::Error>>()
+                        .unwrap();
+                    events.lock().push(incoming2.payload.message);
+
+                    // Prevent the connection from being dropped
+                    client_incoming.next().await;
+                }
+            })
+            .detach();
+
+        // Allow the request to make some progress before dropping it.
+        cx.background().simulate_random_delay().await;
+        drop(request1_task);
+
+        request2_task.await;
+        assert_eq!(
+            &*events.lock(),
+            &[
+                "message 1".to_string(),
+                "message 2".to_string(),
+                "response 2".to_string()
+            ]
+        );
+    }
+
+    #[gpui::test(iterations = 50)]
     async fn test_disconnect(cx: TestAppContext) {
         let executor = cx.foreground();
 
@@ -611,7 +732,7 @@ mod tests {
             .is_err());
     }
 
-    #[gpui::test(iterations = 10)]
+    #[gpui::test(iterations = 50)]
     async fn test_io_error(cx: TestAppContext) {
         let executor = cx.foreground();
         let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background());
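
The peer change closes a hang when a request future is dropped mid-flight: the failed send is logged at debug level, and the response sender is released before awaiting the resume barrier, so the barrier can complete even if the requester never observes the response. The new test (and the bump from 10 to 50 iterations) pins that behavior down; a sketch of what it guarantees, with conn_id standing in for the connection id used in the test:

    let request1 = executor.spawn(client.request(conn_id, proto::Ping {}));
    let request2 = executor.spawn(client.request(conn_id, proto::Ping {}));
    drop(request1);          // cancel the first request before it resolves
    request2.await.unwrap(); // later traffic on the same connection still flows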

crates/rpc/src/proto.rs šŸ”—

@@ -122,6 +122,8 @@ macro_rules! entity_messages {
 messages!(
     Ack,
     AddProjectCollaborator,
+    ApplyCodeAction,
+    ApplyCodeActionResponse,
     ApplyCompletionAdditionalEdits,
     ApplyCompletionAdditionalEditsResponse,
     BufferReloaded,
@@ -131,11 +133,14 @@ messages!(
     DiskBasedDiagnosticsUpdated,
     DiskBasedDiagnosticsUpdating,
     Error,
-    FormatBuffer,
+    FormatBuffers,
+    FormatBuffersResponse,
     GetChannelMessages,
     GetChannelMessagesResponse,
     GetChannels,
     GetChannelsResponse,
+    GetCodeActions,
+    GetCodeActionsResponse,
     GetCompletions,
     GetCompletionsResponse,
     GetDefinition,
@@ -171,13 +176,15 @@ messages!(
 );
 
 request_messages!(
+    (ApplyCodeAction, ApplyCodeActionResponse),
     (
         ApplyCompletionAdditionalEdits,
         ApplyCompletionAdditionalEditsResponse
     ),
-    (FormatBuffer, Ack),
+    (FormatBuffers, FormatBuffersResponse),
     (GetChannelMessages, GetChannelMessagesResponse),
     (GetChannels, GetChannelsResponse),
+    (GetCodeActions, GetCodeActionsResponse),
     (GetCompletions, GetCompletionsResponse),
     (GetDefinition, GetDefinitionResponse),
     (GetUsers, GetUsersResponse),
@@ -197,13 +204,15 @@ request_messages!(
 entity_messages!(
     project_id,
     AddProjectCollaborator,
+    ApplyCodeAction,
     ApplyCompletionAdditionalEdits,
     BufferReloaded,
     BufferSaved,
     CloseBuffer,
     DiskBasedDiagnosticsUpdated,
     DiskBasedDiagnosticsUpdating,
-    FormatBuffer,
+    FormatBuffers,
+    GetCodeActions,
     GetCompletions,
     GetDefinition,
     JoinProject,
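
request_messages! is what ties each request to its response type, and the new entity_messages! entries route code-action traffic to the owning project. Combined with add_request_handler in the server diff below, a handler can be written against the typed response directly; a sketch with the body elided:

    async fn get_code_actions(
        self: Arc<Server>,
        request: TypedEnvelope<proto::GetCodeActions>,
    ) -> tide::Result<proto::GetCodeActionsResponse> {
        // Presumably forwards to the host of request.payload.project_id and
        // relays its response; errors are reported back to the requester by
        // add_request_handler (body elided here).
        todo!()
    }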

crates/rpc/src/rpc.rs šŸ”—

@@ -5,4 +5,4 @@ pub mod proto;
 pub use conn::Connection;
 pub use peer::*;
 
-pub const PROTOCOL_VERSION: u32 = 5;
+pub const PROTOCOL_VERSION: u32 = 6;

crates/server/src/rpc.rs šŸ”—

@@ -13,7 +13,7 @@ use futures::{future::BoxFuture, FutureExt, StreamExt};
 use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard};
 use postage::{mpsc, prelude::Sink as _};
 use rpc::{
-    proto::{self, AnyTypedEnvelope, EnvelopedMessage},
+    proto::{self, AnyTypedEnvelope, EnvelopedMessage, RequestMessage},
     Connection, ConnectionId, Peer, TypedEnvelope,
 };
 use sha1::{Digest as _, Sha1};
@@ -43,7 +43,6 @@ pub struct Server {
 
 const MESSAGE_COUNT_PER_PAGE: usize = 100;
 const MAX_MESSAGE_LEN: usize = 1024;
-const NO_SUCH_PROJECT: &'static str = "no such project";
 
 impl Server {
     pub fn new(
@@ -60,42 +59,44 @@ impl Server {
         };
 
         server
-            .add_handler(Server::ping)
-            .add_handler(Server::register_project)
-            .add_handler(Server::unregister_project)
-            .add_handler(Server::share_project)
-            .add_handler(Server::unshare_project)
-            .add_handler(Server::join_project)
-            .add_handler(Server::leave_project)
-            .add_handler(Server::register_worktree)
-            .add_handler(Server::unregister_worktree)
-            .add_handler(Server::share_worktree)
-            .add_handler(Server::update_worktree)
-            .add_handler(Server::update_diagnostic_summary)
-            .add_handler(Server::disk_based_diagnostics_updating)
-            .add_handler(Server::disk_based_diagnostics_updated)
-            .add_handler(Server::get_definition)
-            .add_handler(Server::open_buffer)
-            .add_handler(Server::close_buffer)
-            .add_handler(Server::update_buffer)
-            .add_handler(Server::update_buffer_file)
-            .add_handler(Server::buffer_reloaded)
-            .add_handler(Server::buffer_saved)
-            .add_handler(Server::save_buffer)
-            .add_handler(Server::format_buffer)
-            .add_handler(Server::get_completions)
-            .add_handler(Server::apply_additional_edits_for_completion)
-            .add_handler(Server::get_channels)
-            .add_handler(Server::get_users)
-            .add_handler(Server::join_channel)
-            .add_handler(Server::leave_channel)
-            .add_handler(Server::send_channel_message)
-            .add_handler(Server::get_channel_messages);
+            .add_request_handler(Server::ping)
+            .add_request_handler(Server::register_project)
+            .add_message_handler(Server::unregister_project)
+            .add_request_handler(Server::share_project)
+            .add_message_handler(Server::unshare_project)
+            .add_request_handler(Server::join_project)
+            .add_message_handler(Server::leave_project)
+            .add_request_handler(Server::register_worktree)
+            .add_message_handler(Server::unregister_worktree)
+            .add_request_handler(Server::share_worktree)
+            .add_message_handler(Server::update_worktree)
+            .add_message_handler(Server::update_diagnostic_summary)
+            .add_message_handler(Server::disk_based_diagnostics_updating)
+            .add_message_handler(Server::disk_based_diagnostics_updated)
+            .add_request_handler(Server::get_definition)
+            .add_request_handler(Server::open_buffer)
+            .add_message_handler(Server::close_buffer)
+            .add_request_handler(Server::update_buffer)
+            .add_message_handler(Server::update_buffer_file)
+            .add_message_handler(Server::buffer_reloaded)
+            .add_message_handler(Server::buffer_saved)
+            .add_request_handler(Server::save_buffer)
+            .add_request_handler(Server::format_buffers)
+            .add_request_handler(Server::get_completions)
+            .add_request_handler(Server::apply_additional_edits_for_completion)
+            .add_request_handler(Server::get_code_actions)
+            .add_request_handler(Server::apply_code_action)
+            .add_request_handler(Server::get_channels)
+            .add_request_handler(Server::get_users)
+            .add_request_handler(Server::join_channel)
+            .add_message_handler(Server::leave_channel)
+            .add_request_handler(Server::send_channel_message)
+            .add_request_handler(Server::get_channel_messages);
 
         Arc::new(server)
     }
 
-    fn add_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
+    fn add_message_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
     where
         F: 'static + Send + Sync + Fn(Arc<Self>, TypedEnvelope<M>) -> Fut,
         Fut: 'static + Send + Future<Output = tide::Result<()>>,
@@ -114,6 +115,35 @@ impl Server {
         self
     }
 
+    fn add_request_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
+    where
+        F: 'static + Send + Sync + Fn(Arc<Self>, TypedEnvelope<M>) -> Fut,
+        Fut: 'static + Send + Future<Output = tide::Result<M::Response>>,
+        M: RequestMessage,
+    {
+        self.add_message_handler(move |server, envelope| {
+            let receipt = envelope.receipt();
+            let response = (handler)(server.clone(), envelope);
+            async move {
+                match response.await {
+                    Ok(response) => {
+                        server.peer.respond(receipt, response)?;
+                        Ok(())
+                    }
+                    Err(error) => {
+                        server.peer.respond_with_error(
+                            receipt,
+                            proto::Error {
+                                message: error.to_string(),
+                            },
+                        )?;
+                        Err(error)
+                    }
+                }
+            }
+        })
+    }
+
     pub fn handle_connection(
         self: &Arc<Self>,
         connection: Connection,
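
The wrapper above only typechecks because every request message knows its paired response type. A minimal sketch of that relationship, inferred from the `M: RequestMessage` bound and the use of `M::Response` (the real trait lives in the rpc crate and carries additional bounds):

    // Sketch only, not the rpc crate's actual definition: a request message
    // declares its response type, which is what lets add_request_handler send
    // a typed payload on success and a proto::Error on failure.
    trait RequestMessage {
        type Response;
    }

    // Hypothetical pairing, mirroring how Ping resolves to Ack below.
    struct Ping;
    struct Ack;

    impl RequestMessage for Ping {
        type Response = Ack;
    }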
@@ -212,25 +242,20 @@ impl Server {
         Ok(())
     }
 
-    async fn ping(self: Arc<Server>, request: TypedEnvelope<proto::Ping>) -> tide::Result<()> {
-        self.peer.respond(request.receipt(), proto::Ack {})?;
-        Ok(())
+    async fn ping(self: Arc<Server>, _: TypedEnvelope<proto::Ping>) -> tide::Result<proto::Ack> {
+        Ok(proto::Ack {})
     }
 
     async fn register_project(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::RegisterProject>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::RegisterProjectResponse> {
         let project_id = {
             let mut state = self.state_mut();
             let user_id = state.user_id_for_connection(request.sender_id)?;
             state.register_project(request.sender_id, user_id)
         };
-        self.peer.respond(
-            request.receipt(),
-            proto::RegisterProjectResponse { project_id },
-        )?;
-        Ok(())
+        Ok(proto::RegisterProjectResponse { project_id })
     }
 
     async fn unregister_project(
@@ -239,8 +264,7 @@ impl Server {
     ) -> tide::Result<()> {
         let project = self
             .state_mut()
-            .unregister_project(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!("no such project"))?;
+            .unregister_project(request.payload.project_id, request.sender_id)?;
         self.update_contacts_for_users(project.authorized_user_ids().iter())?;
         Ok(())
     }
@@ -248,11 +272,10 @@ impl Server {
     async fn share_project(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::ShareProject>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::Ack> {
         self.state_mut()
             .share_project(request.payload.project_id, request.sender_id);
-        self.peer.respond(request.receipt(), proto::Ack {})?;
-        Ok(())
+        Ok(proto::Ack {})
     }
 
     async fn unshare_project(
@@ -275,11 +298,11 @@ impl Server {
     async fn join_project(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::JoinProject>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::JoinProjectResponse> {
         let project_id = request.payload.project_id;
 
         let user_id = self.state().user_id_for_connection(request.sender_id)?;
-        let response_data = self
+        let (response, connection_ids, contact_user_ids) = self
             .state_mut()
             .join_project(request.sender_id, user_id, project_id)
             .and_then(|joined| {
@@ -326,37 +349,23 @@ impl Server {
                 let connection_ids = joined.project.connection_ids();
                 let contact_user_ids = joined.project.authorized_user_ids();
                 Ok((response, connection_ids, contact_user_ids))
-            });
-
-        match response_data {
-            Ok((response, connection_ids, contact_user_ids)) => {
-                broadcast(request.sender_id, connection_ids, |conn_id| {
-                    self.peer.send(
-                        conn_id,
-                        proto::AddProjectCollaborator {
-                            project_id,
-                            collaborator: Some(proto::Collaborator {
-                                peer_id: request.sender_id.0,
-                                replica_id: response.replica_id,
-                                user_id: user_id.to_proto(),
-                            }),
-                        },
-                    )
-                })?;
-                self.peer.respond(request.receipt(), response)?;
-                self.update_contacts_for_users(&contact_user_ids)?;
-            }
-            Err(error) => {
-                self.peer.respond_with_error(
-                    request.receipt(),
-                    proto::Error {
-                        message: error.to_string(),
-                    },
-                )?;
-            }
-        }
+            })?;
 
-        Ok(())
+        broadcast(request.sender_id, connection_ids, |conn_id| {
+            self.peer.send(
+                conn_id,
+                proto::AddProjectCollaborator {
+                    project_id,
+                    collaborator: Some(proto::Collaborator {
+                        peer_id: request.sender_id.0,
+                        replica_id: response.replica_id,
+                        user_id: user_id.to_proto(),
+                    }),
+                },
+            )
+        })?;
+        self.update_contacts_for_users(&contact_user_ids)?;
+        Ok(response)
     }
 
     async fn leave_project(
@@ -365,70 +374,49 @@ impl Server {
     ) -> tide::Result<()> {
         let sender_id = request.sender_id;
         let project_id = request.payload.project_id;
-        let worktree = self.state_mut().leave_project(sender_id, project_id);
-        if let Some(worktree) = worktree {
-            broadcast(sender_id, worktree.connection_ids, |conn_id| {
-                self.peer.send(
-                    conn_id,
-                    proto::RemoveProjectCollaborator {
-                        project_id,
-                        peer_id: sender_id.0,
-                    },
-                )
-            })?;
-            self.update_contacts_for_users(&worktree.authorized_user_ids)?;
-        }
+        let worktree = self.state_mut().leave_project(sender_id, project_id)?;
+
+        broadcast(sender_id, worktree.connection_ids, |conn_id| {
+            self.peer.send(
+                conn_id,
+                proto::RemoveProjectCollaborator {
+                    project_id,
+                    peer_id: sender_id.0,
+                },
+            )
+        })?;
+        self.update_contacts_for_users(&worktree.authorized_user_ids)?;
+
         Ok(())
     }
 
     async fn register_worktree(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::RegisterWorktree>,
-    ) -> tide::Result<()> {
-        let receipt = request.receipt();
+    ) -> tide::Result<proto::Ack> {
         let host_user_id = self.state().user_id_for_connection(request.sender_id)?;
 
         let mut contact_user_ids = HashSet::default();
         contact_user_ids.insert(host_user_id);
         for github_login in request.payload.authorized_logins {
-            match self.app_state.db.create_user(&github_login, false).await {
-                Ok(contact_user_id) => {
-                    contact_user_ids.insert(contact_user_id);
-                }
-                Err(err) => {
-                    let message = err.to_string();
-                    self.peer
-                        .respond_with_error(receipt, proto::Error { message })?;
-                    return Ok(());
-                }
-            }
+            let contact_user_id = self.app_state.db.create_user(&github_login, false).await?;
+            contact_user_ids.insert(contact_user_id);
         }
 
         let contact_user_ids = contact_user_ids.into_iter().collect::<Vec<_>>();
-        let ok = self.state_mut().register_worktree(
+        self.state_mut().register_worktree(
             request.payload.project_id,
             request.payload.worktree_id,
+            request.sender_id,
             Worktree {
                 authorized_user_ids: contact_user_ids.clone(),
                 root_name: request.payload.root_name,
                 share: None,
                 weak: false,
             },
-        );
-
-        if ok {
-            self.peer.respond(receipt, proto::Ack {})?;
-            self.update_contacts_for_users(&contact_user_ids)?;
-        } else {
-            self.peer.respond_with_error(
-                receipt,
-                proto::Error {
-                    message: NO_SUCH_PROJECT.to_string(),
-                },
-            )?;
-        }
-
-        Ok(())
+        )?;
+        self.update_contacts_for_users(&contact_user_ids)?;
+        Ok(proto::Ack {})
     }
 
     async fn unregister_worktree(
@@ -456,7 +444,7 @@ impl Server {
     async fn share_worktree(
         mut self: Arc<Server>,
         mut request: TypedEnvelope<proto::ShareWorktree>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::Ack> {
         let worktree = request
             .payload
             .worktree
@@ -479,46 +467,32 @@ impl Server {
             request.sender_id,
             entries,
             diagnostic_summaries,
-        );
-        if let Some(shared_worktree) = shared_worktree {
-            broadcast(
-                request.sender_id,
-                shared_worktree.connection_ids,
-                |connection_id| {
-                    self.peer.forward_send(
-                        request.sender_id,
-                        connection_id,
-                        request.payload.clone(),
-                    )
-                },
-            )?;
-            self.peer.respond(request.receipt(), proto::Ack {})?;
-            self.update_contacts_for_users(&shared_worktree.authorized_user_ids)?;
-        } else {
-            self.peer.respond_with_error(
-                request.receipt(),
-                proto::Error {
-                    message: "no such worktree".to_string(),
-                },
-            )?;
-        }
-        Ok(())
+        )?;
+
+        broadcast(
+            request.sender_id,
+            shared_worktree.connection_ids,
+            |connection_id| {
+                self.peer
+                    .forward_send(request.sender_id, connection_id, request.payload.clone())
+            },
+        )?;
+        self.update_contacts_for_users(&shared_worktree.authorized_user_ids)?;
+
+        Ok(proto::Ack {})
     }
 
     async fn update_worktree(
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::UpdateWorktree>,
     ) -> tide::Result<()> {
-        let connection_ids = self
-            .state_mut()
-            .update_worktree(
-                request.sender_id,
-                request.payload.project_id,
-                request.payload.worktree_id,
-                &request.payload.removed_entries,
-                &request.payload.updated_entries,
-            )
-            .ok_or_else(|| anyhow!("no such worktree"))?;
+        let connection_ids = self.state_mut().update_worktree(
+            request.sender_id,
+            request.payload.project_id,
+            request.payload.worktree_id,
+            &request.payload.removed_entries,
+            &request.payload.updated_entries,
+        )?;
 
         broadcast(request.sender_id, connection_ids, |connection_id| {
             self.peer
@@ -532,19 +506,17 @@ impl Server {
         mut self: Arc<Server>,
         request: TypedEnvelope<proto::UpdateDiagnosticSummary>,
     ) -> tide::Result<()> {
-        let receiver_ids = request
+        let summary = request
             .payload
             .summary
             .clone()
-            .and_then(|summary| {
-                self.state_mut().update_diagnostic_summary(
-                    request.payload.project_id,
-                    request.payload.worktree_id,
-                    request.sender_id,
-                    summary,
-                )
-            })
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .ok_or_else(|| anyhow!("invalid summary"))?;
+        let receiver_ids = self.state_mut().update_diagnostic_summary(
+            request.payload.project_id,
+            request.payload.worktree_id,
+            request.sender_id,
+            summary,
+        )?;
 
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
@@ -559,8 +531,7 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -574,8 +545,7 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -586,37 +556,29 @@ impl Server {
     async fn get_definition(
         self: Arc<Server>,
         request: TypedEnvelope<proto::GetDefinition>,
-    ) -> tide::Result<()> {
-        let receipt = request.receipt();
+    ) -> tide::Result<proto::GetDefinitionResponse> {
         let host_connection_id = self
             .state()
-            .read_project(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .read_project(request.payload.project_id, request.sender_id)?
             .host_connection_id;
-        let response = self
+        Ok(self
             .peer
             .forward_request(request.sender_id, host_connection_id, request.payload)
-            .await?;
-        self.peer.respond(receipt, response)?;
-        Ok(())
+            .await?)
     }
 
     async fn open_buffer(
         self: Arc<Server>,
         request: TypedEnvelope<proto::OpenBuffer>,
-    ) -> tide::Result<()> {
-        let receipt = request.receipt();
+    ) -> tide::Result<proto::OpenBufferResponse> {
         let host_connection_id = self
             .state()
-            .read_project(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .read_project(request.payload.project_id, request.sender_id)?
             .host_connection_id;
-        let response = self
+        Ok(self
             .peer
             .forward_request(request.sender_id, host_connection_id, request.payload)
-            .await?;
-        self.peer.respond(receipt, response)?;
-        Ok(())
+            .await?)
     }
 
     async fn close_buffer(
@@ -625,8 +587,7 @@ impl Server {
     ) -> tide::Result<()> {
         let host_connection_id = self
             .state()
-            .read_project(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?
+            .read_project(request.payload.project_id, request.sender_id)?
             .host_connection_id;
         self.peer
             .forward_send(request.sender_id, host_connection_id, request.payload)?;
@@ -636,121 +597,111 @@ impl Server {
     async fn save_buffer(
         self: Arc<Server>,
         request: TypedEnvelope<proto::SaveBuffer>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::BufferSaved> {
         let host;
-        let guests;
+        let mut guests;
         {
             let state = self.state();
-            let project = state
-                .read_project(request.payload.project_id, request.sender_id)
-                .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            let project = state.read_project(request.payload.project_id, request.sender_id)?;
             host = project.host_connection_id;
             guests = project.guest_connection_ids()
         }
 
-        let sender = request.sender_id;
-        let receipt = request.receipt();
         let response = self
             .peer
-            .forward_request(sender, host, request.payload.clone())
+            .forward_request(request.sender_id, host, request.payload.clone())
             .await?;
 
+        guests.retain(|guest_connection_id| *guest_connection_id != request.sender_id);
         broadcast(host, guests, |conn_id| {
-            let response = response.clone();
-            if conn_id == sender {
-                self.peer.respond(receipt, response)
-            } else {
-                self.peer.forward_send(host, conn_id, response)
-            }
+            self.peer.forward_send(host, conn_id, response.clone())
         })?;
 
-        Ok(())
+        Ok(response)
     }
 
-    async fn format_buffer(
+    async fn format_buffers(
         self: Arc<Server>,
-        request: TypedEnvelope<proto::FormatBuffer>,
-    ) -> tide::Result<()> {
-        let host;
-        {
-            let state = self.state();
-            let project = state
-                .read_project(request.payload.project_id, request.sender_id)
-                .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
-            host = project.host_connection_id;
-        }
-
-        let sender = request.sender_id;
-        let receipt = request.receipt();
-        let response = self
+        request: TypedEnvelope<proto::FormatBuffers>,
+    ) -> tide::Result<proto::FormatBuffersResponse> {
+        let host = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
             .peer
-            .forward_request(sender, host, request.payload.clone())
-            .await?;
-        self.peer.respond(receipt, response)?;
-
-        Ok(())
+            .forward_request(request.sender_id, host, request.payload.clone())
+            .await?)
     }
 
     async fn get_completions(
         self: Arc<Server>,
         request: TypedEnvelope<proto::GetCompletions>,
-    ) -> tide::Result<()> {
-        let host;
-        {
-            let state = self.state();
-            let project = state
-                .read_project(request.payload.project_id, request.sender_id)
-                .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
-            host = project.host_connection_id;
-        }
-
-        let sender = request.sender_id;
-        let receipt = request.receipt();
-        let response = self
+    ) -> tide::Result<proto::GetCompletionsResponse> {
+        let host = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
             .peer
-            .forward_request(sender, host, request.payload.clone())
-            .await?;
-        self.peer.respond(receipt, response)?;
-        Ok(())
+            .forward_request(request.sender_id, host, request.payload.clone())
+            .await?)
     }
 
     async fn apply_additional_edits_for_completion(
         self: Arc<Server>,
         request: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
-    ) -> tide::Result<()> {
-        let host;
-        {
-            let state = self.state();
-            let project = state
-                .read_project(request.payload.project_id, request.sender_id)
-                .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
-            host = project.host_connection_id;
-        }
+    ) -> tide::Result<proto::ApplyCompletionAdditionalEditsResponse> {
+        let host = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
+            .peer
+            .forward_request(request.sender_id, host, request.payload.clone())
+            .await?)
+    }
 
-        let sender = request.sender_id;
-        let receipt = request.receipt();
-        let response = self
+    async fn get_code_actions(
+        self: Arc<Server>,
+        request: TypedEnvelope<proto::GetCodeActions>,
+    ) -> tide::Result<proto::GetCodeActionsResponse> {
+        let host = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
             .peer
-            .forward_request(sender, host, request.payload.clone())
-            .await?;
-        self.peer.respond(receipt, response)?;
-        Ok(())
+            .forward_request(request.sender_id, host, request.payload.clone())
+            .await?)
+    }
+
+    async fn apply_code_action(
+        self: Arc<Server>,
+        request: TypedEnvelope<proto::ApplyCodeAction>,
+    ) -> tide::Result<proto::ApplyCodeActionResponse> {
+        let host = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
+            .peer
+            .forward_request(request.sender_id, host, request.payload.clone())
+            .await?)
     }
 
     async fn update_buffer(
         self: Arc<Server>,
         request: TypedEnvelope<proto::UpdateBuffer>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::Ack> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
         })?;
-        self.peer.respond(request.receipt(), proto::Ack {})?;
-        Ok(())
+        Ok(proto::Ack {})
     }
 
     async fn update_buffer_file(
@@ -759,8 +710,7 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -774,8 +724,7 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -789,8 +738,7 @@ impl Server {
     ) -> tide::Result<()> {
         let receiver_ids = self
             .state()
-            .project_connection_ids(request.payload.project_id, request.sender_id)
-            .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?;
+            .project_connection_ids(request.payload.project_id, request.sender_id)?;
         broadcast(request.sender_id, receiver_ids, |connection_id| {
             self.peer
                 .forward_send(request.sender_id, connection_id, request.payload.clone())
@@ -801,29 +749,24 @@ impl Server {
     async fn get_channels(
         self: Arc<Server>,
         request: TypedEnvelope<proto::GetChannels>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::GetChannelsResponse> {
         let user_id = self.state().user_id_for_connection(request.sender_id)?;
         let channels = self.app_state.db.get_accessible_channels(user_id).await?;
-        self.peer.respond(
-            request.receipt(),
-            proto::GetChannelsResponse {
-                channels: channels
-                    .into_iter()
-                    .map(|chan| proto::Channel {
-                        id: chan.id.to_proto(),
-                        name: chan.name,
-                    })
-                    .collect(),
-            },
-        )?;
-        Ok(())
+        Ok(proto::GetChannelsResponse {
+            channels: channels
+                .into_iter()
+                .map(|chan| proto::Channel {
+                    id: chan.id.to_proto(),
+                    name: chan.name,
+                })
+                .collect(),
+        })
     }
 
     async fn get_users(
         self: Arc<Server>,
         request: TypedEnvelope<proto::GetUsers>,
-    ) -> tide::Result<()> {
-        let receipt = request.receipt();
+    ) -> tide::Result<proto::GetUsersResponse> {
         let user_ids = request.payload.user_ids.into_iter().map(UserId::from_proto);
         let users = self
             .app_state
@@ -837,9 +780,7 @@ impl Server {
                 github_login: user.github_login,
             })
             .collect();
-        self.peer
-            .respond(receipt, proto::GetUsersResponse { users })?;
-        Ok(())
+        Ok(proto::GetUsersResponse { users })
     }
 
     fn update_contacts_for_users<'a>(
@@ -867,7 +808,7 @@ impl Server {
     async fn join_channel(
         mut self: Arc<Self>,
         request: TypedEnvelope<proto::JoinChannel>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::JoinChannelResponse> {
         let user_id = self.state().user_id_for_connection(request.sender_id)?;
         let channel_id = ChannelId::from_proto(request.payload.channel_id);
         if !self
@@ -894,14 +835,10 @@ impl Server {
                 nonce: Some(msg.nonce.as_u128().into()),
             })
             .collect::<Vec<_>>();
-        self.peer.respond(
-            request.receipt(),
-            proto::JoinChannelResponse {
-                done: messages.len() < MESSAGE_COUNT_PER_PAGE,
-                messages,
-            },
-        )?;
-        Ok(())
+        Ok(proto::JoinChannelResponse {
+            done: messages.len() < MESSAGE_COUNT_PER_PAGE,
+            messages,
+        })
     }
 
     async fn leave_channel(
@@ -928,54 +865,30 @@ impl Server {
     async fn send_channel_message(
         self: Arc<Self>,
         request: TypedEnvelope<proto::SendChannelMessage>,
-    ) -> tide::Result<()> {
-        let receipt = request.receipt();
+    ) -> tide::Result<proto::SendChannelMessageResponse> {
         let channel_id = ChannelId::from_proto(request.payload.channel_id);
         let user_id;
         let connection_ids;
         {
             let state = self.state();
             user_id = state.user_id_for_connection(request.sender_id)?;
-            if let Some(ids) = state.channel_connection_ids(channel_id) {
-                connection_ids = ids;
-            } else {
-                return Ok(());
-            }
+            connection_ids = state.channel_connection_ids(channel_id)?;
         }
 
         // Validate the message body.
         let body = request.payload.body.trim().to_string();
         if body.len() > MAX_MESSAGE_LEN {
-            self.peer.respond_with_error(
-                receipt,
-                proto::Error {
-                    message: "message is too long".to_string(),
-                },
-            )?;
-            return Ok(());
+            return Err(anyhow!("message is too long"))?;
         }
         if body.is_empty() {
-            self.peer.respond_with_error(
-                receipt,
-                proto::Error {
-                    message: "message can't be blank".to_string(),
-                },
-            )?;
-            return Ok(());
+            return Err(anyhow!("message can't be blank"))?;
         }
 
         let timestamp = OffsetDateTime::now_utc();
-        let nonce = if let Some(nonce) = request.payload.nonce {
-            nonce
-        } else {
-            self.peer.respond_with_error(
-                receipt,
-                proto::Error {
-                    message: "nonce can't be blank".to_string(),
-                },
-            )?;
-            return Ok(());
-        };
+        let nonce = request
+            .payload
+            .nonce
+            .ok_or_else(|| anyhow!("nonce can't be blank"))?;
 
         let message_id = self
             .app_state
@@ -999,19 +912,15 @@ impl Server {
                 },
             )
         })?;
-        self.peer.respond(
-            receipt,
-            proto::SendChannelMessageResponse {
-                message: Some(message),
-            },
-        )?;
-        Ok(())
+        Ok(proto::SendChannelMessageResponse {
+            message: Some(message),
+        })
     }
 
     async fn get_channel_messages(
         self: Arc<Self>,
         request: TypedEnvelope<proto::GetChannelMessages>,
-    ) -> tide::Result<()> {
+    ) -> tide::Result<proto::GetChannelMessagesResponse> {
         let user_id = self.state().user_id_for_connection(request.sender_id)?;
         let channel_id = ChannelId::from_proto(request.payload.channel_id);
         if !self
@@ -1041,14 +950,11 @@ impl Server {
                 nonce: Some(msg.nonce.as_u128().into()),
             })
             .collect::<Vec<_>>();
-        self.peer.respond(
-            request.receipt(),
-            proto::GetChannelMessagesResponse {
-                done: messages.len() < MESSAGE_COUNT_PER_PAGE,
-                messages,
-            },
-        )?;
-        Ok(())
+
+        Ok(proto::GetChannelMessagesResponse {
+            done: messages.len() < MESSAGE_COUNT_PER_PAGE,
+            messages,
+        })
     }
 
     fn state<'a>(self: &'a Arc<Self>) -> RwLockReadGuard<'a, Store> {
@@ -1183,14 +1089,18 @@ mod tests {
             self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials,
             EstablishConnectionError, UserStore,
         },
-        editor::{Editor, EditorSettings, Input, MultiBuffer},
+        editor::{
+            self, ConfirmCodeAction, ConfirmCompletion, Editor, EditorSettings, Input, MultiBuffer,
+            Redo, ToggleCodeActions, Undo,
+        },
         fs::{FakeFs, Fs as _},
         language::{
             tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language,
             LanguageConfig, LanguageRegistry, LanguageServerConfig, Point,
         },
         lsp,
-        project::{DiagnosticSummary, Project, ProjectPath},
+        project::{worktree::WorktreeHandle, DiagnosticSummary, Project, ProjectPath},
+        workspace::{Workspace, WorkspaceParams},
     };
 
     #[cfg(test)]
@@ -1301,7 +1211,7 @@ mod tests {
             .unwrap();
 
         let editor_b = cx_b.add_view(window_b, |cx| {
-            Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), cx)
+            Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), None, cx)
         });
 
         // TODO
@@ -1560,11 +1470,20 @@ mod tests {
         buffer_b.read_with(&cx_b, |buf, _| assert!(!buf.is_dirty()));
         buffer_c.condition(&cx_c, |buf, _| !buf.is_dirty()).await;
 
+        // Ensure worktree observes a/file1's change event *before* the rename occurs, otherwise
+        // when interpreting the change event it will mistakenly think that the file has been
+        // deleted (because its path has changed) and will subsequently fail to detect the rename.
+        worktree_a.flush_fs_events(&cx_a).await;
+
         // Make changes on host's file system, see those changes on guest worktrees.
-        fs.rename("/a/file1".as_ref(), "/a/file1-renamed".as_ref())
-            .await
-            .unwrap();
-        fs.rename("/a/file2".as_ref(), "/a/file3".as_ref())
+        fs.rename(
+            "/a/file1".as_ref(),
+            "/a/file1-renamed".as_ref(),
+            Default::default(),
+        )
+        .await
+        .unwrap();
+        fs.rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default())
             .await
             .unwrap();
         fs.insert_file(Path::new("/a/file4"), "4".into())
@@ -1572,38 +1491,29 @@ mod tests {
             .unwrap();
 
         worktree_a
-            .condition(&cx_a, |tree, _| tree.file_count() == 4)
-            .await;
-        worktree_b
-            .condition(&cx_b, |tree, _| tree.file_count() == 4)
-            .await;
-        worktree_c
-            .condition(&cx_c, |tree, _| tree.file_count() == 4)
-            .await;
-        worktree_a.read_with(&cx_a, |tree, _| {
-            assert_eq!(
+            .condition(&cx_a, |tree, _| {
                 tree.paths()
                     .map(|p| p.to_string_lossy())
-                    .collect::<Vec<_>>(),
-                &[".zed.toml", "file1-renamed", "file3", "file4"]
-            )
-        });
-        worktree_b.read_with(&cx_b, |tree, _| {
-            assert_eq!(
+                    .collect::<Vec<_>>()
+                    == [".zed.toml", "file1-renamed", "file3", "file4"]
+            })
+            .await;
+        worktree_b
+            .condition(&cx_b, |tree, _| {
                 tree.paths()
                     .map(|p| p.to_string_lossy())
-                    .collect::<Vec<_>>(),
-                &[".zed.toml", "file1-renamed", "file3", "file4"]
-            )
-        });
-        worktree_c.read_with(&cx_c, |tree, _| {
-            assert_eq!(
+                    .collect::<Vec<_>>()
+                    == [".zed.toml", "file1-renamed", "file3", "file4"]
+            })
+            .await;
+        worktree_c
+            .condition(&cx_c, |tree, _| {
                 tree.paths()
                     .map(|p| p.to_string_lossy())
-                    .collect::<Vec<_>>(),
-                &[".zed.toml", "file1-renamed", "file3", "file4"]
-            )
-        });
+                    .collect::<Vec<_>>()
+                    == [".zed.toml", "file1-renamed", "file3", "file4"]
+            })
+            .await;
 
         // Ensure buffer files are updated as well.
         buffer_a
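
A pattern worth calling out in the handlers above: get_definition, open_buffer, format_buffers, get_completions, apply_additional_edits_for_completion, get_code_actions, and apply_code_action all reduce to the same three steps now that responses are returned directly: authorize the sender via read_project, resolve the host connection, and forward the request to the host. A hypothetical helper showing that shared shape (not part of this change; it reuses the Server, TypedEnvelope, and Peer types from the diff, and the bounds on forward_request are assumed rather than quoted):

    // Illustration only: the repeated read_project -> host_connection_id ->
    // forward_request shape, factored into one generic proxy.
    async fn forward_project_request<T: RequestMessage>(
        self: Arc<Server>,
        project_id: u64,
        request: TypedEnvelope<T>,
    ) -> tide::Result<T::Response> {
        let host = self
            .state()
            .read_project(project_id, request.sender_id)?
            .host_connection_id;
        Ok(self
            .peer
            .forward_request(request.sender_id, host, request.payload)
            .await?)
    }

The handlers presumably stay separate because each payload type exposes project_id as a plain field rather than through a shared trait, so a generic version would still need the id passed in explicitly, as above.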

crates/server/src/rpc/store.rs šŸ”—

@@ -122,10 +122,10 @@ impl Store {
 
         let mut result = RemovedConnectionState::default();
         for project_id in connection.projects.clone() {
-            if let Some(project) = self.unregister_project(project_id, connection_id) {
+            if let Ok(project) = self.unregister_project(project_id, connection_id) {
                 result.contact_ids.extend(project.authorized_user_ids());
                 result.hosted_projects.insert(project_id, project);
-            } else if let Some(project) = self.leave_project(connection_id, project_id) {
+            } else if let Ok(project) = self.leave_project(connection_id, project_id) {
                 result
                     .guest_project_ids
                     .insert(project_id, project.connection_ids);
@@ -254,9 +254,14 @@ impl Store {
         &mut self,
         project_id: u64,
         worktree_id: u64,
+        connection_id: ConnectionId,
         worktree: Worktree,
-    ) -> bool {
-        if let Some(project) = self.projects.get_mut(&project_id) {
+    ) -> tide::Result<()> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        if project.host_connection_id == connection_id {
             for authorized_user_id in &worktree.authorized_user_ids {
                 self.visible_projects_by_user_id
                     .entry(*authorized_user_id)
@@ -270,9 +275,9 @@ impl Store {
 
             #[cfg(test)]
             self.check_invariants();
-            true
+            Ok(())
         } else {
-            false
+            Err(anyhow!("no such project"))?
         }
     }
 
@@ -280,7 +285,7 @@ impl Store {
         &mut self,
         project_id: u64,
         connection_id: ConnectionId,
-    ) -> Option<Project> {
+    ) -> tide::Result<Project> {
         match self.projects.entry(project_id) {
             hash_map::Entry::Occupied(e) => {
                 if e.get().host_connection_id == connection_id {
@@ -292,12 +297,12 @@ impl Store {
                         }
                     }
 
-                    Some(e.remove())
+                    Ok(e.remove())
                 } else {
-                    None
+                    Err(anyhow!("no such project"))?
                 }
             }
-            hash_map::Entry::Vacant(_) => None,
+            hash_map::Entry::Vacant(_) => Err(anyhow!("no such project"))?,
         }
     }
 
@@ -398,20 +403,26 @@ impl Store {
         connection_id: ConnectionId,
         entries: HashMap<u64, proto::Entry>,
         diagnostic_summaries: BTreeMap<PathBuf, proto::DiagnosticSummary>,
-    ) -> Option<SharedWorktree> {
-        let project = self.projects.get_mut(&project_id)?;
-        let worktree = project.worktrees.get_mut(&worktree_id)?;
+    ) -> tide::Result<SharedWorktree> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        let worktree = project
+            .worktrees
+            .get_mut(&worktree_id)
+            .ok_or_else(|| anyhow!("no such worktree"))?;
         if project.host_connection_id == connection_id && project.share.is_some() {
             worktree.share = Some(WorktreeShare {
                 entries,
                 diagnostic_summaries,
             });
-            Some(SharedWorktree {
+            Ok(SharedWorktree {
                 authorized_user_ids: project.authorized_user_ids(),
                 connection_ids: project.guest_connection_ids(),
             })
         } else {
-            None
+            Err(anyhow!("no such worktree"))?
         }
     }
 
@@ -421,19 +432,25 @@ impl Store {
         worktree_id: u64,
         connection_id: ConnectionId,
         summary: proto::DiagnosticSummary,
-    ) -> Option<Vec<ConnectionId>> {
-        let project = self.projects.get_mut(&project_id)?;
-        let worktree = project.worktrees.get_mut(&worktree_id)?;
+    ) -> tide::Result<Vec<ConnectionId>> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        let worktree = project
+            .worktrees
+            .get_mut(&worktree_id)
+            .ok_or_else(|| anyhow!("no such worktree"))?;
         if project.host_connection_id == connection_id {
             if let Some(share) = worktree.share.as_mut() {
                 share
                     .diagnostic_summaries
                     .insert(summary.path.clone().into(), summary);
-                return Some(project.connection_ids());
+                return Ok(project.connection_ids());
             }
         }
 
-        None
+        Err(anyhow!("no such worktree"))?
     }
 
     pub fn join_project(
@@ -481,10 +498,19 @@ impl Store {
         &mut self,
         connection_id: ConnectionId,
         project_id: u64,
-    ) -> Option<LeftProject> {
-        let project = self.projects.get_mut(&project_id)?;
-        let share = project.share.as_mut()?;
-        let (replica_id, _) = share.guests.remove(&connection_id)?;
+    ) -> tide::Result<LeftProject> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
+        let share = project
+            .share
+            .as_mut()
+            .ok_or_else(|| anyhow!("project is not shared"))?;
+        let (replica_id, _) = share
+            .guests
+            .remove(&connection_id)
+            .ok_or_else(|| anyhow!("cannot leave a project before joining it"))?;
         share.active_replica_ids.remove(&replica_id);
 
         if let Some(connection) = self.connections.get_mut(&connection_id) {
@@ -497,7 +523,7 @@ impl Store {
         #[cfg(test)]
         self.check_invariants();
 
-        Some(LeftProject {
+        Ok(LeftProject {
             connection_ids,
             authorized_user_ids,
         })
@@ -510,31 +536,40 @@ impl Store {
         worktree_id: u64,
         removed_entries: &[u64],
         updated_entries: &[proto::Entry],
-    ) -> Option<Vec<ConnectionId>> {
+    ) -> tide::Result<Vec<ConnectionId>> {
         let project = self.write_project(project_id, connection_id)?;
-        let share = project.worktrees.get_mut(&worktree_id)?.share.as_mut()?;
+        let share = project
+            .worktrees
+            .get_mut(&worktree_id)
+            .ok_or_else(|| anyhow!("no such worktree"))?
+            .share
+            .as_mut()
+            .ok_or_else(|| anyhow!("worktree is not shared"))?;
         for entry_id in removed_entries {
             share.entries.remove(&entry_id);
         }
         for entry in updated_entries {
             share.entries.insert(entry.id, entry.clone());
         }
-        Some(project.connection_ids())
+        Ok(project.connection_ids())
     }
 
     pub fn project_connection_ids(
         &self,
         project_id: u64,
         acting_connection_id: ConnectionId,
-    ) -> Option<Vec<ConnectionId>> {
-        Some(
-            self.read_project(project_id, acting_connection_id)?
-                .connection_ids(),
-        )
+    ) -> tide::Result<Vec<ConnectionId>> {
+        Ok(self
+            .read_project(project_id, acting_connection_id)?
+            .connection_ids())
     }
 
-    pub fn channel_connection_ids(&self, channel_id: ChannelId) -> Option<Vec<ConnectionId>> {
-        Some(self.channels.get(&channel_id)?.connection_ids())
+    pub fn channel_connection_ids(&self, channel_id: ChannelId) -> tide::Result<Vec<ConnectionId>> {
+        Ok(self
+            .channels
+            .get(&channel_id)
+            .ok_or_else(|| anyhow!("no such channel"))?
+            .connection_ids())
     }
 
     #[cfg(test)]
@@ -542,14 +577,26 @@ impl Store {
         self.projects.get(&project_id)
     }
 
-    pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> {
-        let project = self.projects.get(&project_id)?;
+    pub fn read_project(
+        &self,
+        project_id: u64,
+        connection_id: ConnectionId,
+    ) -> tide::Result<&Project> {
+        let project = self
+            .projects
+            .get(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
         if project.host_connection_id == connection_id
-            || project.share.as_ref()?.guests.contains_key(&connection_id)
+            || project
+                .share
+                .as_ref()
+                .ok_or_else(|| anyhow!("project is not shared"))?
+                .guests
+                .contains_key(&connection_id)
         {
-            Some(project)
+            Ok(project)
         } else {
-            None
+            Err(anyhow!("no such project"))?
         }
     }
 
@@ -557,14 +604,22 @@ impl Store {
         &mut self,
         project_id: u64,
         connection_id: ConnectionId,
-    ) -> Option<&mut Project> {
-        let project = self.projects.get_mut(&project_id)?;
+    ) -> tide::Result<&mut Project> {
+        let project = self
+            .projects
+            .get_mut(&project_id)
+            .ok_or_else(|| anyhow!("no such project"))?;
         if project.host_connection_id == connection_id
-            || project.share.as_ref()?.guests.contains_key(&connection_id)
+            || project
+                .share
+                .as_ref()
+                .ok_or_else(|| anyhow!("project is not shared"))?
+                .guests
+                .contains_key(&connection_id)
         {
-            Some(project)
+            Ok(project)
         } else {
-            None
+            Err(anyhow!("no such project"))?
         }
     }
 

crates/text/src/anchor.rs šŸ”—

@@ -1,5 +1,5 @@
 use super::{Point, ToOffset};
-use crate::{rope::TextDimension, BufferSnapshot};
+use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPointUtf16};
 use anyhow::Result;
 use std::{cmp::Ordering, fmt::Debug, ops::Range};
 use sum_tree::Bias;
@@ -78,6 +78,7 @@ pub trait AnchorRangeExt {
     fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
     fn to_offset(&self, content: &BufferSnapshot) -> Range<usize>;
     fn to_point(&self, content: &BufferSnapshot) -> Range<Point>;
+    fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16>;
 }
 
 impl AnchorRangeExt for Range<Anchor> {
@@ -95,4 +96,8 @@ impl AnchorRangeExt for Range<Anchor> {
     fn to_point(&self, content: &BufferSnapshot) -> Range<Point> {
         self.start.summary::<Point>(&content)..self.end.summary::<Point>(&content)
     }
+
+    fn to_point_utf16(&self, content: &BufferSnapshot) -> Range<PointUtf16> {
+        self.start.to_point_utf16(content)..self.end.to_point_utf16(content)
+    }
 }

crates/text/src/rope.rs šŸ”—

@@ -179,6 +179,19 @@ impl Rope {
             })
     }
 
+    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+        if point >= self.summary().lines {
+            return self.summary().lines_utf16;
+        }
+        let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>();
+        cursor.seek(&point, Bias::Left, &());
+        let overshoot = point - cursor.start().0;
+        cursor.start().1
+            + cursor.item().map_or(PointUtf16::zero(), |chunk| {
+                chunk.point_to_point_utf16(overshoot)
+            })
+    }
+
     pub fn point_to_offset(&self, point: Point) -> usize {
         if point >= self.summary().lines {
             return self.summary().bytes;
@@ -580,6 +593,27 @@ impl Chunk {
         offset
     }
 
+    fn point_to_point_utf16(&self, target: Point) -> PointUtf16 {
+        let mut point = Point::zero();
+        let mut point_utf16 = PointUtf16::new(0, 0);
+        for ch in self.0.chars() {
+            if point >= target {
+                break;
+            }
+
+            if ch == '\n' {
+                point_utf16.row += 1;
+                point_utf16.column = 0;
+                point.row += 1;
+                point.column = 0;
+            } else {
+                point_utf16.column += ch.len_utf16() as u32;
+                point.column += ch.len_utf8() as u32;
+            }
+        }
+        point_utf16
+    }
+
     fn point_utf16_to_offset(&self, target: PointUtf16) -> usize {
         let mut offset = 0;
         let mut point = PointUtf16::new(0, 0);
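
The reason point_to_point_utf16 has to walk characters at all: a Point column counts UTF-8 bytes while a PointUtf16 column counts UTF-16 code units, and the two only agree on ASCII, which is why the chunk loop adds ch.len_utf8() to one column and ch.len_utf16() to the other. A standalone illustration of that per-character bookkeeping:

    fn main() {
        // 'a' is 1 byte / 1 code unit, 'Ć©' is 2 / 1, '☃' is 3 / 1, '😀' is 4 / 2,
        // so the two coordinate systems drift apart as soon as non-ASCII appears.
        for ch in ['a', 'Ć©', '☃', '😀'] {
            println!(
                "{:?}: {} UTF-8 bytes, {} UTF-16 code units",
                ch,
                ch.len_utf8(),
                ch.len_utf16()
            );
        }
    }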

crates/text/src/tests.rs šŸ”—

@@ -432,28 +432,28 @@ fn test_undo_redo() {
     buffer.edit(vec![3..5], "cd");
     assert_eq!(buffer.text(), "1abcdef234");
 
-    let transactions = buffer.history.undo_stack.clone();
-    assert_eq!(transactions.len(), 3);
+    let entries = buffer.history.undo_stack.clone();
+    assert_eq!(entries.len(), 3);
 
-    buffer.undo_or_redo(transactions[0].clone()).unwrap();
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1cdef234");
-    buffer.undo_or_redo(transactions[0].clone()).unwrap();
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abcdef234");
 
-    buffer.undo_or_redo(transactions[1].clone()).unwrap();
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abcdx234");
-    buffer.undo_or_redo(transactions[2].clone()).unwrap();
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abx234");
-    buffer.undo_or_redo(transactions[1].clone()).unwrap();
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abyzef234");
-    buffer.undo_or_redo(transactions[2].clone()).unwrap();
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abcdef234");
 
-    buffer.undo_or_redo(transactions[2].clone()).unwrap();
+    buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1abyzef234");
-    buffer.undo_or_redo(transactions[0].clone()).unwrap();
+    buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1yzef234");
-    buffer.undo_or_redo(transactions[1].clone()).unwrap();
+    buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
     assert_eq!(buffer.text(), "1234");
 }
 
@@ -502,7 +502,7 @@ fn test_history() {
 }
 
 #[test]
-fn test_avoid_grouping_next_transaction() {
+fn test_finalize_last_transaction() {
     let now = Instant::now();
     let mut buffer = Buffer::new(0, 0, History::new("123456".into()));
 
@@ -511,7 +511,7 @@ fn test_avoid_grouping_next_transaction() {
     buffer.end_transaction_at(now);
     assert_eq!(buffer.text(), "12cd56");
 
-    buffer.avoid_grouping_next_transaction();
+    buffer.finalize_last_transaction();
     buffer.start_transaction_at(now);
     buffer.edit(vec![4..5], "e");
     buffer.end_transaction_at(now).unwrap();
@@ -536,6 +536,44 @@ fn test_avoid_grouping_next_transaction() {
     assert_eq!(buffer.text(), "ab2cde6");
 }
 
+#[test]
+fn test_edited_ranges_for_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, History::new("1234567".into()));
+
+    buffer.start_transaction_at(now);
+    buffer.edit(vec![2..4], "cd");
+    buffer.edit(vec![6..6], "efg");
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56efg7");
+
+    let tx = buffer.finalize_last_transaction().unwrap().clone();
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 6..9]
+    );
+
+    buffer.edit(vec![5..5], "hijk");
+    assert_eq!(buffer.text(), "12cd5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 10..13]
+    );
+
+    buffer.edit(vec![4..4], "l");
+    assert_eq!(buffer.text(), "12cdl5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 11..14]
+    );
+}
+
 #[test]
 fn test_concurrent_edits() {
     let text = "abcdef";
@@ -551,12 +589,12 @@ fn test_concurrent_edits() {
     let buf3_op = buffer3.edit(vec![5..6], "56");
     assert_eq!(buffer3.text(), "abcde56");
 
-    buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap();
-    buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap();
-    buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap();
-    buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap();
-    buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap();
-    buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap();
+    buffer1.apply_op(buf2_op.clone()).unwrap();
+    buffer1.apply_op(buf3_op.clone()).unwrap();
+    buffer2.apply_op(buf1_op.clone()).unwrap();
+    buffer2.apply_op(buf3_op.clone()).unwrap();
+    buffer3.apply_op(buf1_op.clone()).unwrap();
+    buffer3.apply_op(buf2_op.clone()).unwrap();
 
     assert_eq!(buffer1.text(), "a12c34e56");
     assert_eq!(buffer2.text(), "a12c34e56");

crates/text/src/text.rs šŸ”—

@@ -40,7 +40,7 @@ pub use subscription::*;
 pub use sum_tree::Bias;
 use sum_tree::{FilterCursor, SumTree};
 
-pub type TransactionId = usize;
+pub type TransactionId = clock::Local;
 
 pub struct Buffer {
     snapshot: BufferSnapshot,
@@ -67,28 +67,37 @@ pub struct BufferSnapshot {
 }
 
 #[derive(Clone, Debug)]
-pub struct Transaction {
-    id: TransactionId,
-    start: clock::Global,
-    end: clock::Global,
-    edits: Vec<clock::Local>,
-    ranges: Vec<Range<FullOffset>>,
+pub struct HistoryEntry {
+    transaction: Transaction,
     first_edit_at: Instant,
     last_edit_at: Instant,
     suppress_grouping: bool,
 }
 
-impl Transaction {
+#[derive(Clone, Debug)]
+pub struct Transaction {
+    pub id: TransactionId,
+    pub edit_ids: Vec<clock::Local>,
+    pub start: clock::Global,
+    pub end: clock::Global,
+    pub ranges: Vec<Range<FullOffset>>,
+}
+
+impl HistoryEntry {
+    pub fn transaction_id(&self) -> TransactionId {
+        self.transaction.id
+    }
+
     fn push_edit(&mut self, edit: &EditOperation) {
-        self.edits.push(edit.timestamp.local());
-        self.end.observe(edit.timestamp.local());
+        self.transaction.edit_ids.push(edit.timestamp.local());
+        self.transaction.end.observe(edit.timestamp.local());
 
         let mut other_ranges = edit.ranges.iter().peekable();
         let mut new_ranges = Vec::new();
         let insertion_len = edit.new_text.as_ref().map_or(0, |t| t.len());
         let mut delta = 0;
 
-        for mut self_range in self.ranges.iter().cloned() {
+        for mut self_range in self.transaction.ranges.iter().cloned() {
             self_range.start += delta;
             self_range.end += delta;
 
@@ -122,7 +131,7 @@ impl Transaction {
             delta += insertion_len;
         }
 
-        self.ranges = new_ranges;
+        self.transaction.ranges = new_ranges;
     }
 }
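
Two things change shape here: undo/redo entries become a thin HistoryEntry wrapper (grouping timestamps plus suppress_grouping) around a standalone, publicly exposed Transaction, and TransactionId switches from a per-buffer usize counter to clock::Local. A sketch of why the latter matters, assuming clock::Local is essentially a (replica id, counter) pair as the local_clock.tick() call in start_transaction below suggests (the struct here is a stand-in, not the clock crate's definition):

    // Stand-in for clock::Local, for illustration only.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct Local {
        replica_id: u16,
        value: u32,
    }

    impl Local {
        fn tick(&mut self) -> Self {
            self.value += 1;
            *self
        }
    }

    fn main() {
        let mut host = Local { replica_id: 0, value: 0 };
        let mut guest = Local { replica_id: 1, value: 0 };
        // Replicas can open transactions concurrently without their ids
        // colliding, which the old per-buffer counter (always starting at
        // zero) could not guarantee across collaborators.
        assert_ne!(host.tick(), guest.tick());
    }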
 
@@ -130,42 +139,46 @@ impl Transaction {
 pub struct History {
     // TODO: Turn this into a String or Rope, maybe.
     pub base_text: Arc<str>,
-    ops: HashMap<clock::Local, EditOperation>,
-    undo_stack: Vec<Transaction>,
-    redo_stack: Vec<Transaction>,
+    operations: HashMap<clock::Local, Operation>,
+    undo_stack: Vec<HistoryEntry>,
+    redo_stack: Vec<HistoryEntry>,
     transaction_depth: usize,
     group_interval: Duration,
-    next_transaction_id: TransactionId,
 }
 
 impl History {
     pub fn new(base_text: Arc<str>) -> Self {
         Self {
             base_text,
-            ops: Default::default(),
+            operations: Default::default(),
             undo_stack: Vec::new(),
             redo_stack: Vec::new(),
             transaction_depth: 0,
             group_interval: Duration::from_millis(300),
-            next_transaction_id: 0,
         }
     }
 
-    fn push(&mut self, op: EditOperation) {
-        self.ops.insert(op.timestamp.local(), op);
+    fn push(&mut self, op: Operation) {
+        self.operations.insert(op.local_timestamp(), op);
     }
 
-    fn start_transaction(&mut self, start: clock::Global, now: Instant) -> Option<TransactionId> {
+    fn start_transaction(
+        &mut self,
+        start: clock::Global,
+        now: Instant,
+        local_clock: &mut clock::Local,
+    ) -> Option<TransactionId> {
         self.transaction_depth += 1;
         if self.transaction_depth == 1 {
-            let id = self.next_transaction_id;
-            self.next_transaction_id += 1;
-            self.undo_stack.push(Transaction {
-                id,
-                start: start.clone(),
-                end: start,
-                edits: Vec::new(),
-                ranges: Vec::new(),
+            let id = local_clock.tick();
+            self.undo_stack.push(HistoryEntry {
+                transaction: Transaction {
+                    id,
+                    start: start.clone(),
+                    end: start,
+                    edit_ids: Default::default(),
+                    ranges: Default::default(),
+                },
                 first_edit_at: now,
                 last_edit_at: now,
                 suppress_grouping: false,
@@ -176,17 +189,24 @@ impl History {
         }
     }
 
-    fn end_transaction(&mut self, now: Instant) -> Option<&Transaction> {
+    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
         assert_ne!(self.transaction_depth, 0);
         self.transaction_depth -= 1;
         if self.transaction_depth == 0 {
-            if self.undo_stack.last().unwrap().ranges.is_empty() {
+            if self
+                .undo_stack
+                .last()
+                .unwrap()
+                .transaction
+                .ranges
+                .is_empty()
+            {
                 self.undo_stack.pop();
                 None
             } else {
-                let transaction = self.undo_stack.last_mut().unwrap();
-                transaction.last_edit_at = now;
-                Some(transaction)
+                let entry = self.undo_stack.last_mut().unwrap();
+                entry.last_edit_at = now;
+                Some(entry)
             }
         } else {
             None
@@ -195,16 +215,15 @@ impl History {
 
     fn group(&mut self) -> Option<TransactionId> {
         let mut new_len = self.undo_stack.len();
-        let mut transactions = self.undo_stack.iter_mut();
-
-        if let Some(mut transaction) = transactions.next_back() {
-            while let Some(prev_transaction) = transactions.next_back() {
-                if !prev_transaction.suppress_grouping
-                    && transaction.first_edit_at - prev_transaction.last_edit_at
-                        <= self.group_interval
-                    && transaction.start == prev_transaction.end
+        let mut entries = self.undo_stack.iter_mut();
+
+        if let Some(mut entry) = entries.next_back() {
+            while let Some(prev_entry) = entries.next_back() {
+                if !prev_entry.suppress_grouping
+                    && entry.first_edit_at - prev_entry.last_edit_at <= self.group_interval
+                    && entry.transaction.start == prev_entry.transaction.end
                 {
-                    transaction = prev_transaction;
+                    entry = prev_entry;
                     new_len -= 1;
                 } else {
                     break;
@@ -212,101 +231,114 @@ impl History {
             }
         }
 
-        let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len);
-        if let Some(last_transaction) = transactions_to_keep.last_mut() {
-            for transaction in &*transactions_to_merge {
-                for edit_id in &transaction.edits {
-                    last_transaction.push_edit(&self.ops[edit_id]);
+        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
+        if let Some(last_entry) = entries_to_keep.last_mut() {
+            for entry in &*entries_to_merge {
+                for edit_id in &entry.transaction.edit_ids {
+                    last_entry.push_edit(self.operations[edit_id].as_edit().unwrap());
                 }
             }
 
-            if let Some(transaction) = transactions_to_merge.last_mut() {
-                last_transaction.last_edit_at = transaction.last_edit_at;
-                last_transaction.end = transaction.end.clone();
+            if let Some(entry) = entries_to_merge.last_mut() {
+                last_entry.last_edit_at = entry.last_edit_at;
+                last_entry.transaction.end = entry.transaction.end.clone();
             }
         }
 
         self.undo_stack.truncate(new_len);
-        self.undo_stack.last().map(|t| t.id)
+        self.undo_stack.last().map(|e| e.transaction.id)
     }
 
-    fn avoid_grouping_next_transaction(&mut self) {
-        if let Some(transaction) = self.undo_stack.last_mut() {
-            transaction.suppress_grouping = true;
-        }
+    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.undo_stack.last_mut().map(|entry| {
+            entry.suppress_grouping = true;
+            &entry.transaction
+        })
     }
 
-    fn push_transaction(&mut self, edit_ids: impl IntoIterator<Item = clock::Local>, now: Instant) {
+    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
         assert_eq!(self.transaction_depth, 0);
-        let mut edit_ids = edit_ids.into_iter().peekable();
-
-        if let Some(first_edit_id) = edit_ids.peek() {
-            let version = self.ops[first_edit_id].version.clone();
-            self.start_transaction(version, now);
-            for edit_id in edit_ids {
-                self.push_undo(edit_id);
-            }
-            self.end_transaction(now);
-        }
+        self.undo_stack.push(HistoryEntry {
+            transaction,
+            first_edit_at: now,
+            last_edit_at: now,
+            suppress_grouping: false,
+        });
     }
 
-    fn push_undo(&mut self, edit_id: clock::Local) {
+    fn push_undo(&mut self, op_id: clock::Local) {
         assert_ne!(self.transaction_depth, 0);
-        let last_transaction = self.undo_stack.last_mut().unwrap();
-        last_transaction.push_edit(&self.ops[&edit_id]);
+        if let Some(Operation::Edit(edit)) = self.operations.get(&op_id) {
+            let last_transaction = self.undo_stack.last_mut().unwrap();
+            last_transaction.push_edit(&edit);
+        }
     }
 
-    fn pop_undo(&mut self) -> Option<&Transaction> {
+    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
         assert_eq!(self.transaction_depth, 0);
-        if let Some(transaction) = self.undo_stack.pop() {
-            self.redo_stack.push(transaction);
+        if let Some(entry) = self.undo_stack.pop() {
+            self.redo_stack.push(entry);
             self.redo_stack.last()
         } else {
             None
         }
     }
 
-    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> {
+    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
         assert_eq!(self.transaction_depth, 0);
-        if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) {
-            let transaction = self.undo_stack.remove(transaction_ix);
-            self.redo_stack.push(transaction);
-            self.redo_stack.last()
-        } else {
-            None
+
+        let redo_stack_start_len = self.redo_stack.len();
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.redo_stack
+                .extend(self.undo_stack.drain(entry_ix..).rev());
         }
+        &self.redo_stack[redo_stack_start_len..]
     }
 
     fn forget(&mut self, transaction_id: TransactionId) {
         assert_eq!(self.transaction_depth, 0);
-        if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) {
-            self.undo_stack.remove(transaction_ix);
-        } else if let Some(transaction_ix) =
-            self.redo_stack.iter().rposition(|t| t.id == transaction_id)
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.undo_stack.remove(entry_ix);
+        } else if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
         {
-            self.undo_stack.remove(transaction_ix);
+            self.redo_stack.remove(entry_ix);
         }
     }
 
-    fn pop_redo(&mut self) -> Option<&Transaction> {
+    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
         assert_eq!(self.transaction_depth, 0);
-        if let Some(transaction) = self.redo_stack.pop() {
-            self.undo_stack.push(transaction);
+        if let Some(entry) = self.redo_stack.pop() {
+            self.undo_stack.push(entry);
             self.undo_stack.last()
         } else {
             None
         }
     }
 
-    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> {
+    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
         assert_eq!(self.transaction_depth, 0);
-        if let Some(transaction_ix) = self.redo_stack.iter().rposition(|t| t.id == transaction_id) {
-            let transaction = self.redo_stack.remove(transaction_ix);
-            self.undo_stack.push(transaction);
-            self.undo_stack.last()
-        } else {
-            None
+
+        let undo_stack_start_len = self.undo_stack.len();
+        if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.undo_stack
+                .extend(self.redo_stack.drain(entry_ix..).rev());
         }
+        &self.undo_stack[undo_stack_start_len..]
     }
 }
 
@@ -545,57 +577,6 @@ impl Buffer {
         }
     }
 
-    pub fn from_parts(
-        replica_id: u16,
-        remote_id: u64,
-        visible_text: &str,
-        deleted_text: &str,
-        undo_map: impl Iterator<Item = (clock::Local, Vec<(clock::Local, u32)>)>,
-        fragments: impl ExactSizeIterator<Item = Fragment>,
-        lamport_timestamp: u32,
-        version: clock::Global,
-    ) -> Self {
-        let visible_text = visible_text.into();
-        let deleted_text = deleted_text.into();
-        let fragments = SumTree::from_iter(fragments, &None);
-        let mut insertions = fragments
-            .iter()
-            .map(|fragment| InsertionFragment {
-                timestamp: fragment.insertion_timestamp.local(),
-                split_offset: fragment.insertion_offset,
-                fragment_id: fragment.id.clone(),
-            })
-            .collect::<Vec<_>>();
-        insertions.sort_unstable_by_key(|i| (i.timestamp, i.split_offset));
-        Self {
-            remote_id,
-            replica_id,
-
-            history: History::new("".into()),
-            deferred_ops: OperationQueue::new(),
-            deferred_replicas: Default::default(),
-            local_clock: clock::Local {
-                replica_id,
-                value: version.get(replica_id) + 1,
-            },
-            lamport_clock: clock::Lamport {
-                replica_id,
-                value: lamport_timestamp,
-            },
-            subscriptions: Default::default(),
-            edit_id_resolvers: Default::default(),
-            snapshot: BufferSnapshot {
-                replica_id,
-                visible_text,
-                deleted_text,
-                undo_map: UndoMap(undo_map.collect()),
-                fragments,
-                insertions: SumTree::from_iter(insertions, &()),
-                version,
-            },
-        }
-    }
-
     pub fn version(&self) -> clock::Global {
         self.version.clone()
     }
@@ -620,7 +601,7 @@ impl Buffer {
         self.history.group_interval
     }
 
-    pub fn edit<R, I, S, T>(&mut self, ranges: R, new_text: T) -> EditOperation
+    pub fn edit<R, I, S, T>(&mut self, ranges: R, new_text: T) -> Operation
     where
         R: IntoIterator<IntoIter = I>,
         I: ExactSizeIterator<Item = Range<S>>,
@@ -641,13 +622,14 @@ impl Buffer {
             local: self.local_clock.tick().value,
             lamport: self.lamport_clock.tick().value,
         };
-        let edit = self.apply_local_edit(ranges.into_iter(), new_text, timestamp);
+        let operation =
+            Operation::Edit(self.apply_local_edit(ranges.into_iter(), new_text, timestamp));
 
-        self.history.push(edit.clone());
-        self.history.push_undo(edit.timestamp.local());
-        self.snapshot.version.observe(edit.timestamp.local());
+        self.history.push(operation.clone());
+        self.history.push_undo(operation.local_timestamp());
+        self.snapshot.version.observe(operation.local_timestamp());
         self.end_transaction();
-        edit
+        operation
     }
 
     fn apply_local_edit<S: ToOffset>(
@@ -815,6 +797,7 @@ impl Buffer {
     pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> {
         let mut deferred_ops = Vec::new();
         for op in ops {
+            self.history.push(op.clone());
             if self.can_apply_op(&op) {
                 self.apply_op(op)?;
             } else {
@@ -839,7 +822,6 @@ impl Buffer {
                     );
                     self.snapshot.version.observe(edit.timestamp.local());
                     self.resolve_edit(edit.timestamp.local());
-                    self.history.push(edit);
                 }
             }
             Operation::Undo {
@@ -1142,10 +1124,6 @@ impl Buffer {
         Ok(())
     }
 
-    pub fn deferred_ops(&self) -> impl Iterator<Item = &Operation> {
-        self.deferred_ops.iter()
-    }
-
     fn flush_deferred_ops(&mut self) -> Result<()> {
         self.deferred_replicas.clear();
         let mut deferred_ops = Vec::new();
@@ -1172,16 +1150,21 @@ impl Buffer {
         }
     }
 
-    pub fn peek_undo_stack(&self) -> Option<&Transaction> {
+    pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
         self.history.undo_stack.last()
     }
 
+    pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
+        self.history.redo_stack.last()
+    }
+
     pub fn start_transaction(&mut self) -> Option<TransactionId> {
         self.start_transaction_at(Instant::now())
     }
 
     pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
-        self.history.start_transaction(self.version.clone(), now)
+        self.history
+            .start_transaction(self.version.clone(), now, &mut self.local_clock)
     }
 
     pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
@@ -1189,8 +1172,8 @@ impl Buffer {
     }
 
     pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
-        if let Some(transaction) = self.history.end_transaction(now) {
-            let since = transaction.start.clone();
+        if let Some(entry) = self.history.end_transaction(now) {
+            let since = entry.transaction.start.clone();
             let id = self.history.group().unwrap();
             Some((id, since))
         } else {
@@ -1198,16 +1181,16 @@ impl Buffer {
         }
     }
 
-    pub fn avoid_grouping_next_transaction(&mut self) {
-        self.history.avoid_grouping_next_transaction()
+    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.history.finalize_last_transaction()
     }
 
     pub fn base_text(&self) -> &Arc<str> {
         &self.history.base_text
     }
 
-    pub fn history(&self) -> impl Iterator<Item = &EditOperation> {
-        self.history.ops.values()
+    pub fn history(&self) -> impl Iterator<Item = &Operation> {
+        self.history.operations.values()
     }
 
     pub fn undo_history(&self) -> impl Iterator<Item = (&clock::Local, &[(clock::Local, u32)])> {
@@ -1218,7 +1201,8 @@ impl Buffer {
     }
 
     pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
-        if let Some(transaction) = self.history.pop_undo().cloned() {
+        if let Some(entry) = self.history.pop_undo() {
+            let transaction = entry.transaction.clone();
             let transaction_id = transaction.id;
             let op = self.undo_or_redo(transaction).unwrap();
             Some((transaction_id, op))
@@ -1227,13 +1211,18 @@ impl Buffer {
         }
     }
 
-    pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
-        if let Some(transaction) = self.history.remove_from_undo(transaction_id).cloned() {
-            let op = self.undo_or_redo(transaction).unwrap();
-            Some(op)
-        } else {
-            None
-        }
+    pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_undo(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
     }
 
     pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
@@ -1241,7 +1230,8 @@ impl Buffer {
     }
 
     pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
-        if let Some(transaction) = self.history.pop_redo().cloned() {
+        if let Some(entry) = self.history.pop_redo() {
+            let transaction = entry.transaction.clone();
             let transaction_id = transaction.id;
             let op = self.undo_or_redo(transaction).unwrap();
             Some((transaction_id, op))
@@ -1250,18 +1240,23 @@ impl Buffer {
         }
     }
 
-    pub fn redo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
-        if let Some(transaction) = self.history.remove_from_redo(transaction_id).cloned() {
-            let op = self.undo_or_redo(transaction).unwrap();
-            Some(op)
-        } else {
-            None
-        }
+    pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+        let transactions = self
+            .history
+            .remove_from_redo(transaction_id)
+            .iter()
+            .map(|entry| entry.transaction.clone())
+            .collect::<Vec<_>>();
+
+        transactions
+            .into_iter()
+            .map(|transaction| self.undo_or_redo(transaction).unwrap())
+            .collect()
     }
 
     fn undo_or_redo(&mut self, transaction: Transaction) -> Result<Operation> {
         let mut counts = HashMap::default();
-        for edit_id in transaction.edits {
+        for edit_id in transaction.edit_ids {
             counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1);
         }
 
@@ -1272,20 +1267,18 @@ impl Buffer {
             version: transaction.start.clone(),
         };
         self.apply_undo(&undo)?;
-        self.snapshot.version.observe(undo.id);
-
-        Ok(Operation::Undo {
+        let operation = Operation::Undo {
             undo,
             lamport_timestamp: self.lamport_clock.tick(),
-        })
+        };
+        self.snapshot.version.observe(operation.local_timestamp());
+        self.history.push(operation.clone());
+        Ok(operation)
     }
 
-    pub fn push_transaction(
-        &mut self,
-        edit_ids: impl IntoIterator<Item = clock::Local>,
-        now: Instant,
-    ) {
-        self.history.push_transaction(edit_ids, now);
+    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        self.history.push_transaction(transaction, now);
+        self.history.finalize_last_transaction();
     }
 
     pub fn subscribe(&mut self) -> Subscription {
@@ -1294,13 +1287,13 @@ impl Buffer {
 
     pub fn wait_for_edits(
         &mut self,
-        edit_ids: &[clock::Local],
+        edit_ids: impl IntoIterator<Item = clock::Local>,
     ) -> impl 'static + Future<Output = ()> {
         let mut futures = Vec::new();
         for edit_id in edit_ids {
-            if !self.version.observed(*edit_id) {
+            if !self.version.observed(edit_id) {
                 let (tx, rx) = oneshot::channel();
-                self.edit_id_resolvers.entry(*edit_id).or_default().push(tx);
+                self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
                 futures.push(rx);
             }
         }
@@ -1404,7 +1397,7 @@ impl Buffer {
             new_text
         );
         let op = self.edit(old_ranges.iter().cloned(), new_text.as_str());
-        (old_ranges, new_text, Operation::Edit(op))
+        (old_ranges, new_text, op)
     }
 
     pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
@@ -1412,7 +1405,8 @@ impl Buffer {
 
         let mut ops = Vec::new();
         for _ in 0..rng.gen_range(1..=5) {
-            if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() {
+            if let Some(entry) = self.history.undo_stack.choose(rng) {
+                let transaction = entry.transaction.clone();
                 log::info!(
                     "undoing buffer {} transaction {:?}",
                     self.replica_id,
@@ -1512,6 +1506,10 @@ impl BufferSnapshot {
         self.visible_text.offset_to_point_utf16(offset)
     }
 
+    pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+        self.visible_text.point_to_point_utf16(point)
+    }
+
     pub fn version(&self) -> &clock::Global {
         &self.version
     }
@@ -1748,14 +1746,6 @@ impl BufferSnapshot {
         self.visible_text.clip_point_utf16(point, bias)
     }
 
-    // pub fn point_for_offset(&self, offset: usize) -> Result<Point> {
-    //     if offset <= self.len() {
-    //         Ok(self.text_summary_for_range(0..offset))
-    //     } else {
-    //         Err(anyhow!("offset out of bounds"))
-    //     }
-    // }
-
     pub fn edits_since<'a, D>(
         &'a self,
         since: &'a clock::Global,
@@ -1766,6 +1756,42 @@ impl BufferSnapshot {
         self.edits_since_in_range(since, Anchor::min()..Anchor::max())
     }
 
+    pub fn edited_ranges_for_transaction<'a, D>(
+        &'a self,
+        transaction: &'a Transaction,
+    ) -> impl 'a + Iterator<Item = Range<D>>
+    where
+        D: TextDimension,
+    {
+        let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>();
+        let mut rope_cursor = self.visible_text.cursor(0);
+        let cx = Some(transaction.end.clone());
+        let mut position = D::default();
+        transaction.ranges.iter().map(move |range| {
+            cursor.seek_forward(&VersionedFullOffset::Offset(range.start), Bias::Right, &cx);
+            let mut start_offset = cursor.start().1;
+            if cursor
+                .item()
+                .map_or(false, |fragment| fragment.is_visible(&self.undo_map))
+            {
+                start_offset += range.start - cursor.start().0.full_offset()
+            }
+            position.add_assign(&rope_cursor.summary(start_offset));
+            let start = position.clone();
+
+            cursor.seek_forward(&VersionedFullOffset::Offset(range.end), Bias::Left, &cx);
+            let mut end_offset = cursor.start().1;
+            if cursor
+                .item()
+                .map_or(false, |fragment| fragment.is_visible(&self.undo_map))
+            {
+                end_offset += range.end - cursor.start().0.full_offset();
+            }
+            position.add_assign(&rope_cursor.summary(end_offset));
+            start..position.clone()
+        })
+    }
+
     pub fn edits_since_in_range<'a, D>(
         &'a self,
         since: &'a clock::Global,
@@ -2178,6 +2204,20 @@ impl Operation {
         operation_queue::Operation::lamport_timestamp(self).replica_id
     }
 
+    pub fn local_timestamp(&self) -> clock::Local {
+        match self {
+            Operation::Edit(edit) => edit.timestamp.local(),
+            Operation::Undo { undo, .. } => undo.id,
+        }
+    }
+
+    pub fn as_edit(&self) -> Option<&EditOperation> {
+        match self {
+            Operation::Edit(edit) => Some(edit),
+            _ => None,
+        }
+    }
+
     pub fn is_edit(&self) -> bool {
         match self {
             Operation::Edit { .. } => true,
@@ -2260,6 +2300,34 @@ impl ToPoint for Point {
     }
 }
 
+pub trait ToPointUtf16 {
+    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16;
+}
+
+impl ToPointUtf16 for Anchor {
+    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.summary_for_anchor(self)
+    }
+}
+
+impl ToPointUtf16 for usize {
+    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.offset_to_point_utf16(*self)
+    }
+}
+
+impl ToPointUtf16 for PointUtf16 {
+    fn to_point_utf16<'a>(&self, _: &BufferSnapshot) -> PointUtf16 {
+        *self
+    }
+}
+
+impl ToPointUtf16 for Point {
+    fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+        snapshot.point_to_point_utf16(*self)
+    }
+}
+
 pub trait Clip {
     fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self;
 }

crates/theme/src/theme.rs šŸ”—

@@ -293,6 +293,7 @@ pub struct EditorStyle {
     pub hint_diagnostic: DiagnosticStyle,
     pub invalid_hint_diagnostic: DiagnosticStyle,
     pub autocomplete: AutocompleteStyle,
+    pub code_actions_indicator: Color,
 }
 
 #[derive(Clone, Deserialize, Default)]
@@ -420,6 +421,7 @@ impl InputEditorStyle {
             hint_diagnostic: default_diagnostic_style.clone(),
             invalid_hint_diagnostic: default_diagnostic_style.clone(),
             autocomplete: Default::default(),
+            code_actions_indicator: Default::default(),
         }
     }
 }
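
The new `code_actions_indicator` color is declared in the theme TOML (see `_base.toml` below) and presumably lands in `EditorStyle` through the same serde deserialization as the other style fields. A minimal standalone sketch of that flow, assuming the `serde` (with `derive`) and `toml` crates and using a plain `String` in place of the real `Color` type; this is not the actual zed theme loader:

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
struct EditorStyleSketch {
    // Stand-in for the real `Color` field; the raw theme value is kept as a string here,
    // whereas zed resolves references like "$text.3.color" before building the theme.
    #[serde(default)]
    code_actions_indicator: String,
}

fn main() -> Result<(), toml::de::Error> {
    let style: EditorStyleSketch =
        toml::from_str(r#"code_actions_indicator = "$text.3.color""#)?;
    println!("{:?}", style);
    Ok(())
}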

crates/workspace/src/pane.rs šŸ”—

@@ -221,7 +221,7 @@ impl Pane {
             let task = workspace.load_path(project_path, cx);
             cx.spawn(|workspace, mut cx| async move {
                 let item = task.await;
-                if let Some(pane) = cx.read(|cx| pane.upgrade(cx)) {
+                if let Some(pane) = pane.upgrade(&cx) {
                     if let Some(item) = item.log_err() {
                         workspace.update(&mut cx, |workspace, cx| {
                             pane.update(cx, |p, _| p.nav_history.borrow_mut().set_mode(mode));
@@ -279,7 +279,7 @@ impl Pane {
         item_view.added_to_pane(cx);
         let item_idx = cmp::min(self.active_item_index + 1, self.item_views.len());
         self.item_views
-            .insert(item_idx, (item_view.item_handle(cx).id(), item_view));
+            .insert(item_idx, (item_view.item_id(cx), item_view));
         self.activate_item(item_idx, cx);
         cx.notify();
     }

crates/workspace/src/workspace.rs šŸ”—

@@ -150,11 +150,9 @@ pub trait Item: Entity + Sized {
 }
 
 pub trait ItemView: View {
-    type ItemHandle: ItemHandle;
-
     fn deactivated(&mut self, _: &mut ViewContext<Self>) {}
     fn navigate(&mut self, _: Box<dyn Any>, _: &mut ViewContext<Self>) {}
-    fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle;
+    fn item_id(&self, cx: &AppContext) -> usize;
     fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox;
     fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
     fn clone_on_split(&self, _: &mut ViewContext<Self>) -> Option<Self>
@@ -170,7 +168,11 @@ pub trait ItemView: View {
         false
     }
     fn can_save(&self, cx: &AppContext) -> bool;
-    fn save(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>>;
+    fn save(
+        &mut self,
+        project: ModelHandle<Project>,
+        cx: &mut ViewContext<Self>,
+    ) -> Task<Result<()>>;
     fn can_save_as(&self, cx: &AppContext) -> bool;
     fn save_as(
         &mut self,
@@ -222,7 +224,7 @@ pub trait WeakItemHandle {
 }
 
 pub trait ItemViewHandle: 'static {
-    fn item_handle(&self, cx: &AppContext) -> Box<dyn ItemHandle>;
+    fn item_id(&self, cx: &AppContext) -> usize;
     fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox;
     fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
     fn boxed_clone(&self) -> Box<dyn ItemViewHandle>;
@@ -236,7 +238,7 @@ pub trait ItemViewHandle: 'static {
     fn has_conflict(&self, cx: &AppContext) -> bool;
     fn can_save(&self, cx: &AppContext) -> bool;
     fn can_save_as(&self, cx: &AppContext) -> bool;
-    fn save(&self, cx: &mut MutableAppContext) -> Task<Result<()>>;
+    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>>;
     fn save_as(
         &self,
         project: ModelHandle<Project>,
@@ -324,7 +326,7 @@ impl<T: Item> WeakItemHandle for WeakModelHandle<T> {
     }
 
     fn upgrade(&self, cx: &AppContext) -> Option<Box<dyn ItemHandle>> {
-        WeakModelHandle::<T>::upgrade(*self, cx).map(|i| Box::new(i) as Box<dyn ItemHandle>)
+        WeakModelHandle::<T>::upgrade(self, cx).map(|i| Box::new(i) as Box<dyn ItemHandle>)
     }
 }
 
@@ -354,8 +356,8 @@ impl dyn ItemViewHandle {
 }
 
 impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
-    fn item_handle(&self, cx: &AppContext) -> Box<dyn ItemHandle> {
-        Box::new(self.read(cx).item_handle(cx))
+    fn item_id(&self, cx: &AppContext) -> usize {
+        self.read(cx).item_id(cx)
     }
 
     fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox {
@@ -404,8 +406,8 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
         self.update(cx, |this, cx| this.navigate(data, cx));
     }
 
-    fn save(&self, cx: &mut MutableAppContext) -> Task<Result<()>> {
-        self.update(cx, |item, cx| item.save(cx))
+    fn save(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext) -> Task<Result<()>> {
+        self.update(cx, |item, cx| item.save(project, cx))
     }
 
     fn save_as(
@@ -589,7 +591,7 @@ impl Workspace {
 
             while stream.recv().await.is_some() {
                 cx.update(|cx| {
-                    if let Some(this) = this.upgrade(&cx) {
+                    if let Some(this) = this.upgrade(cx) {
                         this.update(cx, |_, cx| cx.notify());
                     }
                 })
@@ -772,7 +774,7 @@ impl Workspace {
             let item = load_task.await?;
             this.update(&mut cx, |this, cx| {
                 let pane = pane
-                    .upgrade(&cx)
+                    .upgrade(cx)
                     .ok_or_else(|| anyhow!("could not upgrade pane reference"))?;
                 Ok(this.open_item_in_pane(item, &pane, cx))
             })
@@ -822,6 +824,7 @@ impl Workspace {
     }
 
     pub fn save_active_item(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
+        let project = self.project.clone();
         if let Some(item) = self.active_item(cx) {
             if item.can_save(cx) {
                 if item.has_conflict(cx.as_ref()) {
@@ -835,12 +838,12 @@ impl Workspace {
                     cx.spawn(|_, mut cx| async move {
                         let answer = answer.recv().await;
                         if answer == Some(0) {
-                            cx.update(|cx| item.save(cx)).await?;
+                            cx.update(|cx| item.save(project, cx)).await?;
                         }
                         Ok(())
                     })
                 } else {
-                    item.save(cx)
+                    item.save(project, cx)
                 }
             } else if item.can_save_as(cx) {
                 let worktree = self.worktrees(cx).next();
@@ -849,9 +852,8 @@ impl Workspace {
                     .map_or(Path::new(""), |w| w.abs_path())
                     .to_path_buf();
                 let mut abs_path = cx.prompt_for_new_path(&start_abs_path);
-                cx.spawn(|this, mut cx| async move {
+                cx.spawn(|_, mut cx| async move {
                     if let Some(abs_path) = abs_path.recv().await.flatten() {
-                        let project = this.read_with(&cx, |this, _| this.project().clone());
                         cx.update(|cx| item.save_as(project, abs_path, cx)).await?;
                     }
                     Ok(())
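
The `ItemView::save` / `ItemViewHandle::save` signature change threads the workspace's project handle down to the concrete view instead of each view reaching for it separately. A standalone, simplified model of that plumbing, with `Rc<Project>` standing in for gpui's `ModelHandle<Project>` and the contexts omitted (a sketch, not the real traits):

use std::rc::Rc;

struct Project {
    name: String,
}

// Concrete views implement this...
trait ItemView {
    fn item_id(&self) -> usize;
    fn save(&mut self, project: Rc<Project>) -> Result<(), String>;
}

// ...and the pane only sees this object-safe handle, which forwards the
// project handle through to the view, mirroring the change above.
trait ItemViewHandle {
    fn item_id(&self) -> usize;
    fn save(&mut self, project: Rc<Project>) -> Result<(), String>;
}

impl<T: ItemView> ItemViewHandle for T {
    fn item_id(&self) -> usize {
        ItemView::item_id(self)
    }

    fn save(&mut self, project: Rc<Project>) -> Result<(), String> {
        ItemView::save(self, project)
    }
}

struct EditorView {
    id: usize,
}

impl ItemView for EditorView {
    fn item_id(&self) -> usize {
        self.id
    }

    fn save(&mut self, project: Rc<Project>) -> Result<(), String> {
        println!("saving item {} in project {}", self.id, project.name);
        Ok(())
    }
}

fn main() {
    let project = Rc::new(Project { name: "zed".into() });
    let mut item: Box<dyn ItemViewHandle> = Box::new(EditorView { id: 1 });
    item.save(project).unwrap();
}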

crates/zed/assets/icons/zap.svg šŸ”—

@@ -0,0 +1,3 @@
+<svg width="8" height="12" viewBox="0 0 8 12" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M2.00262 12L2.89358 7.9886C2.95207 7.71862 2.77658 7.49963 2.5021 7.49963H0.000671387L6.00037 0L5.10792 4.0108C5.04792 4.27929 5.22341 4.49828 5.4994 4.49828H7.99932L1.99962 11.9979L2.00262 12Z" fill="#FDE047"/>
+</svg>

crates/zed/assets/themes/_base.toml šŸ”—

@@ -253,6 +253,7 @@ line_number_active = "$text.0.color"
 selection = "$selection.host"
 guest_selections = "$selection.guests"
 error_color = "$status.bad"
+code_actions_indicator = "$text.3.color"
 
 [editor.diagnostic_path_header]
 background = "$state.active_line"

crates/zed/src/zed.rs šŸ”—

@@ -126,7 +126,7 @@ mod tests {
     use super::*;
     use editor::{DisplayPoint, Editor};
     use gpui::{MutableAppContext, TestAppContext, ViewHandle};
-    use project::ProjectPath;
+    use project::{Fs, ProjectPath};
     use serde_json::json;
     use std::{
         collections::HashSet,
@@ -817,7 +817,10 @@ mod tests {
                     .active_pane()
                     .update(cx, |pane, cx| pane.close_item(editor2.id(), cx));
                 drop(editor2);
-                app_state.fs.as_fake().remove(Path::new("/root/a/file2"))
+                app_state
+                    .fs
+                    .as_fake()
+                    .remove_file(Path::new("/root/a/file2"), Default::default())
             })
             .await
             .unwrap();

script/drop-test-dbs šŸ”—

@@ -0,0 +1,16 @@
+#!/bin/bash
+
+databases=$(psql --tuples-only --command "
+  SELECT
+    datname
+  FROM
+    pg_database
+  WHERE
+    datistemplate = false
+    AND datname like 'zed-test-%'
+")
+
+for database in $databases; do
+  echo $database
+  dropdb $database
+done