diff --git a/Cargo.lock b/Cargo.lock index 1e9d43b2c0bbba211de24489453d31d271dca372..416564365642590757e7e7daec1492c39a2972bd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2776,6 +2776,8 @@ version = "0.1.0" dependencies = [ "anyhow", "async-pipe", + "ctor", + "env_logger", "futures", "gpui", "log", @@ -2784,7 +2786,6 @@ dependencies = [ "postage", "serde", "serde_json", - "simplelog", "smol", "unindent", "util", @@ -3523,7 +3524,6 @@ dependencies = [ "rpc", "serde", "serde_json", - "simplelog", "smol", "sum_tree", "tempdir", diff --git a/crates/client/src/channel.rs b/crates/client/src/channel.rs index f89f5782471062b84401b316a2a3e3d34c340a52..ab65b4d22830c5ea4d815d6db7f352574c59f013 100644 --- a/crates/client/src/channel.rs +++ b/crates/client/src/channel.rs @@ -184,7 +184,8 @@ impl Channel { rpc: Arc, cx: &mut ModelContext, ) -> Self { - let _subscription = rpc.subscribe_to_entity(details.id, cx, Self::handle_message_sent); + let _subscription = + rpc.add_entity_message_handler(details.id, cx, Self::handle_message_sent); { let user_store = user_store.clone(); @@ -398,29 +399,23 @@ impl Channel { cursor } - fn handle_message_sent( - &mut self, + async fn handle_message_sent( + this: ModelHandle, message: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let user_store = self.user_store.clone(); + let user_store = this.read_with(&cx, |this, _| this.user_store.clone()); let message = message .payload .message .ok_or_else(|| anyhow!("empty message"))?; - cx.spawn(|this, mut cx| { - async move { - let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?; - this.update(&mut cx, |this, cx| { - this.insert_messages(SumTree::from_item(message, &()), cx) - }); - Ok(()) - } - .log_err() - }) - .detach(); + let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?; + this.update(&mut cx, |this, cx| { + this.insert_messages(SumTree::from_item(message, &()), cx) + }); + Ok(()) } diff --git 
a/crates/client/src/client.rs b/crates/client/src/client.rs index 103471c6f38506e3b700f75ef7a945fa153d75d3..702360f7787a238e119df1da50f071a49fefc6be 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -11,8 +11,8 @@ use async_tungstenite::tungstenite::{ error::Error as WebsocketError, http::{Request, StatusCode}, }; -use futures::StreamExt; -use gpui::{action, AsyncAppContext, Entity, ModelContext, MutableAppContext, Task}; +use futures::{future::LocalBoxFuture, FutureExt, StreamExt}; +use gpui::{action, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use http::HttpClient; use lazy_static::lazy_static; use parking_lot::RwLock; @@ -20,10 +20,11 @@ use postage::watch; use rand::prelude::*; use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage}; use std::{ - any::TypeId, + any::{type_name, TypeId}, collections::HashMap, convert::TryFrom, fmt::Write as _, + future::Future, sync::{ atomic::{AtomicUsize, Ordering}, Arc, Weak, @@ -123,14 +124,17 @@ pub enum Status { ReconnectionError { next_reconnection: Instant }, } +type ModelHandler = Box< + dyn Send + + Sync + + FnMut(Box, &AsyncAppContext) -> LocalBoxFuture<'static, Result<()>>, +>; + struct ClientState { credentials: Option, status: (watch::Sender, watch::Receiver), entity_id_extractors: HashMap u64>>, - model_handlers: HashMap< - (TypeId, Option), - Option, &mut AsyncAppContext)>>, - >, + model_handlers: HashMap<(TypeId, Option), Option>, _maintain_connection: Option>, heartbeat_interval: Duration, } @@ -262,7 +266,7 @@ impl Client { } } - pub fn subscribe( + pub fn add_message_handler( self: &Arc, cx: &mut ModelContext, mut handler: F, @@ -273,7 +277,8 @@ impl Client { F: 'static + Send + Sync - + FnMut(&mut M, TypedEnvelope, Arc, &mut ModelContext) -> Result<()>, + + FnMut(ModelHandle, TypedEnvelope, Arc, AsyncAppContext) -> Fut, + Fut: 'static + Future>, { let subscription_id = (TypeId::of::(), None); let client = self.clone(); 
@@ -284,11 +289,15 @@ impl Client { Some(Box::new(move |envelope, cx| { if let Some(model) = model.upgrade(cx) { let envelope = envelope.into_any().downcast::>().unwrap(); - model.update(cx, |model, cx| { - if let Err(error) = handler(model, *envelope, client.clone(), cx) { - log::error!("error handling message: {}", error) - } - }); + handler(model, *envelope, client.clone(), cx.clone()).boxed_local() + } else { + async move { + Err(anyhow!( + "received message for {:?} but model was dropped", + type_name::() + )) + } + .boxed_local() } })), ); @@ -302,7 +311,7 @@ impl Client { } } - pub fn subscribe_to_entity( + pub fn add_entity_message_handler( self: &Arc, remote_id: u64, cx: &mut ModelContext, @@ -314,7 +323,8 @@ impl Client { F: 'static + Send + Sync - + FnMut(&mut M, TypedEnvelope, Arc, &mut ModelContext) -> Result<()>, + + FnMut(ModelHandle, TypedEnvelope, Arc, AsyncAppContext) -> Fut, + Fut: 'static + Future>, { let subscription_id = (TypeId::of::(), Some(remote_id)); let client = self.clone(); @@ -337,11 +347,15 @@ impl Client { Some(Box::new(move |envelope, cx| { if let Some(model) = model.upgrade(cx) { let envelope = envelope.into_any().downcast::>().unwrap(); - model.update(cx, |model, cx| { - if let Err(error) = handler(model, *envelope, client.clone(), cx) { - log::error!("error handling message: {}", error) - } - }); + handler(model, *envelope, client.clone(), cx.clone()).boxed_local() + } else { + async move { + Err(anyhow!( + "received message for {:?} but model was dropped", + type_name::() + )) + } + .boxed_local() } })), ); @@ -355,6 +369,44 @@ impl Client { } } + pub fn add_entity_request_handler( + self: &Arc, + remote_id: u64, + cx: &mut ModelContext, + mut handler: F, + ) -> Subscription + where + T: EntityMessage + RequestMessage, + M: Entity, + F: 'static + + Send + + Sync + + FnMut(ModelHandle, TypedEnvelope, Arc, AsyncAppContext) -> Fut, + Fut: 'static + Future>, + { + self.add_entity_message_handler(remote_id, cx, move |model, 
envelope, client, cx| { + let receipt = envelope.receipt(); + let response = handler(model, envelope, client.clone(), cx); + async move { + match response.await { + Ok(response) => { + client.respond(receipt, response)?; + Ok(()) + } + Err(error) => { + client.respond_with_error( + receipt, + proto::Error { + message: error.to_string(), + }, + )?; + Err(error) + } + } + } + }) + } + pub fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool { read_credentials_from_keychain(cx).is_some() } @@ -442,7 +494,7 @@ impl Client { let (connection_id, handle_io, mut incoming) = self.peer.add_connection(conn).await; cx.foreground() .spawn({ - let mut cx = cx.clone(); + let cx = cx.clone(); let this = self.clone(); async move { while let Some(message) = incoming.next().await { @@ -462,23 +514,41 @@ impl Client { if let Some(handler) = state.model_handlers.get_mut(&handler_key) { let mut handler = handler.take().unwrap(); drop(state); // Avoid deadlocks if the handler interacts with rpc::Client + let future = (handler)(message, &cx); + { + let mut state = this.state.write(); + if state.model_handlers.contains_key(&handler_key) { + state.model_handlers.insert(handler_key, Some(handler)); + } + } + let client_id = this.id; log::debug!( "rpc message received. client_id:{}, name:{}", - this.id, + client_id, type_name ); - (handler)(message, &mut cx); - log::debug!( - "rpc message handled. client_id:{}, name:{}", - this.id, - type_name - ); - - let mut state = this.state.write(); - if state.model_handlers.contains_key(&handler_key) { - state.model_handlers.insert(handler_key, Some(handler)); - } + cx.foreground() + .spawn(async move { + match future.await { + Ok(()) => { + log::debug!( + "rpc message handled. client_id:{}, name:{}", + client_id, + type_name + ); + } + Err(error) => { + log::error!( + "error handling rpc message. 
client_id:{}, name:{}, error:{}", + client_id, + type_name, + error + ); + } + } + }) + .detach(); } else { log::info!("unhandled message {}", type_name); } @@ -715,16 +785,12 @@ impl Client { response } - pub fn respond( - &self, - receipt: Receipt, - response: T::Response, - ) -> Result<()> { + fn respond(&self, receipt: Receipt, response: T::Response) -> Result<()> { log::debug!("rpc respond. client_id: {}. name:{}", self.id, T::NAME); self.peer.respond(receipt, response) } - pub fn respond_with_error( + fn respond_with_error( &self, receipt: Receipt, error: proto::Error, @@ -861,22 +927,22 @@ mod tests { let (mut done_tx1, mut done_rx1) = postage::oneshot::channel(); let (mut done_tx2, mut done_rx2) = postage::oneshot::channel(); let _subscription1 = model.update(&mut cx, |_, cx| { - client.subscribe_to_entity( + client.add_entity_message_handler( 1, cx, move |_, _: TypedEnvelope, _, _| { postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap(); - Ok(()) + async { Ok(()) } }, ) }); let _subscription2 = model.update(&mut cx, |_, cx| { - client.subscribe_to_entity( + client.add_entity_message_handler( 2, cx, move |_, _: TypedEnvelope, _, _| { postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap(); - Ok(()) + async { Ok(()) } }, ) }); @@ -884,10 +950,10 @@ mod tests { // Ensure dropping a subscription for the same entity type still allows receiving of // messages for other entity IDs of the same type. 
let subscription3 = model.update(&mut cx, |_, cx| { - client.subscribe_to_entity( + client.add_entity_message_handler( 3, cx, - move |_, _: TypedEnvelope, _, _| Ok(()), + |_, _: TypedEnvelope, _, _| async { Ok(()) }, ) }); drop(subscription3); @@ -910,16 +976,16 @@ mod tests { let (mut done_tx1, _done_rx1) = postage::oneshot::channel(); let (mut done_tx2, mut done_rx2) = postage::oneshot::channel(); let subscription1 = model.update(&mut cx, |_, cx| { - client.subscribe(cx, move |_, _: TypedEnvelope, _, _| { + client.add_message_handler(cx, move |_, _: TypedEnvelope, _, _| { postage::sink::Sink::try_send(&mut done_tx1, ()).unwrap(); - Ok(()) + async { Ok(()) } }) }); drop(subscription1); let _subscription2 = model.update(&mut cx, |_, cx| { - client.subscribe(cx, move |_, _: TypedEnvelope, _, _| { + client.add_message_handler(cx, move |_, _: TypedEnvelope, _, _| { postage::sink::Sink::try_send(&mut done_tx2, ()).unwrap(); - Ok(()) + async { Ok(()) } }) }); server.send(proto::Ping {}); @@ -937,12 +1003,12 @@ mod tests { let model = cx.add_model(|_| Model { subscription: None }); let (mut done_tx, mut done_rx) = postage::oneshot::channel(); model.update(&mut cx, |model, cx| { - model.subscription = Some(client.subscribe( + model.subscription = Some(client.add_message_handler( cx, - move |model, _: TypedEnvelope, _, _| { - model.subscription.take(); + move |model, _: TypedEnvelope, _, mut cx| { + model.update(&mut cx, |model, _| model.subscription.take()); postage::sink::Sink::try_send(&mut done_tx, ()).unwrap(); - Ok(()) + async { Ok(()) } }, )); }); diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 26be77bf2d66dbc450d7fa4bc99a89ce2a9a79e0..1e4f7fe4d4d5811512bc177fd60151ab31e6d0cf 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -58,11 +58,11 @@ impl UserStore { let (mut current_user_tx, current_user_rx) = watch::channel(); let (mut update_contacts_tx, mut update_contacts_rx) = watch::channel::>(); - let 
update_contacts_subscription = client.subscribe( + let update_contacts_subscription = client.add_message_handler( cx, - move |_: &mut Self, msg: TypedEnvelope, _, _| { - let _ = update_contacts_tx.blocking_send(Some(msg.payload)); - Ok(()) + move |_: ModelHandle, msg: TypedEnvelope, _, _| { + *update_contacts_tx.borrow_mut() = Some(msg.payload); + async move { Ok(()) } }, ); Self { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ac197a7456eb6ae94f27cbcab1ea9847a14de547..b9ffbc64d0bbd53eba08443d0f5184e2fa6df8c1 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -7,7 +7,7 @@ use editor::{ display_map::{BlockDisposition, BlockId, BlockProperties, RenderBlock}, highlight_diagnostic_message, items::BufferItemHandle, - Autoscroll, BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer, ToOffset, + Autoscroll, BuildSettings, Editor, ExcerptId, MultiBuffer, ToOffset, }; use gpui::{ action, elements::*, fonts::TextStyle, keymap::Binding, AnyViewHandle, AppContext, Entity, @@ -28,7 +28,7 @@ use std::{ sync::Arc, }; use util::TryFutureExt; -use workspace::{ItemNavHistory, Workspace}; +use workspace::{ItemNavHistory, ItemViewHandle as _, Workspace}; action!(Deploy); action!(OpenExcerpts); @@ -68,7 +68,6 @@ struct ProjectDiagnosticsEditor { struct PathState { path: ProjectPath, - header: Option, diagnostic_groups: Vec, } @@ -145,7 +144,12 @@ impl ProjectDiagnosticsEditor { let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id())); let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); let editor = cx.add_view(|cx| { - let mut editor = Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx); + let mut editor = Editor::for_buffer( + excerpts.clone(), + build_settings.clone(), + Some(project.clone()), + cx, + ); editor.set_vertical_scroll_margin(5, cx); editor }); @@ -187,7 +191,7 @@ impl ProjectDiagnosticsEditor 
{ for selection in editor.local_selections::(cx) { for (buffer, mut range) in - excerpts.excerpted_buffers(selection.start..selection.end, cx) + excerpts.range_to_buffer_ranges(selection.start..selection.end, cx) { if selection.reversed { mem::swap(&mut range.start, &mut range.end); @@ -253,7 +257,6 @@ impl ProjectDiagnosticsEditor { ix, PathState { path: path.clone(), - header: None, diagnostic_groups: Default::default(), }, ); @@ -330,14 +333,15 @@ impl ProjectDiagnosticsEditor { Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), Bias::Left, ); - let excerpt_id = excerpts.insert_excerpt_after( - &prev_excerpt_id, - ExcerptProperties { - buffer: &buffer, - range: excerpt_start..excerpt_end, - }, - excerpts_cx, - ); + let excerpt_id = excerpts + .insert_excerpts_after( + &prev_excerpt_id, + buffer.clone(), + [excerpt_start..excerpt_end], + excerpts_cx, + ) + .pop() + .unwrap(); prev_excerpt_id = excerpt_id.clone(); first_excerpt_id.get_or_insert_with(|| prev_excerpt_id.clone()); @@ -360,14 +364,6 @@ impl ProjectDiagnosticsEditor { ), disposition: BlockDisposition::Above, }); - } else { - group_state.block_count += 1; - blocks_to_add.push(BlockProperties { - position: header_position, - height: 1, - render: context_header_renderer(self.build_settings.clone()), - disposition: BlockDisposition::Above, - }); } for entry in &group.entries[*start_ix..ix] { @@ -416,27 +412,17 @@ impl ProjectDiagnosticsEditor { }); self.editor.update(cx, |editor, cx| { - blocks_to_remove.extend(path_state.header); editor.remove_blocks(blocks_to_remove, cx); - let header_block = first_excerpt_id.map(|excerpt_id| BlockProperties { - position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, language::Anchor::min()), - height: 2, - render: path_header_renderer(buffer, self.build_settings.clone()), - disposition: BlockDisposition::Above, - }); let block_ids = editor.insert_blocks( - blocks_to_add - .into_iter() - .map(|block| { - let (excerpt_id, text_anchor) = block.position; - 
BlockProperties { - position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), - height: block.height, - render: block.render, - disposition: block.disposition, - } - }) - .chain(header_block.into_iter()), + blocks_to_add.into_iter().map(|block| { + let (excerpt_id, text_anchor) = block.position; + BlockProperties { + position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), + height: block.height, + render: block.render, + disposition: block.disposition, + } + }), cx, ); @@ -444,7 +430,6 @@ impl ProjectDiagnosticsEditor { for group_state in &mut groups_to_add { group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect(); } - path_state.header = block_ids.next(); }); for ix in group_ixs_to_remove.into_iter().rev() { @@ -554,10 +539,8 @@ impl workspace::Item for ProjectDiagnostics { } impl workspace::ItemView for ProjectDiagnosticsEditor { - type ItemHandle = ModelHandle; - - fn item_handle(&self, _: &AppContext) -> Self::ItemHandle { - self.model.clone() + fn item_id(&self, _: &AppContext) -> usize { + self.model.id() } fn tab_content(&self, style: &theme::Tab, _: &AppContext) -> ElementBox { @@ -589,8 +572,12 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { true } - fn save(&mut self, cx: &mut ViewContext) -> Task> { - self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx)) + fn save( + &mut self, + project: ModelHandle, + cx: &mut ViewContext, + ) -> Task> { + self.editor.save(project, cx) } fn can_save_as(&self, _: &AppContext) -> bool { @@ -655,51 +642,6 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { } } -fn path_header_renderer(buffer: ModelHandle, build_settings: BuildSettings) -> RenderBlock { - Arc::new(move |cx| { - let settings = build_settings(cx); - let style = settings.style.diagnostic_path_header; - let font_size = (style.text_scale_factor * settings.style.text.font_size).round(); - - let mut filename = None; - let mut path = None; - if let Some(file) = buffer.read(&**cx).file() { 
- filename = file - .path() - .file_name() - .map(|f| f.to_string_lossy().to_string()); - path = file - .path() - .parent() - .map(|p| p.to_string_lossy().to_string() + "/"); - } - - Flex::row() - .with_child( - Label::new( - filename.unwrap_or_else(|| "untitled".to_string()), - style.filename.text.clone().with_font_size(font_size), - ) - .contained() - .with_style(style.filename.container) - .boxed(), - ) - .with_children(path.map(|path| { - Label::new(path, style.path.text.clone().with_font_size(font_size)) - .contained() - .with_style(style.path.container) - .boxed() - })) - .aligned() - .left() - .contained() - .with_style(style.container) - .with_padding_left(cx.gutter_padding + cx.scroll_x * cx.em_width) - .expanded() - .named("path header block") - }) -} - fn diagnostic_header_renderer( diagnostic: Diagnostic, build_settings: BuildSettings, @@ -753,17 +695,6 @@ fn diagnostic_header_renderer( }) } -fn context_header_renderer(build_settings: BuildSettings) -> RenderBlock { - Arc::new(move |cx| { - let settings = build_settings(cx); - let text_style = settings.style.text.clone(); - Label::new("…".to_string(), text_style) - .contained() - .with_padding_left(cx.gutter_padding + cx.scroll_x * cx.em_width) - .named("collapsed context") - }) -} - pub(crate) fn render_summary( summary: &DiagnosticSummary, text_style: &TextStyle, @@ -838,7 +769,10 @@ fn compare_diagnostics( #[cfg(test)] mod tests { use super::*; - use editor::{display_map::BlockContext, DisplayPoint, EditorSnapshot}; + use editor::{ + display_map::{BlockContext, TransformBlock}, + DisplayPoint, EditorSnapshot, + }; use gpui::TestAppContext; use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16}; use serde_json::json; @@ -985,8 +919,9 @@ mod tests { [ (0, "path header block".into()), (2, "diagnostic header".into()), - (15, "diagnostic header".into()), - (24, "collapsed context".into()), + (15, "collapsed context".into()), + (16, "diagnostic header".into()), + (25, "collapsed 
context".into()), ] ); assert_eq!( @@ -1011,6 +946,7 @@ mod tests { " c(y);\n", "\n", // supporting diagnostic " d(x);\n", + "\n", // context ellipsis // diagnostic group 2 "\n", // primary message "\n", // padding @@ -1073,8 +1009,9 @@ mod tests { (2, "diagnostic header".into()), (7, "path header block".into()), (9, "diagnostic header".into()), - (22, "diagnostic header".into()), - (31, "collapsed context".into()), + (22, "collapsed context".into()), + (23, "diagnostic header".into()), + (32, "collapsed context".into()), ] ); assert_eq!( @@ -1110,6 +1047,7 @@ mod tests { " c(y);\n", "\n", // supporting diagnostic " d(x);\n", + "\n", // collapsed context // diagnostic group 2 "\n", // primary message "\n", // filename @@ -1184,11 +1122,13 @@ mod tests { [ (0, "path header block".into()), (2, "diagnostic header".into()), - (7, "diagnostic header".into()), - (12, "path header block".into()), - (14, "diagnostic header".into()), - (27, "diagnostic header".into()), - (36, "collapsed context".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + (13, "path header block".into()), + (15, "diagnostic header".into()), + (28, "collapsed context".into()), + (29, "diagnostic header".into()), + (38, "collapsed context".into()), ] ); assert_eq!( @@ -1205,6 +1145,7 @@ mod tests { "const a: i32 = 'a';\n", "\n", // supporting diagnostic "const b: i32 = c;\n", + "\n", // context ellipsis // diagnostic group 2 "\n", // primary message "\n", // padding @@ -1230,6 +1171,7 @@ mod tests { " c(y);\n", "\n", // supporting diagnostic " d(x);\n", + "\n", // context ellipsis // diagnostic group 2 "\n", // primary message "\n", // filename @@ -1254,18 +1196,31 @@ mod tests { editor .blocks_in_range(0..editor.max_point().row()) .filter_map(|(row, block)| { - block - .render(&BlockContext { - cx, - anchor_x: 0., - scroll_x: 0., - gutter_padding: 0., - gutter_width: 0., - line_height: 0., - em_width: 0., - }) - .name() - .map(|s| (row, s.to_string())) + let name = match 
block { + TransformBlock::Custom(block) => block + .render(&BlockContext { + cx, + anchor_x: 0., + scroll_x: 0., + gutter_padding: 0., + gutter_width: 0., + line_height: 0., + em_width: 0., + }) + .name()? + .to_string(), + TransformBlock::ExcerptHeader { + starts_new_buffer, .. + } => { + if *starts_new_buffer { + "path header block".to_string() + } else { + "collapsed context".to_string() + } + } + }; + + Some((row, name)) }) .collect() } diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 77dcbab3df86a0d461144de72ee30881e441815f..23ce651d9b9af550711a822bd811dd6fd92cadea 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -12,6 +12,7 @@ test-support = [ "text/test-support", "language/test-support", "gpui/test-support", + "project/test-support", "util/test-support", ] @@ -48,6 +49,7 @@ language = { path = "../language", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } +project = { path = "../project", features = ["test-support"] } ctor = "0.1" env_logger = "0.8" rand = "0.8" diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 697dc5ea6256f29a0bab8eb88b5d01bfdb7eff81..24fd53155886d5d980af6bec1639ed74113131e3 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -15,8 +15,8 @@ use tab_map::TabMap; use wrap_map::WrapMap; pub use block_map::{ - AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext, - BlockDisposition, BlockId, BlockProperties, RenderBlock, + BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext, + BlockDisposition, BlockId, BlockProperties, RenderBlock, TransformBlock, }; pub trait ToDisplayPoint { @@ -43,13 +43,15 @@ impl DisplayMap { font_id: FontId, font_size: f32, wrap_width: Option, + buffer_header_height: u8, + 
excerpt_header_height: u8, cx: &mut ModelContext, ) -> Self { let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx); - let block_map = BlockMap::new(snapshot); + let block_map = BlockMap::new(snapshot, buffer_header_height, excerpt_header_height); cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); DisplayMap { buffer, @@ -318,7 +320,7 @@ impl DisplaySnapshot { pub fn blocks_in_range<'a>( &'a self, rows: Range, - ) -> impl Iterator { + ) -> impl Iterator { self.blocks_snapshot.blocks_in_range(rows) } @@ -471,6 +473,8 @@ mod tests { let font_cache = cx.font_cache().clone(); let tab_size = rng.gen_range(1..=4); + let buffer_start_excerpt_header_height = rng.gen_range(1..=5); + let excerpt_header_height = rng.gen_range(1..=5); let family_id = font_cache.load_family(&["Helvetica"]).unwrap(); let font_id = font_cache .select_font(family_id, &Default::default()) @@ -497,7 +501,16 @@ mod tests { }); let map = cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx) + DisplayMap::new( + buffer.clone(), + tab_size, + font_id, + font_size, + wrap_width, + buffer_start_excerpt_header_height, + excerpt_header_height, + cx, + ) }); let mut notifications = observe(&map, &mut cx); let mut fold_count = 0; @@ -711,7 +724,16 @@ mod tests { let text = "one two three four five\nsix seven eight"; let buffer = MultiBuffer::build_simple(text, cx); let map = cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx) + DisplayMap::new( + buffer.clone(), + tab_size, + font_id, + font_size, + wrap_width, + 1, + 1, + cx, + ) }); let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); @@ -791,7 +813,7 @@ mod tests { .unwrap(); let font_size = 14.0; let map = 
cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx) + DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx) }); buffer.update(cx, |buffer, cx| { buffer.edit( @@ -870,8 +892,8 @@ mod tests { .unwrap(); let font_size = 14.0; - let map = - cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx)); + let map = cx + .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx)); assert_eq!( cx.update(|cx| chunks(0..5, &map, &theme, cx)), vec![ @@ -958,8 +980,9 @@ mod tests { .unwrap(); let font_size = 16.0; - let map = cx - .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, Some(40.0), cx)); + let map = cx.add_model(|cx| { + DisplayMap::new(buffer, tab_size, font_id, font_size, Some(40.0), 1, 1, cx) + }); assert_eq!( cx.update(|cx| chunks(0..5, &map, &theme, cx)), [ @@ -1003,7 +1026,7 @@ mod tests { .unwrap(); let font_size = 14.0; let map = cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx) + DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx) }); let map = map.update(cx, |map, cx| map.snapshot(cx)); @@ -1047,7 +1070,7 @@ mod tests { let font_size = 14.0; let map = cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx) + DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx) }); let map = map.update(cx, |map, cx| map.snapshot(cx)); assert_eq!(map.text(), "✅ α\nβ \n🏀β γ"); @@ -1105,7 +1128,7 @@ mod tests { .unwrap(); let font_size = 14.0; let map = cx.add_model(|cx| { - DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, cx) + DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, None, 1, 1, cx) }); assert_eq!( map.update(cx, |map, cx| map.snapshot(cx)).max_point(), diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 
96848016a9f7ff44268e21960ec49d9837fce730..465e46af213aa42db30508ed956e3192b47773ad 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,11 +1,12 @@ use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}; use crate::{Anchor, ToPoint as _}; -use collections::{HashMap, HashSet}; +use collections::{Bound, HashMap, HashSet}; use gpui::{AppContext, ElementBox}; -use language::Chunk; +use language::{BufferSnapshot, Chunk, Patch}; use parking_lot::Mutex; use std::{ - cmp::{self, Ordering, Reverse}, + cell::RefCell, + cmp::{self, Ordering}, fmt::Debug, ops::{Deref, Range}, sync::{ @@ -20,9 +21,11 @@ const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; pub struct BlockMap { next_block_id: AtomicUsize, - wrap_snapshot: Mutex, + wrap_snapshot: RefCell, blocks: Vec>, - transforms: Mutex>, + transforms: RefCell>, + buffer_header_height: u8, + excerpt_header_height: u8, } pub struct BlockMapWriter<'a>(&'a mut BlockMap); @@ -84,13 +87,46 @@ pub enum BlockDisposition { #[derive(Clone, Debug)] struct Transform { summary: TransformSummary, - block: Option, + block: Option, } -#[derive(Clone, Debug)] -pub struct AlignedBlock { - block: Arc, - column: u32, +#[derive(Clone)] +pub enum TransformBlock { + Custom(Arc), + ExcerptHeader { + buffer: BufferSnapshot, + range: Range, + height: u8, + starts_new_buffer: bool, + }, +} + +impl TransformBlock { + fn disposition(&self) -> BlockDisposition { + match self { + TransformBlock::Custom(block) => block.disposition, + TransformBlock::ExcerptHeader { .. } => BlockDisposition::Above, + } + } + + pub fn height(&self) -> u8 { + match self { + TransformBlock::Custom(block) => block.height, + TransformBlock::ExcerptHeader { height, .. 
} => *height, + } + } +} + +impl Debug for TransformBlock { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Custom(block) => f.debug_struct("Custom").field("block", block).finish(), + Self::ExcerptHeader { buffer, .. } => f + .debug_struct("ExcerptHeader") + .field("path", &buffer.path()) + .finish(), + } + } } #[derive(Clone, Debug, Default)] @@ -115,40 +151,71 @@ pub struct BlockBufferRows<'a> { } impl BlockMap { - pub fn new(wrap_snapshot: WrapSnapshot) -> Self { - Self { + pub fn new( + wrap_snapshot: WrapSnapshot, + buffer_header_height: u8, + excerpt_header_height: u8, + ) -> Self { + let row_count = wrap_snapshot.max_point().row() + 1; + let map = Self { next_block_id: AtomicUsize::new(0), blocks: Vec::new(), - transforms: Mutex::new(SumTree::from_item( - Transform::isomorphic(wrap_snapshot.text_summary().lines.row + 1), - &(), - )), - wrap_snapshot: Mutex::new(wrap_snapshot), - } + transforms: RefCell::new(SumTree::from_item(Transform::isomorphic(row_count), &())), + wrap_snapshot: RefCell::new(wrap_snapshot.clone()), + buffer_header_height, + excerpt_header_height, + }; + map.sync( + &wrap_snapshot, + Patch::new(vec![Edit { + old: 0..row_count, + new: 0..row_count, + }]), + ); + map } - pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Vec) -> BlockSnapshot { + pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch) -> BlockSnapshot { self.sync(&wrap_snapshot, edits); - *self.wrap_snapshot.lock() = wrap_snapshot.clone(); + *self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone(); BlockSnapshot { wrap_snapshot, - transforms: self.transforms.lock().clone(), + transforms: self.transforms.borrow().clone(), } } - pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Vec) -> BlockMapWriter { + pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch) -> BlockMapWriter { self.sync(&wrap_snapshot, edits); - *self.wrap_snapshot.lock() = wrap_snapshot; + *self.wrap_snapshot.borrow_mut() = 
wrap_snapshot; BlockMapWriter(self) } - fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec) { + fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: Patch) { + let buffer = wrap_snapshot.buffer_snapshot(); + + // Handle changing the last excerpt if it is empty. + if buffer.trailing_excerpt_update_count() + != self + .wrap_snapshot + .borrow() + .buffer_snapshot() + .trailing_excerpt_update_count() + { + let max_point = wrap_snapshot.max_point(); + let edit_start = wrap_snapshot.prev_row_boundary(max_point); + let edit_end = max_point.row() + 1; + edits = edits.compose([WrapEdit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }]); + } + + let edits = edits.into_inner(); if edits.is_empty() { return; } - let buffer = wrap_snapshot.buffer_snapshot(); - let mut transforms = self.transforms.lock(); + let mut transforms = self.transforms.borrow_mut(); let mut new_transforms = SumTree::new(); let old_row_count = transforms.summary().input_rows; let new_row_count = wrap_snapshot.max_point().row() + 1; @@ -170,7 +237,7 @@ impl BlockMap { if transform .block .as_ref() - .map_or(false, |b| b.disposition.is_below()) + .map_or(false, |b| b.disposition().is_below()) { new_transforms.push(transform.clone(), &()); cursor.next(&()); @@ -195,7 +262,7 @@ impl BlockMap { if transform .block .as_ref() - .map_or(false, |b| b.disposition.is_below()) + .map_or(false, |b| b.disposition().is_below()) { cursor.next(&()); } else { @@ -216,7 +283,7 @@ impl BlockMap { if transform .block .as_ref() - .map_or(false, |b| b.disposition.is_below()) + .map_or(false, |b| b.disposition().is_below()) { cursor.next(&()); } else { @@ -233,28 +300,30 @@ impl BlockMap { // Find the blocks within this edited region. 
let new_buffer_start = wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left); - let start_anchor = buffer.anchor_before(new_buffer_start); + let start_bound = Bound::Included(new_buffer_start); let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| { probe .position - .cmp(&start_anchor, &buffer) - .unwrap() + .to_point(&buffer) + .cmp(&new_buffer_start) .then(Ordering::Greater) }) { Ok(ix) | Err(ix) => last_block_ix + ix, }; + let end_bound; let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() { + end_bound = Bound::Unbounded; self.blocks.len() } else { let new_buffer_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); - let end_anchor = buffer.anchor_before(new_buffer_end); + end_bound = Bound::Excluded(new_buffer_end); match self.blocks[start_block_ix..].binary_search_by(|probe| { probe .position - .cmp(&end_anchor, &buffer) - .unwrap() + .to_point(&buffer) + .cmp(&new_buffer_end) .then(Ordering::Greater) }) { Ok(ix) | Err(ix) => start_block_ix + ix, @@ -268,7 +337,6 @@ impl BlockMap { .iter() .map(|block| { let mut position = block.position.to_point(&buffer); - let column = wrap_snapshot.from_point(position, Bias::Left).column(); match block.disposition { BlockDisposition::Above => position.column = 0, BlockDisposition::Below => { @@ -276,25 +344,57 @@ impl BlockMap { } } let position = wrap_snapshot.from_point(position, Bias::Left); - (position.row(), column, block.clone()) + (position.row(), TransformBlock::Custom(block.clone())) + }), + ); + blocks_in_edit.extend( + buffer + .excerpt_boundaries_in_range((start_bound, end_bound)) + .map(|excerpt_boundary| { + ( + wrap_snapshot + .from_point(Point::new(excerpt_boundary.row, 0), Bias::Left) + .row(), + TransformBlock::ExcerptHeader { + buffer: excerpt_boundary.buffer, + range: excerpt_boundary.range, + height: if excerpt_boundary.starts_new_buffer { + self.buffer_header_height + } else { + self.excerpt_header_height + }, + 
starts_new_buffer: excerpt_boundary.starts_new_buffer, + }, + ) }), ); - // When multiple blocks are on the same row, newer blocks appear above older - // blocks. This is arbitrary, but we currently rely on it in ProjectDiagnosticsEditor. - blocks_in_edit - .sort_by_key(|(row, _, block)| (*row, block.disposition, Reverse(block.id))); + // Place excerpt headers above custom blocks on the same row. + blocks_in_edit.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| { + row_a.cmp(&row_b).then_with(|| match (block_a, block_b) { + ( + TransformBlock::ExcerptHeader { .. }, + TransformBlock::ExcerptHeader { .. }, + ) => Ordering::Equal, + (TransformBlock::ExcerptHeader { .. }, _) => Ordering::Less, + (_, TransformBlock::ExcerptHeader { .. }) => Ordering::Greater, + (TransformBlock::Custom(block_a), TransformBlock::Custom(block_b)) => block_a + .disposition + .cmp(&block_b.disposition) + .then_with(|| block_a.id.cmp(&block_b.id)), + }) + }); // For each of these blocks, insert a new isomorphic transform preceding the block, // and then insert the block itself. - for (block_row, column, block) in blocks_in_edit.drain(..) { - let insertion_row = match block.disposition { + for (block_row, block) in blocks_in_edit.drain(..) 
{ + let insertion_row = match block.disposition() { BlockDisposition::Above => block_row, BlockDisposition::Below => block_row + 1, }; let extent_before_block = insertion_row - new_transforms.summary().input_rows; push_isomorphic(&mut new_transforms, extent_before_block); - new_transforms.push(Transform::block(block, column), &()); + new_transforms.push(Transform::block(block), &()); } old_end = WrapRow(old_end.0.min(old_row_count)); @@ -375,8 +475,8 @@ impl<'a> BlockMapWriter<'a> { blocks: impl IntoIterator>, ) -> Vec { let mut ids = Vec::new(); - let mut edits = Vec::>::new(); - let wrap_snapshot = &*self.0.wrap_snapshot.lock(); + let mut edits = Patch::default(); + let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); let buffer = wrap_snapshot.buffer_snapshot(); for block in blocks { @@ -411,15 +511,10 @@ impl<'a> BlockMapWriter<'a> { }), ); - if let Err(edit_ix) = edits.binary_search_by_key(&start_row, |edit| edit.old.start) { - edits.insert( - edit_ix, - Edit { - old: start_row..end_row, - new: start_row..end_row, - }, - ); - } + edits = edits.compose([Edit { + old: start_row..end_row, + new: start_row..end_row, + }]); } self.0.sync(wrap_snapshot, edits); @@ -427,9 +522,9 @@ impl<'a> BlockMapWriter<'a> { } pub fn remove(&mut self, block_ids: HashSet) { - let wrap_snapshot = &*self.0.wrap_snapshot.lock(); + let wrap_snapshot = &*self.0.wrap_snapshot.borrow(); let buffer = wrap_snapshot.buffer_snapshot(); - let mut edits = Vec::new(); + let mut edits = Patch::default(); let mut last_block_buffer_row = None; self.0.blocks.retain(|block| { if block_ids.contains(&block.id) { @@ -524,7 +619,7 @@ impl BlockSnapshot { pub fn blocks_in_range<'a>( &'a self, rows: Range, - ) -> impl Iterator { + ) -> impl Iterator { let mut cursor = self.transforms.cursor::(); cursor.seek(&BlockRow(rows.start), Bias::Right, &()); std::iter::from_fn(move || { @@ -644,7 +739,7 @@ impl BlockSnapshot { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); 
cursor.seek(&BlockRow(block_point.row), Bias::Right, &()); if let Some(transform) = cursor.item() { - match transform.block.as_ref().map(|b| b.disposition) { + match transform.block.as_ref().map(|b| b.disposition()) { Some(BlockDisposition::Above) => WrapPoint::new(cursor.start().1 .0, 0), Some(BlockDisposition::Below) => { let wrap_row = cursor.start().1 .0 - 1; @@ -673,13 +768,13 @@ impl Transform { } } - fn block(block: Arc, column: u32) -> Self { + fn block(block: TransformBlock) -> Self { Self { summary: TransformSummary { input_rows: 0, - output_rows: block.height as u32, + output_rows: block.height() as u32, }, - block: Some(AlignedBlock { block, column }), + block: Some(block), } } @@ -809,37 +904,21 @@ impl BlockDisposition { } } -impl AlignedBlock { - pub fn height(&self) -> u32 { - self.height as u32 - } +impl<'a> Deref for BlockContext<'a> { + type Target = AppContext; - pub fn column(&self) -> u32 { - self.column + fn deref(&self) -> &Self::Target { + &self.cx } +} +impl Block { pub fn render(&self, cx: &BlockContext) -> ElementBox { self.render.lock()(cx) } pub fn position(&self) -> &Anchor { - &self.block.position - } -} - -impl Deref for AlignedBlock { - type Target = Block; - - fn deref(&self) -> &Self::Target { - self.block.as_ref() - } -} - -impl<'a> Deref for BlockContext<'a> { - type Target = AppContext; - - fn deref(&self) -> &Self::Target { - &self.cx + &self.position } } @@ -911,9 +990,9 @@ mod tests { let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx); - let mut block_map = BlockMap::new(wraps_snapshot.clone()); + let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1); - let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); writer.insert(vec![ BlockProperties { 
position: buffer_snapshot.anchor_after(Point::new(1, 0)), @@ -935,15 +1014,15 @@ mod tests { }, ]); - let snapshot = block_map.read(wraps_snapshot, vec![]); + let snapshot = block_map.read(wraps_snapshot, Default::default()); assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n"); let blocks = snapshot .blocks_in_range(0..8) .map(|(start_row, block)| { + let block = block.as_custom().unwrap(); ( - start_row..start_row + block.height(), - block.column(), + start_row..start_row + block.height as u32, block .render(&BlockContext { cx, @@ -965,9 +1044,9 @@ mod tests { assert_eq!( blocks, &[ - (1..3, 2, "block 2".to_string()), - (3..4, 0, "block 1".to_string()), - (7..10, 3, "block 3".to_string()), + (1..2, "block 1".to_string()), + (2..4, "block 2".to_string()), + (7..10, "block 3".to_string()), ] ); @@ -1089,9 +1168,9 @@ mod tests { let (_, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx); - let mut block_map = BlockMap::new(wraps_snapshot.clone()); + let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1); - let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); + let mut writer = block_map.write(wraps_snapshot.clone(), Default::default()); writer.insert(vec![ BlockProperties { position: buffer_snapshot.anchor_after(Point::new(1, 12)), @@ -1109,7 +1188,7 @@ mod tests { // Blocks with an 'above' disposition go above their corresponding buffer line. // Blocks with a 'below' disposition go below their corresponding buffer line. 
- let snapshot = block_map.read(wraps_snapshot, vec![]); + let snapshot = block_map.read(wraps_snapshot, Default::default()); assert_eq!( snapshot.text(), "one two \nthree\n\nfour five \nsix\n\nseven \neight" @@ -1134,8 +1213,11 @@ mod tests { .select_font(family_id, &Default::default()) .unwrap(); let font_size = 14.0; + let buffer_start_header_height = rng.gen_range(1..=5); + let excerpt_header_height = rng.gen_range(1..=5); log::info!("Wrap width: {:?}", wrap_width); + log::info!("Excerpt Header Height: {:?}", excerpt_header_height); let buffer = if rng.gen() { let len = rng.gen_range(0..10); @@ -1151,8 +1233,12 @@ mod tests { let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx); - let mut block_map = BlockMap::new(wraps_snapshot); - let mut expected_blocks = Vec::new(); + let mut block_map = BlockMap::new( + wraps_snapshot.clone(), + buffer_start_header_height, + excerpt_header_height, + ); + let mut custom_blocks = Vec::new(); for _ in 0..operations { let mut buffer_edits = Vec::new(); @@ -1205,15 +1291,15 @@ mod tests { let mut block_map = block_map.write(wraps_snapshot, wrap_edits); let block_ids = block_map.insert(block_properties.clone()); for (block_id, props) in block_ids.into_iter().zip(block_properties) { - expected_blocks.push((block_id, props)); + custom_blocks.push((block_id, props)); } } - 40..=59 if !expected_blocks.is_empty() => { - let block_count = rng.gen_range(1..=4.min(expected_blocks.len())); + 40..=59 if !custom_blocks.is_empty() => { + let block_count = rng.gen_range(1..=4.min(custom_blocks.len())); let block_ids_to_remove = (0..block_count) .map(|_| { - expected_blocks - .remove(rng.gen_range(0..expected_blocks.len())) + custom_blocks + .remove(rng.gen_range(0..custom_blocks.len())) .0 }) .collect(); @@ -1229,9 +1315,9 @@ mod tests { } _ => { buffer.update(cx, |buffer, cx| { - let edit_count = rng.gen_range(1..=5); + 
let mutation_count = rng.gen_range(1..=5); let subscription = buffer.subscribe(); - buffer.randomly_edit(&mut rng, edit_count, cx); + buffer.randomly_mutate(&mut rng, mutation_count, cx); buffer_snapshot = buffer.snapshot(cx); buffer_edits.extend(subscription.consume()); log::info!("buffer text: {:?}", buffer_snapshot.text()); @@ -1251,36 +1337,46 @@ mod tests { ); log::info!("blocks text: {:?}", blocks_snapshot.text()); - let mut sorted_blocks = expected_blocks - .iter() - .cloned() - .map(|(id, block)| { - let mut position = block.position.to_point(&buffer_snapshot); - let column = wraps_snapshot.from_point(position, Bias::Left).column(); - match block.disposition { - BlockDisposition::Above => { - position.column = 0; - } - BlockDisposition::Below => { - position.column = buffer_snapshot.line_len(position.row); - } - }; - let row = wraps_snapshot.from_point(position, Bias::Left).row(); + let mut expected_blocks = Vec::new(); + expected_blocks.extend(custom_blocks.iter().map(|(id, block)| { + let mut position = block.position.to_point(&buffer_snapshot); + match block.disposition { + BlockDisposition::Above => { + position.column = 0; + } + BlockDisposition::Below => { + position.column = buffer_snapshot.line_len(position.row); + } + }; + let row = wraps_snapshot.from_point(position, Bias::Left).row(); + ( + row, + ExpectedBlock::Custom { + disposition: block.disposition, + id: *id, + height: block.height, + }, + ) + })); + expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map( + |boundary| { + let position = + wraps_snapshot.from_point(Point::new(boundary.row, 0), Bias::Left); ( - id, - BlockProperties { - position: BlockPoint::new(row, column), - height: block.height, - disposition: block.disposition, - render: block.render.clone(), + position.row(), + ExpectedBlock::ExcerptHeader { + height: if boundary.starts_new_buffer { + buffer_start_header_height + } else { + excerpt_header_height + }, + starts_new_buffer: boundary.starts_new_buffer, 
}, ) - }) - .collect::>(); - sorted_blocks.sort_unstable_by_key(|(id, block)| { - (block.position.row, block.disposition, Reverse(*id)) - }); - let mut sorted_blocks_iter = sorted_blocks.iter().peekable(); + }, + )); + expected_blocks.sort_unstable(); + let mut sorted_blocks_iter = expected_blocks.into_iter().peekable(); let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::>(); let mut expected_buffer_rows = Vec::new(); @@ -1297,16 +1393,17 @@ mod tests { .to_point(WrapPoint::new(row, 0), Bias::Left) .row as usize]; - while let Some((block_id, block)) = sorted_blocks_iter.peek() { - if block.position.row == row && block.disposition == BlockDisposition::Above { + while let Some((block_row, block)) = sorted_blocks_iter.peek() { + if *block_row == row && block.disposition() == BlockDisposition::Above { + let (_, block) = sorted_blocks_iter.next().unwrap(); + let height = block.height() as usize; expected_block_positions - .push((expected_text.matches('\n').count() as u32, *block_id)); - let text = "\n".repeat(block.height as usize); + .push((expected_text.matches('\n').count() as u32, block)); + let text = "\n".repeat(height); expected_text.push_str(&text); - for _ in 0..block.height { + for _ in 0..height { expected_buffer_rows.push(None); } - sorted_blocks_iter.next(); } else { break; } @@ -1316,16 +1413,17 @@ mod tests { expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); expected_text.push_str(input_line); - while let Some((block_id, block)) = sorted_blocks_iter.peek() { - if block.position.row == row && block.disposition == BlockDisposition::Below { + while let Some((block_row, block)) = sorted_blocks_iter.peek() { + if *block_row == row && block.disposition() == BlockDisposition::Below { + let (_, block) = sorted_blocks_iter.next().unwrap(); + let height = block.height() as usize; expected_block_positions - .push((expected_text.matches('\n').count() as u32 + 1, *block_id)); - let text = "\n".repeat(block.height as usize); + 
.push((expected_text.matches('\n').count() as u32 + 1, block)); + let text = "\n".repeat(height); expected_text.push_str(&text); - for _ in 0..block.height { + for _ in 0..height { expected_buffer_rows.push(None); } - sorted_blocks_iter.next(); } else { break; } @@ -1337,7 +1435,7 @@ mod tests { for start_row in 0..expected_row_count { let expected_text = expected_lines[start_row..].join("\n"); let actual_text = blocks_snapshot - .chunks(start_row as u32..expected_row_count as u32, false) + .chunks(start_row as u32..blocks_snapshot.max_point().row + 1, false) .map(|chunk| chunk.text) .collect::(); assert_eq!( @@ -1356,7 +1454,7 @@ mod tests { assert_eq!( blocks_snapshot .blocks_in_range(0..(expected_row_count as u32)) - .map(|(row, block)| (row, block.id)) + .map(|(row, block)| (row, block.clone().into())) .collect::>(), expected_block_positions ); @@ -1435,6 +1533,64 @@ mod tests { } } } + + #[derive(Debug, Eq, PartialEq, Ord, PartialOrd)] + enum ExpectedBlock { + ExcerptHeader { + height: u8, + starts_new_buffer: bool, + }, + Custom { + disposition: BlockDisposition, + id: BlockId, + height: u8, + }, + } + + impl ExpectedBlock { + fn height(&self) -> u8 { + match self { + ExpectedBlock::ExcerptHeader { height, .. } => *height, + ExpectedBlock::Custom { height, .. } => *height, + } + } + + fn disposition(&self) -> BlockDisposition { + match self { + ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above, + ExpectedBlock::Custom { disposition, .. } => *disposition, + } + } + } + + impl From for ExpectedBlock { + fn from(block: TransformBlock) -> Self { + match block { + TransformBlock::Custom(block) => ExpectedBlock::Custom { + id: block.id, + disposition: block.disposition, + height: block.height, + }, + TransformBlock::ExcerptHeader { + height, + starts_new_buffer, + .. 
+ } => ExpectedBlock::ExcerptHeader { + height, + starts_new_buffer, + }, + } + } + } + } + + impl TransformBlock { + fn as_custom(&self) -> Option<&Block> { + match self { + TransformBlock::Custom(block) => Some(block), + TransformBlock::ExcerptHeader { .. } => None, + } + } } impl BlockSnapshot { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index a23f6ad01022c4775f4bc60686873606885c9abc..2866ae8f636122ef0cda82bbd8d8cdf52ea176d7 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -107,14 +107,23 @@ impl<'a> FoldMapWriter<'a> { let buffer = self.0.buffer.lock().clone(); for range in ranges.into_iter() { let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer); - if range.start != range.end { - let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); - folds.push(fold); - edits.push(text::Edit { - old: range.clone(), - new: range, - }); + + // Ignore any empty ranges. + if range.start == range.end { + continue; + } + + // For now, ignore any ranges that span an excerpt boundary. 
+ let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); + if fold.0.start.excerpt_id() != fold.0.end.excerpt_id() { + continue; } + + folds.push(fold); + edits.push(text::Edit { + old: range.clone(), + new: range, + }); } folds.sort_unstable_by(|a, b| sum_tree::SeekTarget::cmp(a, b, &buffer)); @@ -268,6 +277,8 @@ impl FoldMap { let mut buffer = self.buffer.lock(); if buffer.parse_count() != new_buffer.parse_count() || buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count() + || buffer.trailing_excerpt_update_count() + != new_buffer.trailing_excerpt_update_count() { self.version.fetch_add(1, SeqCst); } @@ -1281,7 +1292,7 @@ mod tests { _ => buffer.update(cx, |buffer, cx| { let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); - buffer.randomly_edit(&mut rng, edit_count, cx); + buffer.randomly_mutate(&mut rng, edit_count, cx); buffer_snapshot = buffer.snapshot(cx); let edits = subscription.consume().into_inner(); log::info!("editing {:?}", edits); @@ -1407,7 +1418,6 @@ mod tests { fold_row = snapshot .clip_point(FoldPoint::new(fold_row, 0), Bias::Right) .row(); - eprintln!("fold_row: {} of {}", fold_row, expected_buffer_rows.len()); assert_eq!( snapshot.buffer_rows(fold_row).collect::>(), expected_buffer_rows[(fold_row as usize)..], diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 1d5e64c8a565798948a46c08b4b32dbaa08b96e7..566efe7f4f99f0eaac91cfa53e41fb8c0cbe05f9 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -106,7 +106,7 @@ impl WrapMap { tab_snapshot: TabSnapshot, edits: Vec, cx: &mut ModelContext, - ) -> (WrapSnapshot, Vec) { + ) -> (WrapSnapshot, Patch) { if self.wrap_width.is_some() { self.pending_edits.push_back((tab_snapshot, edits)); self.flush_edits(cx); @@ -117,10 +117,7 @@ impl WrapMap { self.snapshot.interpolated = false; } - ( - self.snapshot.clone(), - mem::take(&mut 
self.edits_since_sync).into_inner(), - ) + (self.snapshot.clone(), mem::take(&mut self.edits_since_sync)) } pub fn set_font(&mut self, font_id: FontId, font_size: f32, cx: &mut ModelContext) { @@ -588,10 +585,6 @@ impl WrapSnapshot { } } - pub fn text_summary(&self) -> TextSummary { - self.transforms.summary().output - } - pub fn max_point(&self) -> WrapPoint { WrapPoint(self.transforms.summary().output.lines) } @@ -955,10 +948,6 @@ impl WrapPoint { &mut self.0.row } - pub fn column(&self) -> u32 { - self.0.column - } - pub fn column_mut(&mut self) -> &mut u32 { &mut self.0.column } @@ -1118,7 +1107,7 @@ mod tests { buffer.update(&mut cx, |buffer, cx| { let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); - buffer.randomly_edit(&mut rng, edit_count, cx); + buffer.randomly_mutate(&mut rng, edit_count, cx); buffer_snapshot = buffer.snapshot(cx); buffer_edits.extend(subscription.consume()); }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b0f7bbe478ab50ec9aa8b6beac35473f9b3acd04..f2ce168a09559ea40fc283c1f5c7171f897e4c4a 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10,7 +10,7 @@ mod test; use aho_corasick::AhoCorasick; use anyhow::Result; use clock::ReplicaId; -use collections::{BTreeMap, HashMap, HashSet}; +use collections::{BTreeMap, Bound, HashMap, HashSet}; pub use display_map::DisplayPoint; use display_map::*; pub use element::*; @@ -27,19 +27,20 @@ use gpui::{ text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View, ViewContext, WeakModelHandle, WeakViewHandle, }; -use items::BufferItemHandle; +use items::{BufferItemHandle, MultiBufferItemHandle}; use itertools::Itertools as _; use language::{ - AnchorRangeExt as _, BracketPair, Buffer, Completion, CompletionLabel, Diagnostic, + AnchorRangeExt as _, BracketPair, Buffer, CodeAction, Completion, CompletionLabel, Diagnostic, DiagnosticSeverity, Language, 
Point, Selection, SelectionGoal, TransactionId, }; use multi_buffer::MultiBufferChunks; pub use multi_buffer::{ - char_kind, Anchor, AnchorRangeExt, CharKind, ExcerptId, ExcerptProperties, MultiBuffer, - MultiBufferSnapshot, ToOffset, ToPoint, + char_kind, Anchor, AnchorRangeExt, CharKind, ExcerptId, MultiBuffer, MultiBufferSnapshot, + ToOffset, ToPoint, }; use ordered_float::OrderedFloat; use postage::watch; +use project::Project; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; @@ -124,7 +125,9 @@ action!(FoldSelectedRanges); action!(Scroll, Vector2F); action!(Select, SelectPhase); action!(ShowCompletions); +action!(ToggleCodeActions, bool); action!(ConfirmCompletion, Option); +action!(ConfirmCodeAction, Option); pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec>) { path_openers.push(Box::new(items::BufferOpener)); @@ -143,10 +146,19 @@ pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec), Line(Range), @@ -407,6 +416,7 @@ pub struct Editor { scroll_top_anchor: Option, autoscroll_request: Option, build_settings: BuildSettings, + project: Option>, focused: bool, show_local_cursors: bool, blink_epoch: usize, @@ -417,9 +427,11 @@ pub struct Editor { highlighted_rows: Option>, highlighted_ranges: BTreeMap>)>, nav_history: Option, - completion_state: Option, + context_menu: Option, completion_tasks: Vec<(CompletionId, Task>)>, next_completion_id: CompletionId, + available_code_actions: Option<(ModelHandle, Arc<[CodeAction]>)>, + code_actions_task: Option>, } pub struct EditorSnapshot { @@ -431,7 +443,8 @@ pub struct EditorSnapshot { scroll_top_anchor: Option, } -struct PendingSelection { +#[derive(Clone)] +pub struct PendingSelection { selection: Selection, mode: SelectMode, } @@ -459,17 +472,157 @@ struct SnippetState { struct InvalidationStack(Vec); -struct CompletionState { +enum ContextMenu { + Completions(CompletionsMenu), + CodeActions(CodeActionsMenu), +} + +impl ContextMenu { + fn select_prev(&mut self, cx: 
&mut ViewContext) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_prev(cx), + ContextMenu::CodeActions(menu) => menu.select_prev(cx), + } + true + } else { + false + } + } + + fn select_next(&mut self, cx: &mut ViewContext) -> bool { + if self.visible() { + match self { + ContextMenu::Completions(menu) => menu.select_next(cx), + ContextMenu::CodeActions(menu) => menu.select_next(cx), + } + true + } else { + false + } + } + + fn visible(&self) -> bool { + match self { + ContextMenu::Completions(menu) => menu.visible(), + ContextMenu::CodeActions(menu) => menu.visible(), + } + } + + fn render( + &self, + cursor_position: DisplayPoint, + build_settings: BuildSettings, + cx: &AppContext, + ) -> (DisplayPoint, ElementBox) { + match self { + ContextMenu::Completions(menu) => (cursor_position, menu.render(build_settings, cx)), + ContextMenu::CodeActions(menu) => menu.render(cursor_position, build_settings, cx), + } + } +} + +struct CompletionsMenu { id: CompletionId, initial_position: Anchor, - completions: Arc<[Completion]>, + buffer: ModelHandle, + completions: Arc<[Completion]>, match_candidates: Vec, matches: Arc<[StringMatch]>, selected_item: usize, list: UniformListState, } -impl CompletionState { +impl CompletionsMenu { + fn select_prev(&mut self, cx: &mut ViewContext) { + if self.selected_item > 0 { + self.selected_item -= 1; + self.list.scroll_to(ScrollTarget::Show(self.selected_item)); + } + cx.notify(); + } + + fn select_next(&mut self, cx: &mut ViewContext) { + if self.selected_item + 1 < self.matches.len() { + self.selected_item += 1; + self.list.scroll_to(ScrollTarget::Show(self.selected_item)); + } + cx.notify(); + } + + fn visible(&self) -> bool { + !self.matches.is_empty() + } + + fn render(&self, build_settings: BuildSettings, cx: &AppContext) -> ElementBox { + enum CompletionTag {} + + let settings = build_settings(cx); + let completions = self.completions.clone(); + let matches = self.matches.clone(); + 
let selected_item = self.selected_item; + UniformList::new(self.list.clone(), matches.len(), move |range, items, cx| { + let settings = build_settings(cx); + let start_ix = range.start; + for (ix, mat) in matches[range].iter().enumerate() { + let completion = &completions[mat.candidate_id]; + let item_ix = start_ix + ix; + items.push( + MouseEventHandler::new::( + mat.candidate_id, + cx, + |state, _| { + let item_style = if item_ix == selected_item { + settings.style.autocomplete.selected_item + } else if state.hovered { + settings.style.autocomplete.hovered_item + } else { + settings.style.autocomplete.item + }; + + Text::new(completion.label.text.clone(), settings.style.text.clone()) + .with_soft_wrap(false) + .with_highlights(combine_syntax_and_fuzzy_match_highlights( + &completion.label.text, + settings.style.text.color.into(), + styled_runs_for_completion_label( + &completion.label, + settings.style.text.color, + &settings.style.syntax, + ), + &mat.positions, + )) + .contained() + .with_style(item_style) + .boxed() + }, + ) + .with_cursor_style(CursorStyle::PointingHand) + .on_mouse_down(move |cx| { + cx.dispatch_action(ConfirmCompletion(Some(item_ix))); + }) + .boxed(), + ); + } + }) + .with_width_from_item( + self.matches + .iter() + .enumerate() + .max_by_key(|(_, mat)| { + self.completions[mat.candidate_id] + .label + .text + .chars() + .count() + }) + .map(|(ix, _)| ix), + ) + .contained() + .with_style(settings.style.autocomplete.container) + .boxed() + } + pub async fn filter(&mut self, query: Option<&str>, executor: Arc) { let mut matches = if let Some(query) = query { fuzzy::match_strings( @@ -511,6 +664,94 @@ impl CompletionState { } } +#[derive(Clone)] +struct CodeActionsMenu { + actions: Arc<[CodeAction]>, + buffer: ModelHandle, + selected_item: usize, + list: UniformListState, + deployed_from_indicator: bool, +} + +impl CodeActionsMenu { + fn select_prev(&mut self, cx: &mut ViewContext) { + if self.selected_item > 0 { + self.selected_item -= 1; + 
cx.notify() + } + } + + fn select_next(&mut self, cx: &mut ViewContext) { + if self.selected_item + 1 < self.actions.len() { + self.selected_item += 1; + cx.notify() + } + } + + fn visible(&self) -> bool { + !self.actions.is_empty() + } + + fn render( + &self, + mut cursor_position: DisplayPoint, + build_settings: BuildSettings, + cx: &AppContext, + ) -> (DisplayPoint, ElementBox) { + enum ActionTag {} + + let settings = build_settings(cx); + let actions = self.actions.clone(); + let selected_item = self.selected_item; + let element = + UniformList::new(self.list.clone(), actions.len(), move |range, items, cx| { + let settings = build_settings(cx); + let start_ix = range.start; + for (ix, action) in actions[range].iter().enumerate() { + let item_ix = start_ix + ix; + items.push( + MouseEventHandler::new::(item_ix, cx, |state, _| { + let item_style = if item_ix == selected_item { + settings.style.autocomplete.selected_item + } else if state.hovered { + settings.style.autocomplete.hovered_item + } else { + settings.style.autocomplete.item + }; + + Text::new(action.lsp_action.title.clone(), settings.style.text.clone()) + .with_soft_wrap(false) + .contained() + .with_style(item_style) + .boxed() + }) + .with_cursor_style(CursorStyle::PointingHand) + .on_mouse_down(move |cx| { + cx.dispatch_action(ConfirmCodeAction(Some(item_ix))); + }) + .boxed(), + ); + } + }) + .with_width_from_item( + self.actions + .iter() + .enumerate() + .max_by_key(|(_, action)| action.lsp_action.title.chars().count()) + .map(|(ix, _)| ix), + ) + .contained() + .with_style(settings.style.autocomplete.container) + .boxed(); + + if self.deployed_from_indicator { + *cursor_position.column_mut() = 0; + } + + (cursor_position, element) + } +} + #[derive(Debug)] struct ActiveDiagnosticGroup { primary_range: Range, @@ -534,7 +775,7 @@ impl Editor { pub fn single_line(build_settings: BuildSettings, cx: &mut ViewContext) -> Self { let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx)); let 
buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let mut view = Self::for_buffer(buffer, build_settings, cx); + let mut view = Self::for_buffer(buffer, build_settings, None, cx); view.mode = EditorMode::SingleLine; view } @@ -546,7 +787,7 @@ impl Editor { ) -> Self { let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); - let mut view = Self::for_buffer(buffer, build_settings, cx); + let mut view = Self::for_buffer(buffer, build_settings, None, cx); view.mode = EditorMode::AutoHeight { max_lines }; view } @@ -554,13 +795,19 @@ impl Editor { pub fn for_buffer( buffer: ModelHandle, build_settings: BuildSettings, + project: Option>, cx: &mut ViewContext, ) -> Self { - Self::new(buffer, build_settings, cx) + Self::new(buffer, build_settings, project, cx) } pub fn clone(&self, cx: &mut ViewContext) -> Self { - let mut clone = Self::new(self.buffer.clone(), self.build_settings.clone(), cx); + let mut clone = Self::new( + self.buffer.clone(), + self.build_settings.clone(), + self.project.clone(), + cx, + ); clone.scroll_position = self.scroll_position; clone.scroll_top_anchor = self.scroll_top_anchor.clone(); clone.nav_history = self @@ -573,6 +820,7 @@ impl Editor { pub fn new( buffer: ModelHandle, build_settings: BuildSettings, + project: Option>, cx: &mut ViewContext, ) -> Self { let settings = build_settings(cx); @@ -583,6 +831,8 @@ impl Editor { settings.style.text.font_id, settings.style.text.font_size, None, + 2, + 1, cx, ) }); @@ -596,9 +846,18 @@ impl Editor { buffer, display_map, selections: Arc::from([]), - pending_selection: None, + pending_selection: Some(PendingSelection { + selection: Selection { + id: 0, + start: Anchor::min(), + end: Anchor::min(), + reversed: false, + goal: SelectionGoal::None, + }, + mode: SelectMode::Character, + }), columnar_selection_tail: None, - next_selection_id: 0, + next_selection_id: 1, add_selections_state: None, 
select_next_state: None, selection_history: Default::default(), @@ -607,6 +866,7 @@ impl Editor { select_larger_syntax_node_stack: Vec::new(), active_diagnostics: None, build_settings, + project, scroll_position: Vector2F::zero(), scroll_top_anchor: None, autoscroll_request: None, @@ -620,18 +880,13 @@ impl Editor { highlighted_rows: None, highlighted_ranges: Default::default(), nav_history: None, - completion_state: None, + context_menu: None, completion_tasks: Default::default(), next_completion_id: 0, + available_code_actions: Default::default(), + code_actions_task: Default::default(), }; - let selection = Selection { - id: post_inc(&mut this.next_selection_id), - start: 0, - end: 0, - reversed: false, - goal: SelectionGoal::None, - }; - this.update_selections(vec![selection], None, cx); + this.end_selection(cx); this } @@ -654,16 +909,7 @@ impl Editor { } pub fn title(&self, cx: &AppContext) -> String { - let filename = self - .buffer() - .read(cx) - .file(cx) - .map(|file| file.file_name(cx)); - if let Some(name) = filename { - name.to_string_lossy().into() - } else { - "untitled".into() - } + self.buffer().read(cx).title(cx) } pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> EditorSnapshot { @@ -880,7 +1126,7 @@ impl Editor { } fn select(&mut self, Select(phase): &Select, cx: &mut ViewContext) { - self.hide_completions(cx); + self.hide_context_menu(cx); match phase { SelectPhase::Begin { @@ -919,7 +1165,7 @@ impl Editor { let position = position.to_offset(&display_map, Bias::Left); let tail_anchor = display_map.buffer_snapshot.anchor_before(tail); - let pending = self.pending_selection.as_mut().unwrap(); + let mut pending = self.pending_selection.clone().unwrap(); if position >= tail { pending.selection.start = tail_anchor.clone(); @@ -934,6 +1180,8 @@ impl Editor { } _ => {} } + + self.set_selections(self.selections.clone(), Some(pending), cx); } fn begin_selection( @@ -950,7 +1198,7 @@ impl Editor { let display_map = self.display_map.update(cx, 
|map, cx| map.snapshot(cx)); let buffer = &display_map.buffer_snapshot; - let newest_selection = self.newest_anchor_selection().unwrap().clone(); + let newest_selection = self.newest_anchor_selection().clone(); let start; let end; @@ -997,16 +1245,23 @@ impl Editor { goal: SelectionGoal::None, }; - if !add { - self.update_selections::(Vec::new(), None, cx); - } else if click_count > 1 { - // Remove the newest selection since it was only added as part of this multi-click. - let mut selections = self.local_selections(cx); - selections.retain(|selection| selection.id != newest_selection.id); - self.update_selections::(selections, None, cx) + let mut selections; + if add { + selections = self.selections.clone(); + // Remove the newest selection if it was added due to a previous mouse up + // within this multi-click. + if click_count > 1 { + selections = self + .selections + .iter() + .filter(|selection| selection.id != newest_selection.id) + .cloned() + .collect(); + } + } else { + selections = Arc::from([]); } - - self.pending_selection = Some(PendingSelection { selection, mode }); + self.set_selections(selections, Some(PendingSelection { selection, mode }), cx); cx.notify(); } @@ -1049,14 +1304,14 @@ impl Editor { if let Some(tail) = self.columnar_selection_tail.as_ref() { let tail = tail.to_display_point(&display_map); self.select_columns(tail, position, overshoot, &display_map, cx); - } else if let Some(PendingSelection { selection, mode }) = self.pending_selection.as_mut() { + } else if let Some(mut pending) = self.pending_selection.clone() { let buffer = self.buffer.read(cx).snapshot(cx); let head; let tail; - match mode { + match &pending.mode { SelectMode::Character => { head = position.to_point(&display_map); - tail = selection.tail().to_point(&buffer); + tail = pending.selection.tail().to_point(&buffer); } SelectMode::Word(original_range) => { let original_display_range = original_range.start.to_display_point(&display_map) @@ -1112,14 +1367,15 @@ impl Editor 
{ }; if head < tail { - selection.start = buffer.anchor_before(head); - selection.end = buffer.anchor_before(tail); - selection.reversed = true; + pending.selection.start = buffer.anchor_before(head); + pending.selection.end = buffer.anchor_before(tail); + pending.selection.reversed = true; } else { - selection.start = buffer.anchor_before(tail); - selection.end = buffer.anchor_before(head); - selection.reversed = false; + pending.selection.start = buffer.anchor_before(tail); + pending.selection.end = buffer.anchor_before(head); + pending.selection.reversed = false; } + self.set_selections(self.selections.clone(), Some(pending), cx); } else { log::error!("update_selection dispatched with no pending selection"); return; @@ -1182,7 +1438,7 @@ impl Editor { } pub fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { - if self.hide_completions(cx).is_some() { + if self.hide_context_menu(cx).is_some() { return; } @@ -1197,18 +1453,13 @@ impl Editor { if self.active_diagnostics.is_some() { self.dismiss_diagnostics(cx); - } else if let Some(PendingSelection { selection, .. 
}) = self.pending_selection.take() { - let buffer = self.buffer.read(cx).snapshot(cx); - let selection = Selection { - id: selection.id, - start: selection.start.to_point(&buffer), - end: selection.end.to_point(&buffer), - reversed: selection.reversed, - goal: selection.goal, - }; - if self.local_selections::(cx).is_empty() { - self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); + } else if let Some(pending) = self.pending_selection.clone() { + let mut selections = self.selections.clone(); + if selections.is_empty() { + selections = Arc::from([pending.selection]); } + self.set_selections(selections, None, cx); + self.request_autoscroll(Autoscroll::Fit, cx); } else { let buffer = self.buffer.read(cx).snapshot(cx); let mut oldest_selection = self.oldest_selection::(&buffer); @@ -1502,16 +1753,15 @@ impl Editor { } fn trigger_completion_on_input(&mut self, text: &str, cx: &mut ViewContext) { - if let Some(selection) = self.newest_anchor_selection() { - if self - .buffer - .read(cx) - .is_completion_trigger(selection.head(), text, cx) - { - self.show_completions(&ShowCompletions, cx); - } else { - self.hide_completions(cx); - } + let selection = self.newest_anchor_selection(); + if self + .buffer + .read(cx) + .is_completion_trigger(selection.head(), text, cx) + { + self.show_completions(&ShowCompletions, cx); + } else { + self.hide_context_menu(cx); } } @@ -1656,23 +1906,37 @@ impl Editor { } fn show_completions(&mut self, _: &ShowCompletions, cx: &mut ViewContext) { - let position = if let Some(selection) = self.newest_anchor_selection() { - selection.head() + let project = if let Some(project) = self.project.clone() { + project } else { return; }; - let query = Self::completion_query(&self.buffer.read(cx).read(cx), position.clone()); - let completions = self + let position = self.newest_anchor_selection().head(); + let (buffer, buffer_position) = if let Some(output) = self .buffer - .update(cx, |buffer, cx| buffer.completions(position.clone(), cx)); 
+ .read(cx) + .text_anchor_for_position(position.clone(), cx) + { + output + } else { + return; + }; + + let query = Self::completion_query(&self.buffer.read(cx).read(cx), position.clone()); + let completions = project.update(cx, |project, cx| { + project.completions(&buffer, buffer_position.clone(), cx) + }); let id = post_inc(&mut self.next_completion_id); let task = cx.spawn_weak(|this, mut cx| { async move { let completions = completions.await?; + if completions.is_empty() { + return Ok(()); + } - let mut completion_state = CompletionState { + let mut menu = CompletionsMenu { id, initial_position: position, match_candidates: completions @@ -1685,30 +1949,30 @@ impl Editor { ) }) .collect(), + buffer, completions: completions.into(), matches: Vec::new().into(), selected_item: 0, list: Default::default(), }; - completion_state - .filter(query.as_deref(), cx.background()) - .await; + menu.filter(query.as_deref(), cx.background()).await; - if let Some(this) = cx.read(|cx| this.upgrade(cx)) { + if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { - if let Some(prev_completion_state) = this.completion_state.as_ref() { - if prev_completion_state.id > completion_state.id { - return; + match this.context_menu.as_ref() { + None => {} + Some(ContextMenu::Completions(prev_menu)) => { + if prev_menu.id > menu.id { + return; + } } + _ => return, } - this.completion_tasks - .retain(|(id, _)| *id > completion_state.id); - if completion_state.matches.is_empty() { - this.hide_completions(cx); - } else if this.focused { - this.completion_state = Some(completion_state); + this.completion_tasks.retain(|(id, _)| *id > menu.id); + if this.focused { + this.show_context_menu(ContextMenu::Completions(menu), cx); } cx.notify(); @@ -1721,22 +1985,24 @@ impl Editor { self.completion_tasks.push((id, task)); } - fn hide_completions(&mut self, cx: &mut ViewContext) -> Option { - cx.notify(); - self.completion_tasks.clear(); - self.completion_state.take() - } - pub fn 
confirm_completion( &mut self, - completion_ix: Option, + ConfirmCompletion(completion_ix): &ConfirmCompletion, cx: &mut ViewContext, ) -> Option>> { - let completion_state = self.hide_completions(cx)?; - let mat = completion_state + use language::ToOffset as _; + + let completions_menu = if let ContextMenu::Completions(menu) = self.hide_context_menu(cx)? { + menu + } else { + return None; + }; + + let mat = completions_menu .matches - .get(completion_ix.unwrap_or(completion_state.selected_item))?; - let completion = completion_state.completions.get(mat.candidate_id)?; + .get(completion_ix.unwrap_or(completions_menu.selected_item))?; + let buffer_handle = completions_menu.buffer; + let completion = completions_menu.completions.get(mat.candidate_id)?; let snippet; let text; @@ -1747,22 +2013,31 @@ impl Editor { snippet = None; text = completion.new_text.clone(); }; - let snapshot = self.buffer.read(cx).snapshot(cx); - let old_range = completion.old_range.to_offset(&snapshot); - let old_text = snapshot - .text_for_range(old_range.clone()) - .collect::(); + let buffer = buffer_handle.read(cx); + let old_range = completion.old_range.to_offset(&buffer); + let old_text = buffer.text_for_range(old_range.clone()).collect::(); let selections = self.local_selections::(cx); - let newest_selection = selections.iter().max_by_key(|s| s.id)?; - let lookbehind = newest_selection.start.saturating_sub(old_range.start); - let lookahead = old_range.end.saturating_sub(newest_selection.end); + let newest_selection = self.newest_anchor_selection(); + if newest_selection.start.buffer_id != Some(buffer_handle.id()) { + return None; + } + + let lookbehind = newest_selection + .start + .text_anchor + .to_offset(buffer) + .saturating_sub(old_range.start); + let lookahead = old_range + .end + .saturating_sub(newest_selection.end.text_anchor.to_offset(buffer)); let mut common_prefix_len = old_text .bytes() .zip(text.bytes()) .take_while(|(a, b)| a == b) .count(); + let snapshot = 
self.buffer.read(cx).snapshot(cx); let mut ranges = Vec::new(); for selection in &selections { if snapshot.contains_str_at(selection.start.saturating_sub(lookbehind), &old_text) { @@ -1800,95 +2075,240 @@ impl Editor { } self.end_transaction(cx); - Some(self.buffer.update(cx, |buffer, cx| { - buffer.apply_additional_edits_for_completion(completion.clone(), cx) + let project = self.project.clone()?; + let apply_edits = project.update(cx, |project, cx| { + project.apply_additional_edits_for_completion( + buffer_handle, + completion.clone(), + true, + cx, + ) + }); + Some(cx.foreground().spawn(async move { + apply_edits.await?; + Ok(()) })) } - pub fn has_completions(&self) -> bool { - self.completion_state - .as_ref() - .map_or(false, |c| !c.matches.is_empty()) - } + pub fn toggle_code_actions( + &mut self, + &ToggleCodeActions(deployed_from_indicator): &ToggleCodeActions, + cx: &mut ViewContext, + ) { + if matches!( + self.context_menu.as_ref(), + Some(ContextMenu::CodeActions(_)) + ) { + self.context_menu.take(); + cx.notify(); + return; + } - pub fn render_completions(&self, cx: &AppContext) -> Option { - enum CompletionTag {} + let mut task = self.code_actions_task.take(); + cx.spawn_weak(|this, mut cx| async move { + while let Some(prev_task) = task { + prev_task.await; + task = this + .upgrade(&cx) + .and_then(|this| this.update(&mut cx, |this, _| this.code_actions_task.take())); + } - self.completion_state.as_ref().map(|state| { - let build_settings = self.build_settings.clone(); - let settings = build_settings(cx); - let completions = state.completions.clone(); - let matches = state.matches.clone(); - let selected_item = state.selected_item; - UniformList::new( - state.list.clone(), - matches.len(), - move |range, items, cx| { - let settings = build_settings(cx); - let start_ix = range.start; - for (ix, mat) in matches[range].iter().enumerate() { - let completion = &completions[mat.candidate_id]; - let item_ix = start_ix + ix; - items.push( - 
MouseEventHandler::new::( - mat.candidate_id, + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + if this.focused { + if let Some((buffer, actions)) = this.available_code_actions.clone() { + this.show_context_menu( + ContextMenu::CodeActions(CodeActionsMenu { + buffer, + actions, + selected_item: Default::default(), + list: Default::default(), + deployed_from_indicator, + }), cx, - |state, _| { - let item_style = if item_ix == selected_item { - settings.style.autocomplete.selected_item - } else if state.hovered { - settings.style.autocomplete.hovered_item - } else { - settings.style.autocomplete.item - }; - - Text::new( - completion.label.text.clone(), - settings.style.text.clone(), - ) - .with_soft_wrap(false) - .with_highlights(combine_syntax_and_fuzzy_match_highlights( - &completion.label.text, - settings.style.text.color.into(), - styled_runs_for_completion_label( - &completion.label, - settings.style.text.color, - &settings.style.syntax, - ), - &mat.positions, - )) - .contained() - .with_style(item_style) - .boxed() - }, - ) - .with_cursor_style(CursorStyle::PointingHand) - .on_mouse_down(move |cx| { - cx.dispatch_action(ConfirmCompletion(Some(item_ix))); - }) - .boxed(), - ); + ); + } } - }, - ) - .with_width_from_item( - state - .matches - .iter() - .enumerate() - .max_by_key(|(_, mat)| { - state.completions[mat.candidate_id] - .label - .text - .chars() - .count() - }) - .map(|(ix, _)| ix), - ) - .contained() - .with_style(settings.style.autocomplete.container) - .boxed() + }) + } + Ok::<_, anyhow::Error>(()) }) + .detach_and_log_err(cx); + } + + pub fn confirm_code_action( + workspace: &mut Workspace, + ConfirmCodeAction(action_ix): &ConfirmCodeAction, + cx: &mut ViewContext, + ) -> Option>> { + let editor = workspace.active_item(cx)?.act_as::(cx)?; + let actions_menu = if let ContextMenu::CodeActions(menu) = + editor.update(cx, |editor, cx| editor.hide_context_menu(cx))? 
+ { + menu + } else { + return None; + }; + let action_ix = action_ix.unwrap_or(actions_menu.selected_item); + let action = actions_menu.actions.get(action_ix)?.clone(); + let title = action.lsp_action.title.clone(); + let buffer = actions_menu.buffer; + let replica_id = editor.read(cx).replica_id(cx); + + let apply_code_actions = workspace.project().clone().update(cx, |project, cx| { + project.apply_code_action(buffer, action, true, cx) + }); + Some(cx.spawn(|workspace, mut cx| async move { + let project_transaction = apply_code_actions.await?; + + // If the code action's edits are all contained within this editor, then + // avoid opening a new editor to display them. + let mut entries = project_transaction.0.iter(); + if let Some((buffer, transaction)) = entries.next() { + if entries.next().is_none() { + let excerpt = editor.read_with(&cx, |editor, cx| { + editor + .buffer() + .read(cx) + .excerpt_containing(editor.newest_anchor_selection().head(), cx) + }); + if let Some((excerpted_buffer, excerpt_range)) = excerpt { + if excerpted_buffer == *buffer { + let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); + let excerpt_range = excerpt_range.to_offset(&snapshot); + if snapshot + .edited_ranges_for_transaction(transaction) + .all(|range| { + excerpt_range.start <= range.start + && excerpt_range.end >= range.end + }) + { + return Ok(()); + } + } + } + } + } + + let mut ranges_to_highlight = Vec::new(); + let excerpt_buffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(replica_id).with_title(title); + for (buffer, transaction) in &project_transaction.0 { + let snapshot = buffer.read(cx).snapshot(); + ranges_to_highlight.extend( + multibuffer.push_excerpts_with_context_lines( + buffer.clone(), + snapshot + .edited_ranges_for_transaction::(transaction) + .collect(), + 1, + cx, + ), + ); + } + multibuffer.push_transaction(&project_transaction.0); + multibuffer + }); + + workspace.update(&mut cx, |workspace, cx| { + let editor = 
workspace.open_item(MultiBufferItemHandle(excerpt_buffer), cx); + if let Some(editor) = editor.act_as::(cx) { + editor.update(cx, |editor, cx| { + let settings = (editor.build_settings)(cx); + editor.highlight_ranges::( + ranges_to_highlight, + settings.style.highlighted_line_background, + cx, + ); + }); + } + }); + + Ok(()) + })) + } + + fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { + let project = self.project.as_ref()?; + let buffer = self.buffer.read(cx); + let newest_selection = self.newest_anchor_selection().clone(); + let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; + let (end_buffer, end) = buffer.text_anchor_for_position(newest_selection.end, cx)?; + if start_buffer != end_buffer { + return None; + } + + let actions = project.update(cx, |project, cx| { + project.code_actions(&start_buffer, start..end, cx) + }); + self.code_actions_task = Some(cx.spawn_weak(|this, mut cx| async move { + let actions = actions.await; + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + this.available_code_actions = actions.log_err().and_then(|actions| { + if actions.is_empty() { + None + } else { + Some((start_buffer, actions.into())) + } + }); + cx.notify(); + }) + } + })); + None + } + + pub fn render_code_actions_indicator(&self, cx: &mut ViewContext) -> Option { + if self.available_code_actions.is_some() { + enum Tag {} + let style = (self.build_settings)(cx).style; + Some( + MouseEventHandler::new::(0, cx, |_, _| { + Svg::new("icons/zap.svg") + .with_color(style.code_actions_indicator) + .boxed() + }) + .with_cursor_style(CursorStyle::PointingHand) + .with_padding(Padding::uniform(3.)) + .on_mouse_down(|cx| { + cx.dispatch_action(ToggleCodeActions(true)); + }) + .boxed(), + ) + } else { + None + } + } + + pub fn context_menu_visible(&self) -> bool { + self.context_menu + .as_ref() + .map_or(false, |menu| menu.visible()) + } + + pub fn render_context_menu( + &self, + 
cursor_position: DisplayPoint, + cx: &AppContext, + ) -> Option<(DisplayPoint, ElementBox)> { + self.context_menu + .as_ref() + .map(|menu| menu.render(cursor_position, self.build_settings.clone(), cx)) + } + + fn show_context_menu(&mut self, menu: ContextMenu, cx: &mut ViewContext) { + if !matches!(menu, ContextMenu::Completions(_)) { + self.completion_tasks.clear(); + } + self.context_menu = Some(menu); + cx.notify(); + } + + fn hide_context_menu(&mut self, cx: &mut ViewContext) -> Option { + cx.notify(); + self.completion_tasks.clear(); + self.context_menu.take() } pub fn insert_snippet( @@ -2331,7 +2751,14 @@ impl Editor { .0; // Don't move lines across excerpts - if !buffer.range_contains_excerpt_boundary(insertion_point..range_to_move.end) { + if buffer + .excerpt_boundaries_in_range(( + Bound::Excluded(insertion_point), + Bound::Included(range_to_move.end), + )) + .next() + .is_none() + { let text = buffer .text_for_range(range_to_move.clone()) .flat_map(|s| s.chars()) @@ -2431,7 +2858,14 @@ impl Editor { let insertion_point = display_map.next_line_boundary(Point::new(end_row, 0)).0; // Don't move lines across excerpt boundaries - if !buffer.range_contains_excerpt_boundary(range_to_move.start..insertion_point) { + if buffer + .excerpt_boundaries_in_range(( + Bound::Excluded(range_to_move.start), + Bound::Included(insertion_point), + )) + .next() + .is_none() + { let mut text = String::from("\n"); text.extend(buffer.text_for_range(range_to_move.clone())); text.pop(); // Drop trailing newline @@ -2608,7 +3042,7 @@ impl Editor { pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext) { if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { if let Some((selections, _)) = self.selection_history.get(&tx_id).cloned() { - self.set_selections(selections, cx); + self.set_selections(selections, None, cx); } self.request_autoscroll(Autoscroll::Fit, cx); } @@ -2617,7 +3051,7 @@ impl Editor { pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext) { if 
let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { if let Some((_, Some(selections))) = self.selection_history.get(&tx_id).cloned() { - self.set_selections(selections, cx); + self.set_selections(selections, None, cx); } self.request_autoscroll(Autoscroll::Fit, cx); } @@ -2696,15 +3130,10 @@ impl Editor { } pub fn move_up(&mut self, _: &MoveUp, cx: &mut ViewContext) { - if let Some(completion_state) = &mut self.completion_state { - if completion_state.selected_item > 0 { - completion_state.selected_item -= 1; - completion_state - .list - .scroll_to(ScrollTarget::Show(completion_state.selected_item)); + if let Some(context_menu) = self.context_menu.as_mut() { + if context_menu.select_prev(cx) { + return; } - cx.notify(); - return; } if matches!(self.mode, EditorMode::SingleLine) { @@ -2745,15 +3174,10 @@ impl Editor { } pub fn move_down(&mut self, _: &MoveDown, cx: &mut ViewContext) { - if let Some(completion_state) = &mut self.completion_state { - if completion_state.selected_item + 1 < completion_state.matches.len() { - completion_state.selected_item += 1; - completion_state - .list - .scroll_to(ScrollTarget::Show(completion_state.selected_item)); + if let Some(context_menu) = self.context_menu.as_mut() { + if context_menu.select_next(cx) { + return; } - cx.notify(); - return; } if matches!(self.mode, EditorMode::SingleLine) { @@ -3592,7 +4016,13 @@ impl Editor { let editor = editor_handle.read(cx); let buffer = editor.buffer.read(cx); let head = editor.newest_selection::(&buffer.read(cx)).head(); - let (buffer, head) = editor.buffer.read(cx).text_anchor_for_position(head, cx); + let (buffer, head) = + if let Some(text_anchor) = editor.buffer.read(cx).text_anchor_for_position(head, cx) { + text_anchor + } else { + return; + }; + let definitions = workspace .project() .update(cx, |project, cx| project.definition(&buffer, head, cx)); @@ -3905,14 +4335,15 @@ impl Editor { &self, snapshot: &MultiBufferSnapshot, ) -> Selection { - 
self.resolve_selection(self.newest_anchor_selection().unwrap(), snapshot) + self.resolve_selection(self.newest_anchor_selection(), snapshot) } - pub fn newest_anchor_selection(&self) -> Option<&Selection> { + pub fn newest_anchor_selection(&self) -> &Selection { self.pending_selection .as_ref() .map(|s| &s.selection) .or_else(|| self.selections.iter().max_by_key(|s| s.id)) + .unwrap() } pub fn update_selections( @@ -3961,6 +4392,7 @@ impl Editor { goal: selection.goal, } })), + None, cx, ); } @@ -4014,55 +4446,70 @@ impl Editor { selections_with_lost_position } - fn set_selections(&mut self, selections: Arc<[Selection]>, cx: &mut ViewContext) { - let old_cursor_position = self.newest_anchor_selection().map(|s| s.head()); + fn set_selections( + &mut self, + selections: Arc<[Selection]>, + pending_selection: Option, + cx: &mut ViewContext, + ) { + let old_cursor_position = self.newest_anchor_selection().head(); + self.selections = selections; + self.pending_selection = pending_selection; if self.focused { self.buffer.update(cx, |buffer, cx| { buffer.set_active_selections(&self.selections, cx) }); } - let buffer = self.buffer.read(cx).snapshot(cx); - self.pending_selection = None; + let display_map = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; self.add_selections_state = None; self.select_next_state = None; self.select_larger_syntax_node_stack.clear(); self.autoclose_stack.invalidate(&self.selections, &buffer); self.snippet_stack.invalidate(&self.selections, &buffer); - let new_cursor_position = self - .selections - .iter() - .max_by_key(|s| s.id) - .map(|s| s.head()); - if let Some(old_cursor_position) = old_cursor_position { - if let Some(new_cursor_position) = new_cursor_position.as_ref() { - self.push_to_nav_history( - old_cursor_position, - Some(new_cursor_position.to_point(&buffer)), - cx, - ); + let new_cursor_position = self.newest_anchor_selection().head(); + + 
self.push_to_nav_history( + old_cursor_position.clone(), + Some(new_cursor_position.to_point(&buffer)), + cx, + ); + + let completion_menu = match self.context_menu.as_mut() { + Some(ContextMenu::Completions(menu)) => Some(menu), + _ => { + self.context_menu.take(); + None } - } + }; - if let Some((completion_state, cursor_position)) = - self.completion_state.as_mut().zip(new_cursor_position) - { - let cursor_position = cursor_position.to_offset(&buffer); + if let Some(completion_menu) = completion_menu { + let cursor_position = new_cursor_position.to_offset(&buffer); let (word_range, kind) = - buffer.surrounding_word(completion_state.initial_position.clone()); + buffer.surrounding_word(completion_menu.initial_position.clone()); if kind == Some(CharKind::Word) && word_range.to_inclusive().contains(&cursor_position) { let query = Self::completion_query(&buffer, cursor_position); cx.background() - .block(completion_state.filter(query.as_deref(), cx.background().clone())); + .block(completion_menu.filter(query.as_deref(), cx.background().clone())); self.show_completions(&ShowCompletions, cx); } else { - self.hide_completions(cx); + self.hide_context_menu(cx); } } + if old_cursor_position.to_display_point(&display_map).row() + != new_cursor_position.to_display_point(&display_map).row() + { + self.available_code_actions.take(); + } + self.refresh_code_actions(cx); + self.pause_cursor_blinking(cx); cx.emit(Event::SelectionsChanged); } @@ -4376,7 +4823,7 @@ impl Editor { let this = this.downgrade(); async move { Timer::after(CURSOR_BLINK_INTERVAL).await; - if let Some(this) = cx.read(|cx| this.upgrade(cx)) { + if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| this.resume_cursor_blinking(epoch, cx)) } } @@ -4401,7 +4848,7 @@ impl Editor { let this = this.downgrade(); async move { Timer::after(CURSOR_BLINK_INTERVAL).await; - if let Some(this) = cx.read(|cx| this.upgrade(cx)) { + if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| 
this.blink_cursors(epoch, cx)); } } @@ -4415,7 +4862,6 @@ impl Editor { } fn on_buffer_changed(&mut self, _: ModelHandle, cx: &mut ViewContext) { - self.refresh_active_diagnostics(cx); cx.notify(); } @@ -4426,12 +4872,19 @@ impl Editor { cx: &mut ViewContext, ) { match event { - language::Event::Edited => cx.emit(Event::Edited), + language::Event::Edited => { + self.refresh_active_diagnostics(cx); + self.refresh_code_actions(cx); + cx.emit(Event::Edited); + } language::Event::Dirtied => cx.emit(Event::Dirtied), language::Event::Saved => cx.emit(Event::Saved), language::Event::FileHandleChanged => cx.emit(Event::TitleChanged), language::Event::Reloaded => cx.emit(Event::TitleChanged), language::Event::Closed => cx.emit(Event::Closed), + language::Event::DiagnosticsUpdated => { + self.refresh_active_diagnostics(cx); + } _ => {} } } @@ -4544,6 +4997,7 @@ impl EditorSettings { hint_diagnostic: default_diagnostic_style.clone(), invalid_hint_diagnostic: default_diagnostic_style.clone(), autocomplete: Default::default(), + code_actions_indicator: Default::default(), } }, } @@ -4601,7 +5055,7 @@ impl View for Editor { self.focused = true; self.blink_cursors(self.blink_epoch, cx); self.buffer.update(cx, |buffer, cx| { - buffer.avoid_grouping_next_transaction(cx); + buffer.finalize_last_transaction(cx); buffer.set_active_selections(&self.selections, cx) }); } @@ -4611,7 +5065,7 @@ impl View for Editor { self.show_local_cursors = false; self.buffer .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); - self.hide_completions(cx); + self.hide_context_menu(cx); cx.emit(Event::Blurred); cx.notify(); } @@ -4624,8 +5078,14 @@ impl View for Editor { EditorMode::Full => "full", }; cx.map.insert("mode".into(), mode.into()); - if self.completion_state.is_some() { - cx.set.insert("completing".into()); + match self.context_menu.as_ref() { + Some(ContextMenu::Completions(_)) => { + cx.set.insert("showing_completions".into()); + } + Some(ContextMenu::CodeActions(_)) => { + 
cx.set.insert("showing_code_actions".into()); + } + None => {} } cx } @@ -4985,9 +5445,11 @@ fn styled_runs_for_completion_label<'a>( #[cfg(test)] mod tests { use super::*; - use language::{FakeFile, LanguageConfig}; + use language::LanguageConfig; use lsp::FakeLanguageServer; - use std::{cell::RefCell, path::Path, rc::Rc, time::Instant}; + use postage::prelude::Stream; + use project::{FakeFs, ProjectPath}; + use std::{cell::RefCell, rc::Rc, time::Instant}; use text::Point; use unindent::Unindent; use util::test::sample_text; @@ -7275,7 +7737,7 @@ mod tests { }), ..Default::default() }, - cx.background(), + &cx, ) .await; @@ -7285,32 +7747,46 @@ mod tests { three " .unindent(); - let buffer = cx.add_model(|cx| { - Buffer::from_file( - 0, - text, - Box::new(FakeFile { - path: Arc::from(Path::new("/the/file")), - }), - cx, - ) - .with_language_server(language_server, cx) + + let fs = Arc::new(FakeFs::new(cx.background().clone())); + fs.insert_file("/file", text).await.unwrap(); + + let project = Project::test(fs, &mut cx); + + let (worktree, relative_path) = project + .update(&mut cx, |project, cx| { + project.find_or_create_local_worktree("/file", false, cx) + }) + .await + .unwrap(); + let project_path = ProjectPath { + worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()), + path: relative_path.into(), + }; + let buffer = project + .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) + .await + .unwrap(); + buffer.update(&mut cx, |buffer, cx| { + buffer.set_language_server(Some(language_server), cx); }); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); buffer.next_notification(&cx).await; let (_, editor) = cx.add_window(|cx| build_editor(buffer, settings, cx)); editor.update(&mut cx, |editor, cx| { + editor.project = Some(project); editor.select_ranges([Point::new(0, 3)..Point::new(0, 3)], None, cx); editor.handle_input(&Input(".".to_string()), cx); }); handle_completion_request( &mut fake, - "/the/file", + 
"/file", Point::new(0, 4), - &[ + vec![ (Point::new(0, 4)..Point::new(0, 4), "first_completion"), (Point::new(0, 4)..Point::new(0, 4), "second_completion"), ], @@ -7320,7 +7796,9 @@ mod tests { let apply_additional_edits = editor.update(&mut cx, |editor, cx| { editor.move_down(&MoveDown, cx); - let apply_additional_edits = editor.confirm_completion(None, cx).unwrap(); + let apply_additional_edits = editor + .confirm_completion(&ConfirmCompletion(None), cx) + .unwrap(); assert_eq!( editor.text(cx), " @@ -7361,16 +7839,16 @@ mod tests { ); editor.handle_input(&Input(" ".to_string()), cx); - assert!(editor.completion_state.is_none()); + assert!(editor.context_menu.is_none()); editor.handle_input(&Input("s".to_string()), cx); - assert!(editor.completion_state.is_none()); + assert!(editor.context_menu.is_none()); }); handle_completion_request( &mut fake, - "/the/file", + "/file", Point::new(2, 7), - &[ + vec![ (Point::new(2, 6)..Point::new(2, 7), "fourth_completion"), (Point::new(2, 6)..Point::new(2, 7), "fifth_completion"), (Point::new(2, 6)..Point::new(2, 7), "sixth_completion"), @@ -7378,7 +7856,7 @@ mod tests { ) .await; editor - .condition(&cx, |editor, _| editor.completion_state.is_some()) + .condition(&cx, |editor, _| editor.context_menu.is_some()) .await; editor.update(&mut cx, |editor, cx| { @@ -7387,9 +7865,9 @@ mod tests { handle_completion_request( &mut fake, - "/the/file", + "/file", Point::new(2, 8), - &[ + vec![ (Point::new(2, 6)..Point::new(2, 8), "fourth_completion"), (Point::new(2, 6)..Point::new(2, 8), "fifth_completion"), (Point::new(2, 6)..Point::new(2, 8), "sixth_completion"), @@ -7399,7 +7877,9 @@ mod tests { editor.next_notification(&cx).await; let apply_additional_edits = editor.update(&mut cx, |editor, cx| { - let apply_additional_edits = editor.confirm_completion(None, cx).unwrap(); + let apply_additional_edits = editor + .confirm_completion(&ConfirmCompletion(None), cx) + .unwrap(); assert_eq!( editor.text(cx), " @@ -7417,47 +7897,45 @@ mod 
tests { async fn handle_completion_request( fake: &mut FakeLanguageServer, - path: &str, + path: &'static str, position: Point, - completions: &[(Range, &str)], + completions: Vec<(Range, &'static str)>, ) { - let (id, params) = fake.receive_request::().await; - assert_eq!( - params.text_document_position.text_document.uri, - lsp::Url::from_file_path(path).unwrap() - ); - assert_eq!( - params.text_document_position.position, - lsp::Position::new(position.row, position.column) - ); - - let completions = completions - .iter() - .map(|(range, new_text)| lsp::CompletionItem { - label: new_text.to_string(), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - range: lsp::Range::new( - lsp::Position::new(range.start.row, range.start.column), - lsp::Position::new(range.start.row, range.start.column), - ), - new_text: new_text.to_string(), - })), - ..Default::default() - }) - .collect(); - fake.respond(id, Some(lsp::CompletionResponse::Array(completions))) - .await; + fake.handle_request::(move |params| { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path(path).unwrap() + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(position.row, position.column) + ); + Some(lsp::CompletionResponse::Array( + completions + .into_iter() + .map(|(range, new_text)| lsp::CompletionItem { + label: new_text.to_string(), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + range: lsp::Range::new( + lsp::Position::new(range.start.row, range.start.column), + lsp::Position::new(range.start.row, range.start.column), + ), + new_text: new_text.to_string(), + })), + ..Default::default() + }) + .collect(), + )) + }) + .recv() + .await; } async fn handle_resolve_completion_request( fake: &mut FakeLanguageServer, - edit: Option<(Range, &str)>, + edit: Option<(Range, &'static str)>, ) { - let (id, _) = fake - .receive_request::() - .await; - fake.respond( - id, + fake.handle_request::(move |_| { 
lsp::CompletionItem { additional_text_edits: edit.map(|(range, new_text)| { vec![lsp::TextEdit::new( @@ -7469,8 +7947,9 @@ mod tests { )] }), ..Default::default() - }, - ) + } + }) + .recv() .await; } } @@ -7561,18 +8040,12 @@ mod tests { let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(0, 0)..Point::new(0, 4), - }, - cx, - ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(1, 0)..Point::new(1, 4), - }, + multibuffer.push_excerpts( + buffer.clone(), + [ + Point::new(0, 0)..Point::new(0, 4), + Point::new(1, 0)..Point::new(1, 4), + ], cx, ); multibuffer @@ -7584,21 +8057,23 @@ mod tests { build_editor(multibuffer, settings, cx) }); view.update(cx, |view, cx| { - view.select_display_ranges( - &[ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + assert_eq!(view.text(cx), "aaaa\nbbbb"); + view.select_ranges( + [ + Point::new(0, 0)..Point::new(0, 0), + Point::new(1, 0)..Point::new(1, 0), ], + None, cx, ); view.handle_input(&Input("X".to_string()), cx); assert_eq!(view.text(cx), "Xaaaa\nXbbbb"); assert_eq!( - view.selected_display_ranges(cx), - &[ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), + view.selected_ranges(cx), + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), ] ) }); @@ -7610,18 +8085,12 @@ mod tests { let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(0, 0)..Point::new(1, 4), - }, - cx, - ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(1, 
0)..Point::new(2, 4), - }, + multibuffer.push_excerpts( + buffer, + [ + Point::new(0, 0)..Point::new(1, 4), + Point::new(1, 0)..Point::new(2, 4), + ], cx, ); multibuffer @@ -7636,31 +8105,32 @@ mod tests { build_editor(multibuffer, settings, cx) }); view.update(cx, |view, cx| { - view.select_display_ranges( - &[ - DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), - DisplayPoint::new(2, 3)..DisplayPoint::new(2, 3), + view.select_ranges( + [ + Point::new(1, 1)..Point::new(1, 1), + Point::new(2, 3)..Point::new(2, 3), ], + None, cx, ); view.handle_input(&Input("X".to_string()), cx); assert_eq!(view.text(cx), "aaaa\nbXbbXb\nbXbbXb\ncccc"); assert_eq!( - view.selected_display_ranges(cx), - &[ - DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), - DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), + view.selected_ranges(cx), + [ + Point::new(1, 2)..Point::new(1, 2), + Point::new(2, 5)..Point::new(2, 5), ] ); view.newline(&Newline, cx); assert_eq!(view.text(cx), "aaaa\nbX\nbbX\nb\nbX\nbbX\nb\ncccc"); assert_eq!( - view.selected_display_ranges(cx), - &[ - DisplayPoint::new(2, 0)..DisplayPoint::new(2, 0), - DisplayPoint::new(6, 0)..DisplayPoint::new(6, 0), + view.selected_ranges(cx), + [ + Point::new(2, 0)..Point::new(2, 0), + Point::new(6, 0)..Point::new(6, 0), ] ); }); @@ -7673,20 +8143,17 @@ mod tests { let mut excerpt1_id = None; let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - excerpt1_id = Some(multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(0, 0)..Point::new(1, 4), - }, - cx, - )); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(1, 0)..Point::new(2, 4), - }, - cx, - ); + excerpt1_id = multibuffer + .push_excerpts( + buffer.clone(), + [ + Point::new(0, 0)..Point::new(1, 4), + Point::new(1, 0)..Point::new(2, 4), + ], + cx, + ) + .into_iter() + .next(); multibuffer }); assert_eq!( @@ -7695,11 +8162,12 @@ mod tests { ); let (_, editor) = 
cx.add_window(Default::default(), |cx| { let mut editor = build_editor(multibuffer.clone(), settings, cx); - editor.select_display_ranges( - &[ - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), + editor.select_ranges( + [ + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), ], + None, cx, ); editor @@ -7709,10 +8177,10 @@ mod tests { editor.update(cx, |editor, cx| { editor.refresh_selections(cx); assert_eq!( - editor.selected_display_ranges(cx), + editor.selected_ranges(cx), [ - DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), - DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), + Point::new(1, 3)..Point::new(1, 3), + Point::new(2, 1)..Point::new(2, 1), ] ); }); @@ -7723,10 +8191,10 @@ mod tests { editor.update(cx, |editor, cx| { // Removing an excerpt causes the first selection to become degenerate. assert_eq!( - editor.selected_display_ranges(cx), + editor.selected_ranges(cx), [ - DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1) + Point::new(0, 0)..Point::new(0, 0), + Point::new(0, 1)..Point::new(0, 1) ] ); @@ -7734,10 +8202,10 @@ mod tests { // location. 
editor.refresh_selections(cx); assert_eq!( - editor.selected_display_ranges(cx), + editor.selected_ranges(cx), [ - DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), - DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3) + Point::new(0, 1)..Point::new(0, 1), + Point::new(0, 3)..Point::new(0, 3) ] ); }); @@ -7971,7 +8439,7 @@ mod tests { settings: EditorSettings, cx: &mut ViewContext, ) -> Editor { - Editor::for_buffer(buffer, Arc::new(move |_| settings.clone()), cx) + Editor::for_buffer(buffer, Arc::new(move |_| settings.clone()), None, cx) } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a22e6021e06bce11d7993518a59b3d5da08f6aae..f72320be42a2973d9502be4a509ddc349ca13652 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3,11 +3,12 @@ use super::{ Anchor, DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input, Scroll, Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN, }; +use crate::display_map::TransformBlock; use clock::ReplicaId; use collections::{BTreeMap, HashMap}; use gpui::{ color::Color, - elements::layout_highlighted_chunks, + elements::*, fonts::{HighlightStyle, Underline}, geometry::{ rect::RectF, @@ -280,7 +281,7 @@ impl EditorElement { &mut self, bounds: RectF, visible_bounds: RectF, - layout: &LayoutState, + layout: &mut LayoutState, cx: &mut PaintContext, ) { let scroll_top = layout.snapshot.scroll_position().y() * layout.line_height; @@ -294,6 +295,14 @@ impl EditorElement { line.paint(line_origin, visible_bounds, layout.line_height, cx); } } + + if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() { + let mut x = bounds.width() - layout.gutter_padding; + let mut y = *row as f32 * layout.line_height - scroll_top; + x += ((layout.gutter_padding + layout.text_offset.x()) - indicator.size().x()) / 2.; + y += (layout.line_height - indicator.size().y()) / 2.; + indicator.paint(bounds.origin() + vec2f(x, y), visible_bounds, cx); + } } fn 
paint_text( @@ -392,20 +401,20 @@ impl EditorElement { } cx.scene.pop_layer(); - if let Some((position, completions_list)) = layout.completions.as_mut() { + if let Some((position, context_menu)) = layout.context_menu.as_mut() { cx.scene.push_stacking_context(None); let cursor_row_layout = &layout.line_layouts[(position.row() - start_row) as usize]; let x = cursor_row_layout.x_for_index(position.column() as usize) - scroll_left; let y = (position.row() + 1) as f32 * layout.line_height - scroll_top; let mut list_origin = content_origin + vec2f(x, y); - let list_height = completions_list.size().y(); + let list_height = context_menu.size().y(); if list_origin.y() + list_height > bounds.lower_left().y() { list_origin.set_y(list_origin.y() - layout.line_height - list_height); } - completions_list.paint( + context_menu.paint( list_origin, RectF::from_points(Vector2F::zero(), vec2f(f32::MAX, f32::MAX)), // Let content bleed outside of editor cx, @@ -649,33 +658,91 @@ impl EditorElement { line_layouts: &[text_layout::Line], cx: &mut LayoutContext, ) -> Vec<(u32, ElementBox)> { + let scroll_x = snapshot.scroll_position.x(); snapshot .blocks_in_range(rows.clone()) - .map(|(start_row, block)| { - let anchor_row = block - .position() - .to_point(&snapshot.buffer_snapshot) - .to_display_point(snapshot) - .row(); - - let anchor_x = text_x - + if rows.contains(&anchor_row) { - line_layouts[(anchor_row - rows.start) as usize] - .x_for_index(block.column() as usize) - } else { - layout_line(anchor_row, snapshot, style, cx.text_layout_cache) - .x_for_index(block.column() as usize) - }; + .map(|(block_row, block)| { + let mut element = match block { + TransformBlock::Custom(block) => { + let align_to = block + .position() + .to_point(&snapshot.buffer_snapshot) + .to_display_point(snapshot); + let anchor_x = text_x + + if rows.contains(&align_to.row()) { + line_layouts[(align_to.row() - rows.start) as usize] + .x_for_index(align_to.column() as usize) + } else { + 
layout_line(align_to.row(), snapshot, style, cx.text_layout_cache) + .x_for_index(align_to.column() as usize) + }; + + block.render(&BlockContext { + cx, + anchor_x, + gutter_padding, + line_height, + scroll_x, + gutter_width, + em_width, + }) + } + TransformBlock::ExcerptHeader { + buffer, + starts_new_buffer, + .. + } => { + if *starts_new_buffer { + let style = &self.settings.style.diagnostic_path_header; + let font_size = (style.text_scale_factor + * self.settings.style.text.font_size) + .round(); + + let mut filename = None; + let mut parent_path = None; + if let Some(path) = buffer.path() { + filename = + path.file_name().map(|f| f.to_string_lossy().to_string()); + parent_path = + path.parent().map(|p| p.to_string_lossy().to_string() + "/"); + } + + Flex::row() + .with_child( + Label::new( + filename.unwrap_or_else(|| "untitled".to_string()), + style.filename.text.clone().with_font_size(font_size), + ) + .contained() + .with_style(style.filename.container) + .boxed(), + ) + .with_children(parent_path.map(|path| { + Label::new( + path, + style.path.text.clone().with_font_size(font_size), + ) + .contained() + .with_style(style.path.container) + .boxed() + })) + .aligned() + .left() + .contained() + .with_style(style.container) + .with_padding_left(gutter_padding + scroll_x * em_width) + .expanded() + .named("path header block") + } else { + let text_style = self.settings.style.text.clone(); + Label::new("…".to_string(), text_style) + .contained() + .with_padding_left(gutter_padding + scroll_x * em_width) + .named("collapsed context") + } + } + }; - let mut element = block.render(&BlockContext { - cx, - anchor_x, - gutter_padding, - line_height, - scroll_x: snapshot.scroll_position.x(), - gutter_width, - em_width, - }); element.layout( SizeConstraint { min: Vector2F::zero(), @@ -683,7 +750,7 @@ impl EditorElement { }, cx, ); - (start_row, element) + (block_row, element) }) .collect() } @@ -859,7 +926,8 @@ impl Element for EditorElement { 
max_row.saturating_sub(1) as f32, ); - let mut completions = None; + let mut context_menu = None; + let mut code_actions_indicator = None; self.update_view(cx.app, |view, cx| { let clamped = view.clamp_scroll_left(scroll_max.x()); let autoscrolled; @@ -880,21 +948,24 @@ impl Element for EditorElement { snapshot = view.snapshot(cx); } - if view.has_completions() { - let newest_selection_head = view - .newest_selection::(&snapshot.buffer_snapshot) - .head() - .to_display_point(&snapshot); + let newest_selection_head = view + .newest_selection::(&snapshot.buffer_snapshot) + .head() + .to_display_point(&snapshot); - if (start_row..end_row).contains(&newest_selection_head.row()) { - let list = view.render_completions(cx).unwrap(); - completions = Some((newest_selection_head, list)); + if (start_row..end_row).contains(&newest_selection_head.row()) { + if view.context_menu_visible() { + context_menu = view.render_context_menu(newest_selection_head, cx); } + + code_actions_indicator = view + .render_code_actions_indicator(cx) + .map(|indicator| (newest_selection_head.row(), indicator)); } }); - if let Some((_, completions_list)) = completions.as_mut() { - completions_list.layout( + if let Some((_, context_menu)) = context_menu.as_mut() { + context_menu.layout( SizeConstraint { min: Vector2F::zero(), max: vec2f( @@ -906,6 +977,13 @@ impl Element for EditorElement { ); } + if let Some((_, indicator)) = code_actions_indicator.as_mut() { + indicator.layout( + SizeConstraint::strict_along(Axis::Vertical, line_height * 0.618), + cx, + ); + } + let blocks = self.layout_blocks( start_row..end_row, &snapshot, @@ -940,7 +1018,8 @@ impl Element for EditorElement { em_width, em_advance, selections, - completions, + context_menu, + code_actions_indicator, }, ) } @@ -989,8 +1068,14 @@ impl Element for EditorElement { paint: &mut PaintState, cx: &mut EventContext, ) -> bool { - if let Some((_, completion_list)) = &mut layout.completions { - if completion_list.dispatch_event(event, cx) { 
+ if let Some((_, context_menu)) = &mut layout.context_menu { + if context_menu.dispatch_event(event, cx) { + return true; + } + } + + if let Some((_, indicator)) = &mut layout.code_actions_indicator { + if indicator.dispatch_event(event, cx) { return true; } } @@ -1051,7 +1136,8 @@ pub struct LayoutState { highlighted_ranges: Vec<(Range, Color)>, selections: HashMap>>, text_offset: Vector2F, - completions: Option<(DisplayPoint, ElementBox)>, + context_menu: Option<(DisplayPoint, ElementBox)>, + code_actions_indicator: Option<(u32, ElementBox)>, } fn layout_line( @@ -1298,6 +1384,7 @@ mod tests { let settings = settings.clone(); Arc::new(move |_| settings.clone()) }, + None, cx, ) }); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 97ce05615246cdd236e4a32cf580296bd142d119..c669bc744e1b7dc82642aeba9f2e4ac8248a8a9d 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -11,7 +11,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::{cell::RefCell, fmt::Write}; use text::{Point, Selection}; -use util::TryFutureExt; +use util::ResultExt; use workspace::{ ItemHandle, ItemNavHistory, ItemView, ItemViewHandle, NavHistory, PathOpener, Settings, StatusItemView, WeakItemHandle, Workspace, @@ -25,6 +25,12 @@ pub struct BufferItemHandle(pub ModelHandle); #[derive(Clone)] struct WeakBufferItemHandle(WeakModelHandle); +#[derive(Clone)] +pub struct MultiBufferItemHandle(pub ModelHandle); + +#[derive(Clone)] +struct WeakMultiBufferItemHandle(WeakModelHandle); + impl PathOpener for BufferOpener { fn open( &self, @@ -55,6 +61,7 @@ impl ItemHandle for BufferItemHandle { let mut editor = Editor::for_buffer( buffer, crate::settings_builder(weak_buffer, workspace.settings()), + Some(workspace.project().clone()), cx, ); editor.nav_history = Some(ItemNavHistory::new(nav_history, &cx.handle())); @@ -86,6 +93,48 @@ impl ItemHandle for BufferItemHandle { } } +impl ItemHandle for MultiBufferItemHandle { + fn add_view( + &self, + window_id: 
usize, + workspace: &Workspace, + nav_history: Rc>, + cx: &mut MutableAppContext, + ) -> Box { + let weak_buffer = self.0.downgrade(); + Box::new(cx.add_view(window_id, |cx| { + let mut editor = Editor::for_buffer( + self.0.clone(), + crate::settings_builder(weak_buffer, workspace.settings()), + Some(workspace.project().clone()), + cx, + ); + editor.nav_history = Some(ItemNavHistory::new(nav_history, &cx.handle())); + editor + })) + } + + fn boxed_clone(&self) -> Box { + Box::new(self.clone()) + } + + fn to_any(&self) -> gpui::AnyModelHandle { + self.0.clone().into() + } + + fn downgrade(&self) -> Box { + Box::new(WeakMultiBufferItemHandle(self.0.downgrade())) + } + + fn project_path(&self, _: &AppContext) -> Option { + None + } + + fn id(&self) -> usize { + self.0.id() + } +} + impl WeakItemHandle for WeakBufferItemHandle { fn upgrade(&self, cx: &AppContext) -> Option> { self.0 @@ -98,11 +147,25 @@ impl WeakItemHandle for WeakBufferItemHandle { } } -impl ItemView for Editor { - type ItemHandle = BufferItemHandle; +impl WeakItemHandle for WeakMultiBufferItemHandle { + fn upgrade(&self, cx: &AppContext) -> Option> { + self.0 + .upgrade(cx) + .map(|buffer| Box::new(MultiBufferItemHandle(buffer)) as Box) + } - fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle { - BufferItemHandle(self.buffer.read(cx).as_singleton().unwrap()) + fn id(&self) -> usize { + self.0.id() + } +} + +impl ItemView for Editor { + fn item_id(&self, cx: &AppContext) -> usize { + if let Some(buffer) = self.buffer.read(cx).as_singleton() { + buffer.id() + } else { + self.buffer.id() + } } fn navigate(&mut self, data: Box, cx: &mut ViewContext) { @@ -141,9 +204,8 @@ impl ItemView for Editor { } fn deactivated(&mut self, cx: &mut ViewContext) { - if let Some(selection) = self.newest_anchor_selection() { - self.push_to_nav_history(selection.head(), None, cx); - } + let selection = self.newest_anchor_selection(); + self.push_to_nav_history(selection.head(), None, cx); } fn is_dirty(&self, cx: 
&AppContext) -> bool { @@ -155,25 +217,39 @@ impl ItemView for Editor { } fn can_save(&self, cx: &AppContext) -> bool { - self.project_path(cx).is_some() + !self.buffer().read(cx).is_singleton() || self.project_path(cx).is_some() } - fn save(&mut self, cx: &mut ViewContext) -> Task> { + fn save( + &mut self, + project: ModelHandle, + cx: &mut ViewContext, + ) -> Task> { let buffer = self.buffer().clone(); - cx.spawn(|editor, mut cx| async move { - buffer - .update(&mut cx, |buffer, cx| buffer.format(cx).log_err()) - .await; - editor.update(&mut cx, |editor, cx| { + let buffers = buffer.read(cx).all_buffers(); + let transaction = project.update(cx, |project, cx| project.format(buffers, true, cx)); + cx.spawn(|this, mut cx| async move { + let transaction = transaction.await.log_err(); + this.update(&mut cx, |editor, cx| { editor.request_autoscroll(Autoscroll::Fit, cx) }); - buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?; + buffer + .update(&mut cx, |buffer, cx| { + if let Some(transaction) = transaction { + if !buffer.is_singleton() { + buffer.push_transaction(&transaction.0); + } + } + + buffer.save(cx) + }) + .await?; Ok(()) }) } - fn can_save_as(&self, _: &AppContext) -> bool { - true + fn can_save_as(&self, cx: &AppContext) -> bool { + self.buffer().read(cx).is_singleton() } fn save_as( @@ -331,7 +407,7 @@ impl View for DiagnosticMessage { if let Some(diagnostic) = &self.diagnostic { let theme = &self.settings.borrow().theme.workspace.status_bar; Label::new( - diagnostic.message.lines().next().unwrap().to_string(), + diagnostic.message.split('\n').next().unwrap().to_string(), theme.diagnostic_message.clone(), ) .contained() diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 287ad34dc87e6687e7041f027b93ed205621ef99..5951e2c20db8b9f675f0d6a25afb7e5590e8dc7b 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -225,13 +225,8 @@ pub fn surrounding_word(map: &DisplaySnapshot, position: 
DisplayPoint) -> Range< #[cfg(test)] mod tests { use super::*; - use crate::{ - display_map::{BlockDisposition, BlockProperties}, - Buffer, DisplayMap, ExcerptProperties, MultiBuffer, - }; - use gpui::{elements::Empty, Element}; + use crate::{Buffer, DisplayMap, MultiBuffer}; use language::Point; - use std::sync::Arc; #[gpui::test] fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) { @@ -242,62 +237,24 @@ mod tests { .unwrap(); let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx)); - let mut excerpt1_header_position = None; - let mut excerpt2_header_position = None; let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - let excerpt1_id = multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(0, 0)..Point::new(1, 4), - }, - cx, - ); - let excerpt2_id = multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(2, 0)..Point::new(3, 2), - }, + multibuffer.push_excerpts( + buffer.clone(), + [ + Point::new(0, 0)..Point::new(1, 4), + Point::new(2, 0)..Point::new(3, 2), + ], cx, ); - - excerpt1_header_position = Some( - multibuffer - .read(cx) - .anchor_in_excerpt(excerpt1_id, language::Anchor::min()), - ); - excerpt2_header_position = Some( - multibuffer - .read(cx) - .anchor_in_excerpt(excerpt2_id, language::Anchor::min()), - ); multibuffer }); let display_map = - cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx)); - display_map.update(cx, |display_map, cx| { - display_map.insert_blocks( - [ - BlockProperties { - position: excerpt1_header_position.unwrap(), - height: 2, - render: Arc::new(|_| Empty::new().boxed()), - disposition: BlockDisposition::Above, - }, - BlockProperties { - position: excerpt2_header_position.unwrap(), - height: 3, - render: Arc::new(|_| Empty::new().boxed()), - disposition: BlockDisposition::Above, - }, - ], - cx, - ) - }); + cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, 
None, 2, 2, cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); - assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn"); + assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn"); // Can't move up into the first excerpt's header assert_eq!( @@ -321,22 +278,22 @@ mod tests { // Move up and down across second excerpt's header assert_eq!( - up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(), + up(&snapshot, DisplayPoint::new(6, 5), SelectionGoal::Column(5)).unwrap(), (DisplayPoint::new(3, 4), SelectionGoal::Column(5)), ); assert_eq!( down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(), - (DisplayPoint::new(7, 5), SelectionGoal::Column(5)), + (DisplayPoint::new(6, 5), SelectionGoal::Column(5)), ); // Can't move down off the end assert_eq!( - down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(), - (DisplayPoint::new(8, 2), SelectionGoal::Column(2)), + down(&snapshot, DisplayPoint::new(7, 0), SelectionGoal::Column(0)).unwrap(), + (DisplayPoint::new(7, 2), SelectionGoal::Column(2)), ); assert_eq!( - down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(), - (DisplayPoint::new(8, 2), SelectionGoal::Column(2)), + down(&snapshot, DisplayPoint::new(7, 2), SelectionGoal::Column(2)).unwrap(), + (DisplayPoint::new(7, 2), SelectionGoal::Column(2)), ); } @@ -351,8 +308,8 @@ mod tests { let font_size = 14.0; let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx); - let display_map = - cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx)); + let display_map = cx + .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); assert_eq!( prev_word_boundary(&snapshot, DisplayPoint::new(0, 12)), @@ -407,8 +364,8 @@ mod tests { .unwrap(); let font_size = 14.0; let buffer = MultiBuffer::build_simple("lorem ipsum dolor\n sit", cx); - let 
display_map = - cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx)); + let display_map = cx + .add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, 1, 1, cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); assert_eq!( diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 38703230487febfb85af1c1030ac4bcd8536993e..8180acacc55864fec77797ab126d01e3c176fe6d 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -3,18 +3,18 @@ mod anchor; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; -use collections::{HashMap, HashSet}; +use collections::{Bound, HashMap, HashSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; pub use language::Completion; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Outline, - OutlineItem, Selection, ToOffset as _, ToPoint as _, TransactionId, + OutlineItem, Selection, ToOffset as _, ToPoint as _, ToPointUtf16 as _, TransactionId, }; use std::{ cell::{Ref, RefCell}, cmp, fmt, io, iter::{self, FromIterator}, - ops::{Range, Sub}, + ops::{Range, RangeBounds, Sub}, str, sync::Arc, time::{Duration, Instant}, @@ -27,7 +27,6 @@ use text::{ AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary, }; use theme::SyntaxTheme; -use util::post_inc; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -40,10 +39,11 @@ pub struct MultiBuffer { singleton: bool, replica_id: ReplicaId, history: History, + title: Option, } struct History { - next_transaction_id: usize, + next_transaction_id: TransactionId, undo_stack: Vec, redo_stack: Vec, transaction_depth: usize, @@ -59,10 +59,11 @@ pub enum CharKind { } struct Transaction { - id: usize, - buffer_transactions: HashSet<(usize, text::TransactionId)>, + id: TransactionId, + buffer_transactions: HashMap, first_edit_at: Instant, last_edit_at: Instant, + 
suppress_grouping: bool, } pub trait ToOffset: 'static + fmt::Debug { @@ -73,12 +74,17 @@ pub trait ToPoint: 'static + fmt::Debug { fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; } +pub trait ToPointUtf16: 'static + fmt::Debug { + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16; +} + struct BufferState { buffer: ModelHandle, last_version: clock::Global, last_parse_count: usize, last_selections_update_count: usize, last_diagnostics_update_count: usize, + last_file_update_count: usize, excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -89,13 +95,16 @@ pub struct MultiBufferSnapshot { excerpts: SumTree, parse_count: usize, diagnostics_update_count: usize, + trailing_excerpt_update_count: usize, is_dirty: bool, has_conflict: bool, } -pub struct ExcerptProperties<'a, T> { - pub buffer: &'a ModelHandle, - pub range: Range, +pub struct ExcerptBoundary { + pub row: u32, + pub buffer: BufferSnapshot, + pub range: Range, + pub starts_new_buffer: bool, } #[derive(Clone)] @@ -160,19 +169,19 @@ impl MultiBuffer { transaction_depth: 0, group_interval: Duration::from_millis(300), }, + title: Default::default(), } } + pub fn with_title(mut self, title: String) -> Self { + self.title = Some(title); + self + } + pub fn singleton(buffer: ModelHandle, cx: &mut ModelContext) -> Self { let mut this = Self::new(buffer.read(cx).replica_id()); this.singleton = true; - this.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: text::Anchor::min()..text::Anchor::max(), - }, - cx, - ); + this.push_excerpts(buffer, [text::Anchor::min()..text::Anchor::max()], cx); this.snapshot.borrow_mut().singleton = true; this } @@ -185,56 +194,13 @@ impl MultiBuffer { #[cfg(any(test, feature = "test-support"))] pub fn build_random( - mut rng: &mut impl rand::Rng, + rng: &mut impl rand::Rng, cx: &mut gpui::MutableAppContext, ) -> ModelHandle { - use rand::prelude::*; - use std::env; - use text::RandomCharIter; - - let max_excerpts = 
env::var("MAX_EXCERPTS") - .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) - .unwrap_or(5); - let excerpts = rng.gen_range(1..=max_excerpts); - cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - let mut buffers = Vec::new(); - for _ in 0..excerpts { - let buffer_handle = if rng.gen() || buffers.is_empty() { - let text = RandomCharIter::new(&mut rng).take(10).collect::(); - buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx))); - let buffer = buffers.last().unwrap(); - log::info!( - "Creating new buffer {} with text: {:?}", - buffer.id(), - buffer.read(cx).text() - ); - buffers.last().unwrap() - } else { - buffers.choose(rng).unwrap() - }; - - let buffer = buffer_handle.read(cx); - let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); - let header_height = rng.gen_range(0..=5); - log::info!( - "Inserting excerpt from buffer {} with header height {} and range {:?}: {:?}", - buffer_handle.id(), - header_height, - start_ix..end_ix, - &buffer.text()[start_ix..end_ix] - ); - - multibuffer.push_excerpt( - ExcerptProperties { - buffer: buffer_handle, - range: start_ix..end_ix, - }, - cx, - ); - } + let mutation_count = rng.gen_range(1..=5); + multibuffer.randomly_edit_excerpts(rng, mutation_count, cx); multibuffer }) } @@ -269,6 +235,10 @@ impl MultiBuffer { } } + pub fn is_singleton(&self) -> bool { + self.singleton + } + pub fn subscribe(&mut self) -> Subscription { self.subscriptions.subscribe() } @@ -306,6 +276,10 @@ impl MultiBuffer { S: ToOffset, T: Into, { + if self.buffers.borrow().is_empty() { + return; + } + if let Some(buffer) = self.as_singleton() { let snapshot = self.read(cx); let ranges = ranges_iter @@ -451,12 +425,12 @@ impl MultiBuffer { return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); } - let mut buffer_transactions = HashSet::default(); + let mut buffer_transactions = HashMap::default(); for 
BufferState { buffer, .. } in self.buffers.borrow().values() { if let Some(transaction_id) = buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) { - buffer_transactions.insert((buffer.id(), transaction_id)); + buffer_transactions.insert(buffer.id(), transaction_id); } } @@ -468,12 +442,24 @@ impl MultiBuffer { } } - pub fn avoid_grouping_next_transaction(&mut self, cx: &mut ModelContext) { + pub fn finalize_last_transaction(&mut self, cx: &mut ModelContext) { + self.history.finalize_last_transaction(); for BufferState { buffer, .. } in self.buffers.borrow().values() { - buffer.update(cx, |buffer, _| buffer.avoid_grouping_next_transaction()); + buffer.update(cx, |buffer, _| { + buffer.finalize_last_transaction(); + }); } } + pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T) + where + T: IntoIterator, &'a language::Transaction)>, + { + self.history + .push_transaction(buffer_transactions, Instant::now()); + self.history.finalize_last_transaction(); + } + pub fn set_active_selections( &mut self, selections: &[Selection], @@ -571,10 +557,14 @@ impl MultiBuffer { while let Some(transaction) = self.history.pop_undo() { let mut undone = false; - for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { + for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { if let Some(BufferState { buffer, .. 
}) = self.buffers.borrow().get(&buffer_id) { - undone |= buffer.update(cx, |buf, cx| { - buf.undo_transaction(*buffer_transaction_id, cx) + undone |= buffer.update(cx, |buffer, cx| { + let undo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_undo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.undo_to_transaction(undo_to, cx) }); } } @@ -594,10 +584,14 @@ impl MultiBuffer { while let Some(transaction) = self.history.pop_redo() { let mut redone = false; - for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { + for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions { if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) { - redone |= buffer.update(cx, |buf, cx| { - buf.redo_transaction(*buffer_transaction_id, cx) + redone |= buffer.update(cx, |buffer, cx| { + let redo_to = *buffer_transaction_id; + if let Some(entry) = buffer.peek_redo_stack() { + *buffer_transaction_id = entry.transaction_id(); + } + buffer.redo_to_transaction(redo_to, cx) }); } } @@ -610,32 +604,114 @@ impl MultiBuffer { None } - pub fn push_excerpt( + pub fn push_excerpts( &mut self, - props: ExcerptProperties, + buffer: ModelHandle, + ranges: impl IntoIterator>, cx: &mut ModelContext, - ) -> ExcerptId + ) -> Vec where O: text::ToOffset, { - self.insert_excerpt_after(&ExcerptId::max(), props, cx) + self.insert_excerpts_after(&ExcerptId::max(), buffer, ranges, cx) } - pub fn insert_excerpt_after( + pub fn push_excerpts_with_context_lines( + &mut self, + buffer: ModelHandle, + ranges: Vec>, + context_line_count: u32, + cx: &mut ModelContext, + ) -> Vec> + where + O: text::ToPoint + text::ToOffset, + { + let buffer_id = buffer.id(); + let buffer_snapshot = buffer.read(cx).snapshot(); + let max_point = buffer_snapshot.max_point(); + + let mut range_counts = Vec::new(); + let mut excerpt_ranges = Vec::new(); + let mut range_iter = ranges + .iter() + .map(|range| { + 
range.start.to_point(&buffer_snapshot)..range.end.to_point(&buffer_snapshot) + }) + .peekable(); + while let Some(range) = range_iter.next() { + let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0); + let mut excerpt_end = + Point::new(range.end.row + 1 + context_line_count, 0).min(max_point); + let mut ranges_in_excerpt = 1; + + while let Some(next_range) = range_iter.peek() { + if next_range.start.row <= excerpt_end.row + context_line_count { + excerpt_end = + Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point); + ranges_in_excerpt += 1; + range_iter.next(); + } else { + break; + } + } + + excerpt_ranges.push(excerpt_start..excerpt_end); + range_counts.push(ranges_in_excerpt); + } + + let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx); + + let mut anchor_ranges = Vec::new(); + let mut ranges = ranges.into_iter(); + for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.into_iter()) { + anchor_ranges.extend(ranges.by_ref().take(range_count).map(|range| { + let start = Anchor { + buffer_id: Some(buffer_id), + excerpt_id: excerpt_id.clone(), + text_anchor: buffer_snapshot.anchor_after(range.start), + }; + let end = Anchor { + buffer_id: Some(buffer_id), + excerpt_id: excerpt_id.clone(), + text_anchor: buffer_snapshot.anchor_after(range.end), + }; + start..end + })) + } + anchor_ranges + } + + pub fn insert_excerpts_after( &mut self, prev_excerpt_id: &ExcerptId, - props: ExcerptProperties, + buffer: ModelHandle, + ranges: impl IntoIterator>, cx: &mut ModelContext, - ) -> ExcerptId + ) -> Vec where O: text::ToOffset, { assert_eq!(self.history.transaction_depth, 0); self.sync(cx); - let buffer_snapshot = props.buffer.read(cx).snapshot(); - let range = buffer_snapshot.anchor_before(&props.range.start) - ..buffer_snapshot.anchor_after(&props.range.end); + let buffer_id = buffer.id(); + let buffer_snapshot = buffer.read(cx).snapshot(); + + let mut buffers = self.buffers.borrow_mut(); + 
let buffer_state = buffers.entry(buffer_id).or_insert_with(|| BufferState { + last_version: buffer_snapshot.version().clone(), + last_parse_count: buffer_snapshot.parse_count(), + last_selections_update_count: buffer_snapshot.selections_update_count(), + last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(), + last_file_update_count: buffer_snapshot.file_update_count(), + excerpts: Default::default(), + _subscriptions: [ + cx.observe(&buffer, |_, _, cx| cx.notify()), + cx.subscribe(&buffer, Self::on_buffer_event), + ], + buffer, + }); + let mut snapshot = self.snapshot.borrow_mut(); let mut cursor = snapshot.excerpts.cursor::>(); let mut new_excerpts = cursor.slice(&Some(prev_excerpt_id), Bias::Right, &()); @@ -655,40 +731,37 @@ impl MultiBuffer { next_id = next_excerpt.id.clone(); } - let id = ExcerptId::between(&prev_id, &next_id); - - let mut buffers = self.buffers.borrow_mut(); - let buffer_state = buffers - .entry(props.buffer.id()) - .or_insert_with(|| BufferState { - last_version: buffer_snapshot.version().clone(), - last_parse_count: buffer_snapshot.parse_count(), - last_selections_update_count: buffer_snapshot.selections_update_count(), - last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(), - excerpts: Default::default(), - _subscriptions: [ - cx.observe(&props.buffer, |_, _, cx| cx.notify()), - cx.subscribe(&props.buffer, Self::on_buffer_event), - ], - buffer: props.buffer.clone(), - }); - if let Err(ix) = buffer_state.excerpts.binary_search(&id) { - buffer_state.excerpts.insert(ix, id.clone()); + let mut ids = Vec::new(); + let mut ranges = ranges.into_iter().peekable(); + while let Some(range) = ranges.next() { + let id = ExcerptId::between(&prev_id, &next_id); + if let Err(ix) = buffer_state.excerpts.binary_search(&id) { + buffer_state.excerpts.insert(ix, id.clone()); + } + let range = buffer_snapshot.anchor_before(&range.start) + ..buffer_snapshot.anchor_after(&range.end); + let excerpt = Excerpt::new( + 
id.clone(), + buffer_id, + buffer_snapshot.clone(), + range, + ranges.peek().is_some() || cursor.item().is_some(), + ); + new_excerpts.push(excerpt, &()); + prev_id = id.clone(); + ids.push(id); } - let excerpt = Excerpt::new( - id.clone(), - props.buffer.id(), - buffer_snapshot, - range, - cursor.item().is_some(), - ); - new_excerpts.push(excerpt, &()); let edit_end = new_excerpts.summary().text.bytes; - new_excerpts.push_tree(cursor.suffix(&()), &()); + let suffix = cursor.suffix(&()); + let changed_trailing_excerpt = suffix.is_empty(); + new_excerpts.push_tree(suffix, &()); drop(cursor); snapshot.excerpts = new_excerpts; + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + } self.subscriptions.publish_mut([Edit { old: edit_start..edit_start, @@ -696,7 +769,7 @@ impl MultiBuffer { }]); cx.notify(); - id + ids } pub fn excerpt_ids_for_buffer(&self, buffer: &ModelHandle) -> Vec { @@ -706,12 +779,35 @@ impl MultiBuffer { .map_or(Vec::new(), |state| state.excerpts.clone()) } - pub fn excerpted_buffers<'a, T: ToOffset>( + pub fn excerpt_containing( + &self, + position: impl ToOffset, + cx: &AppContext, + ) -> Option<(ModelHandle, Range)> { + let snapshot = self.read(cx); + let position = position.to_offset(&snapshot); + + let mut cursor = snapshot.excerpts.cursor::(); + cursor.seek(&position, Bias::Right, &()); + cursor.item().map(|excerpt| { + ( + self.buffers + .borrow() + .get(&excerpt.buffer_id) + .unwrap() + .buffer + .clone(), + excerpt.range.clone(), + ) + }) + } + + pub fn range_to_buffer_ranges<'a, T: ToOffset>( &'a self, range: Range, cx: &AppContext, ) -> Vec<(ModelHandle, Range)> { - let snapshot = self.snapshot(cx); + let snapshot = self.read(cx); let start = range.start.to_offset(&snapshot); let end = range.end.to_offset(&snapshot); @@ -799,9 +895,15 @@ impl MultiBuffer { }); } } - new_excerpts.push_tree(cursor.suffix(&()), &()); + let suffix = cursor.suffix(&()); + let changed_trailing_excerpt = suffix.is_empty(); + 
new_excerpts.push_tree(suffix, &()); drop(cursor); snapshot.excerpts = new_excerpts; + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + } + self.subscriptions.publish_mut(edits); cx.notify(); } @@ -810,13 +912,13 @@ impl MultiBuffer { &'a self, position: T, cx: &AppContext, - ) -> (ModelHandle, language::Anchor) { + ) -> Option<(ModelHandle, language::Anchor)> { let snapshot = self.read(cx); let anchor = snapshot.anchor_before(position); - ( - self.buffers.borrow()[&anchor.buffer_id].buffer.clone(), + Some(( + self.buffers.borrow()[&anchor.buffer_id?].buffer.clone(), anchor.text_anchor, - ) + )) } fn on_buffer_event( @@ -828,18 +930,12 @@ impl MultiBuffer { cx.emit(event.clone()); } - pub fn format(&mut self, cx: &mut ModelContext) -> Task> { - let mut format_tasks = Vec::new(); - for BufferState { buffer, .. } in self.buffers.borrow().values() { - format_tasks.push(buffer.update(cx, |buffer, cx| buffer.format(cx))); - } - - cx.spawn(|_, _| async move { - for format in format_tasks { - format.await?; - } - Ok(()) - }) + pub fn all_buffers(&self) -> HashSet> { + self.buffers + .borrow() + .values() + .map(|state| state.buffer.clone()) + .collect() } pub fn save(&mut self, cx: &mut ModelContext) -> Task> { @@ -856,41 +952,6 @@ impl MultiBuffer { }) } - pub fn completions( - &self, - position: T, - cx: &mut ModelContext, - ) -> Task>>> - where - T: ToOffset, - { - let anchor = self.read(cx).anchor_before(position); - let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone(); - let completions = - buffer.update(cx, |buffer, cx| buffer.completions(anchor.text_anchor, cx)); - cx.spawn(|this, cx| async move { - completions.await.map(|completions| { - let snapshot = this.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); - completions - .into_iter() - .map(|completion| Completion { - old_range: snapshot.anchor_in_excerpt( - anchor.excerpt_id.clone(), - completion.old_range.start, - ) - ..snapshot.anchor_in_excerpt( - 
anchor.excerpt_id.clone(), - completion.old_range.end, - ), - new_text: completion.new_text, - label: completion.label, - lsp_completion: completion.lsp_completion, - }) - .collect() - }) - }) - } - pub fn is_completion_trigger(&self, position: T, text: &str, cx: &AppContext) -> bool where T: ToOffset, @@ -911,45 +972,13 @@ impl MultiBuffer { let snapshot = self.snapshot(cx); let anchor = snapshot.anchor_before(position); - let buffer = self.buffers.borrow()[&anchor.buffer_id].buffer.clone(); - buffer - .read(cx) - .completion_triggers() - .iter() - .any(|string| string == text) - } - - pub fn apply_additional_edits_for_completion( - &self, - completion: Completion, - cx: &mut ModelContext, - ) -> Task> { - let buffer = if let Some(buffer_state) = self - .buffers - .borrow() - .get(&completion.old_range.start.buffer_id) - { - buffer_state.buffer.clone() - } else { - return Task::ready(Ok(())); - }; - - let apply_edits = buffer.update(cx, |buffer, cx| { - buffer.apply_additional_edits_for_completion( - Completion { - old_range: completion.old_range.start.text_anchor - ..completion.old_range.end.text_anchor, - new_text: completion.new_text, - label: completion.label, - lsp_completion: completion.lsp_completion, - }, - true, - cx, - ) - }); - cx.foreground().spawn(async move { - apply_edits.await?; - Ok(()) + anchor.buffer_id.map_or(false, |buffer_id| { + let buffer = self.buffers.borrow()[&buffer_id].buffer.clone(); + buffer + .read(cx) + .completion_triggers() + .iter() + .any(|string| string == text) }) } @@ -965,6 +994,16 @@ impl MultiBuffer { self.as_singleton()?.read(cx).file() } + pub fn title(&self, cx: &AppContext) -> String { + if let Some(title) = self.title.clone() { + title + } else if let Some(file) = self.file(cx) { + file.file_name(cx).to_string_lossy().into() + } else { + "untitled".into() + } + } + #[cfg(test)] pub fn is_parsing(&self, cx: &AppContext) -> bool { self.as_singleton().unwrap().read(cx).is_parsing() @@ -984,6 +1023,7 @@ impl MultiBuffer 
{ let parse_count = buffer.parse_count(); let selections_update_count = buffer.selections_update_count(); let diagnostics_update_count = buffer.diagnostics_update_count(); + let file_update_count = buffer.file_update_count(); let buffer_edited = version.changed_since(&buffer_state.last_version); let buffer_reparsed = parse_count > buffer_state.last_parse_count; @@ -991,15 +1031,18 @@ impl MultiBuffer { selections_update_count > buffer_state.last_selections_update_count; let buffer_diagnostics_updated = diagnostics_update_count > buffer_state.last_diagnostics_update_count; + let buffer_file_updated = file_update_count > buffer_state.last_file_update_count; if buffer_edited || buffer_reparsed || buffer_selections_updated || buffer_diagnostics_updated + || buffer_file_updated { buffer_state.last_version = version; buffer_state.last_parse_count = parse_count; buffer_state.last_selections_update_count = selections_update_count; buffer_state.last_diagnostics_update_count = diagnostics_update_count; + buffer_state.last_file_update_count = file_update_count; excerpts_to_edit.extend( buffer_state .excerpts @@ -1105,6 +1148,88 @@ impl MultiBuffer { self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx); } + + pub fn randomly_edit_excerpts( + &mut self, + rng: &mut impl rand::Rng, + mutation_count: usize, + cx: &mut ModelContext, + ) { + use rand::prelude::*; + use std::env; + use text::RandomCharIter; + + let max_excerpts = env::var("MAX_EXCERPTS") + .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) + .unwrap_or(5); + + let mut buffers = Vec::new(); + for _ in 0..mutation_count { + let excerpt_ids = self + .buffers + .borrow() + .values() + .flat_map(|b| &b.excerpts) + .cloned() + .collect::>(); + if excerpt_ids.len() == 0 || (rng.gen() && excerpt_ids.len() < max_excerpts) { + let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() { + let text = RandomCharIter::new(&mut *rng).take(10).collect::(); + buffers.push(cx.add_model(|cx| 
Buffer::new(0, text, cx))); + let buffer = buffers.last().unwrap(); + log::info!( + "Creating new buffer {} with text: {:?}", + buffer.id(), + buffer.read(cx).text() + ); + buffers.last().unwrap().clone() + } else { + self.buffers + .borrow() + .values() + .choose(rng) + .unwrap() + .buffer + .clone() + }; + + let buffer = buffer_handle.read(cx); + let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); + let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + log::info!( + "Inserting excerpt from buffer {} and range {:?}: {:?}", + buffer_handle.id(), + start_ix..end_ix, + &buffer.text()[start_ix..end_ix] + ); + + let excerpt_id = self.push_excerpts(buffer_handle.clone(), [start_ix..end_ix], cx); + log::info!("Inserted with id: {:?}", excerpt_id); + } else { + let remove_count = rng.gen_range(1..=excerpt_ids.len()); + let mut excerpts_to_remove = excerpt_ids + .choose_multiple(rng, remove_count) + .cloned() + .collect::>(); + excerpts_to_remove.sort(); + log::info!("Removing excerpts {:?}", excerpts_to_remove); + self.remove_excerpts(&excerpts_to_remove, cx); + } + } + } + + pub fn randomly_mutate( + &mut self, + rng: &mut impl rand::Rng, + mutation_count: usize, + cx: &mut ModelContext, + ) { + if rng.gen_bool(0.7) || self.singleton { + self.randomly_edit(rng, mutation_count, cx); + } else { + self.randomly_edit_excerpts(rng, mutation_count, cx); + } + } } impl Entity for MultiBuffer { @@ -1360,6 +1485,48 @@ impl MultiBufferSnapshot { } } + pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { + if let Some(excerpt) = self.as_singleton() { + return excerpt.buffer.offset_to_point_utf16(offset); + } + + let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>(); + cursor.seek(&offset, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let (start_offset, start_point) = cursor.start(); + let overshoot = offset - start_offset; + let excerpt_start_offset = 
excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt.range.start.to_point_utf16(&excerpt.buffer); + let buffer_point = excerpt + .buffer + .offset_to_point_utf16(excerpt_start_offset + overshoot); + *start_point + (buffer_point - excerpt_start_point) + } else { + self.excerpts.summary().text.lines_utf16 + } + } + + pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 { + if let Some(excerpt) = self.as_singleton() { + return excerpt.buffer.point_to_point_utf16(point); + } + + let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>(); + cursor.seek(&point, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let (start_offset, start_point) = cursor.start(); + let overshoot = point - start_offset; + let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); + let excerpt_start_point_utf16 = excerpt.range.start.to_point_utf16(&excerpt.buffer); + let buffer_point = excerpt + .buffer + .point_to_point_utf16(excerpt_start_point + overshoot); + *start_point + (buffer_point - excerpt_start_point_utf16) + } else { + self.excerpts.summary().text.lines_utf16 + } + } + pub fn point_to_offset(&self, point: Point) -> usize { if let Some(excerpt) = self.as_singleton() { return excerpt.buffer.point_to_offset(point); @@ -1523,7 +1690,7 @@ impl MultiBufferSnapshot { let mut position = D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { - if excerpt.id == anchor.excerpt_id && excerpt.buffer_id == anchor.buffer_id { + if excerpt.id == anchor.excerpt_id && Some(excerpt.buffer_id) == anchor.buffer_id { let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); let excerpt_buffer_end = excerpt.range.end.summary::(&excerpt.buffer); let buffer_position = cmp::min( @@ -1572,7 +1739,7 @@ impl MultiBufferSnapshot { let position = D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id && excerpt.buffer_id == buffer_id { + 
if excerpt.id == *excerpt_id && Some(excerpt.buffer_id) == buffer_id { let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); let excerpt_buffer_end = excerpt.range.end.summary::(&excerpt.buffer); summaries.extend( @@ -1665,7 +1832,7 @@ impl MultiBufferSnapshot { text_anchor = excerpt.range.end.clone(); } Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor, } @@ -1682,7 +1849,7 @@ impl MultiBufferSnapshot { text_anchor = excerpt.range.start.clone(); } Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor, } @@ -1712,7 +1879,7 @@ impl MultiBufferSnapshot { let offset = position.to_offset(self); if let Some(excerpt) = self.as_singleton() { return Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor: excerpt.buffer.anchor_at(offset, bias), }; @@ -1734,7 +1901,7 @@ impl MultiBufferSnapshot { let text_anchor = excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias)); Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor, } @@ -1753,7 +1920,7 @@ impl MultiBufferSnapshot { let text_anchor = excerpt.clip_anchor(text_anchor); drop(cursor); return Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id, text_anchor, }; @@ -1768,27 +1935,72 @@ impl MultiBufferSnapshot { } else if let Some((buffer_id, buffer_snapshot)) = self.buffer_snapshot_for_excerpt(&anchor.excerpt_id) { - anchor.buffer_id == buffer_id && buffer_snapshot.can_resolve(&anchor.text_anchor) + anchor.buffer_id == Some(buffer_id) && buffer_snapshot.can_resolve(&anchor.text_anchor) } else { false } } - pub fn range_contains_excerpt_boundary(&self, range: Range) -> bool { - let start = range.start.to_offset(self); - let end = range.end.to_offset(self); - let mut 
cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); - cursor.seek(&start, Bias::Right, &()); - let start_id = cursor - .item() - .or_else(|| cursor.prev_item()) - .map(|excerpt| &excerpt.id); - cursor.seek_forward(&end, Bias::Right, &()); - let end_id = cursor - .item() - .or_else(|| cursor.prev_item()) - .map(|excerpt| &excerpt.id); - start_id != end_id + pub fn excerpt_boundaries_in_range<'a, R, T>( + &'a self, + range: R, + ) -> impl Iterator + 'a + where + R: RangeBounds, + T: ToOffset, + { + let start_offset; + let start = match range.start_bound() { + Bound::Included(start) => { + start_offset = start.to_offset(self); + Bound::Included(start_offset) + } + Bound::Excluded(start) => { + start_offset = start.to_offset(self); + Bound::Excluded(start_offset) + } + Bound::Unbounded => { + start_offset = 0; + Bound::Unbounded + } + }; + let end = match range.end_bound() { + Bound::Included(end) => Bound::Included(end.to_offset(self)), + Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)), + Bound::Unbounded => Bound::Unbounded, + }; + let bounds = (start, end); + + let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + cursor.seek(&start_offset, Bias::Right, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + if !bounds.contains(&cursor.start().0) { + cursor.next(&()); + } + + let mut prev_buffer_id = cursor.prev_item().map(|excerpt| excerpt.buffer_id); + std::iter::from_fn(move || { + if self.singleton { + None + } else if bounds.contains(&cursor.start().0) { + let excerpt = cursor.item()?; + let starts_new_buffer = Some(excerpt.buffer_id) != prev_buffer_id; + let boundary = ExcerptBoundary { + row: cursor.start().1.row, + buffer: excerpt.buffer.clone(), + range: excerpt.range.clone(), + starts_new_buffer, + }; + + prev_buffer_id = Some(excerpt.buffer_id); + cursor.next(&()); + Some(boundary) + } else { + None + } + }) } pub fn parse_count(&self) -> usize { @@ -1849,6 +2061,10 @@ impl MultiBufferSnapshot { 
self.diagnostics_update_count } + pub fn trailing_excerpt_update_count(&self) -> usize { + self.trailing_excerpt_update_count + } + pub fn language(&self) -> Option<&Arc> { self.excerpts .iter() @@ -1985,12 +2201,12 @@ impl MultiBufferSnapshot { .flat_map(move |(replica_id, selections)| { selections.map(move |selection| { let mut start = Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor: selection.start.clone(), }; let mut end = Anchor { - buffer_id: excerpt.buffer_id, + buffer_id: Some(excerpt.buffer_id), excerpt_id: excerpt.id.clone(), text_anchor: selection.end.clone(), }; @@ -2021,12 +2237,13 @@ impl History { fn start_transaction(&mut self, now: Instant) -> Option { self.transaction_depth += 1; if self.transaction_depth == 1 { - let id = post_inc(&mut self.next_transaction_id); + let id = self.next_transaction_id.tick(); self.undo_stack.push(Transaction { id, buffer_transactions: Default::default(), first_edit_at: now, last_edit_at: now, + suppress_grouping: false, }); Some(id) } else { @@ -2037,7 +2254,7 @@ impl History { fn end_transaction( &mut self, now: Instant, - buffer_transactions: HashSet<(usize, TransactionId)>, + buffer_transactions: HashMap, ) -> bool { assert_ne!(self.transaction_depth, 0); self.transaction_depth -= 1; @@ -2048,7 +2265,12 @@ impl History { } else { let transaction = self.undo_stack.last_mut().unwrap(); transaction.last_edit_at = now; - transaction.buffer_transactions.extend(buffer_transactions); + for (buffer_id, transaction_id) in buffer_transactions { + transaction + .buffer_transactions + .entry(buffer_id) + .or_insert(transaction_id); + } true } } else { @@ -2056,21 +2278,47 @@ impl History { } } - fn pop_undo(&mut self) -> Option<&Transaction> { + fn push_transaction<'a, T>(&mut self, buffer_transactions: T, now: Instant) + where + T: IntoIterator, &'a language::Transaction)>, + { + assert_eq!(self.transaction_depth, 0); + let transaction = Transaction { 
+ id: self.next_transaction_id.tick(), + buffer_transactions: buffer_transactions + .into_iter() + .map(|(buffer, transaction)| (buffer.id(), transaction.id)) + .collect(), + first_edit_at: now, + last_edit_at: now, + suppress_grouping: false, + }; + if !transaction.buffer_transactions.is_empty() { + self.undo_stack.push(transaction); + } + } + + fn finalize_last_transaction(&mut self) { + if let Some(transaction) = self.undo_stack.last_mut() { + transaction.suppress_grouping = true; + } + } + + fn pop_undo(&mut self) -> Option<&mut Transaction> { assert_eq!(self.transaction_depth, 0); if let Some(transaction) = self.undo_stack.pop() { self.redo_stack.push(transaction); - self.redo_stack.last() + self.redo_stack.last_mut() } else { None } } - fn pop_redo(&mut self) -> Option<&Transaction> { + fn pop_redo(&mut self) -> Option<&mut Transaction> { assert_eq!(self.transaction_depth, 0); if let Some(transaction) = self.redo_stack.pop() { self.undo_stack.push(transaction); - self.undo_stack.last() + self.undo_stack.last_mut() } else { None } @@ -2082,7 +2330,9 @@ impl History { if let Some(mut transaction) = transactions.next_back() { while let Some(prev_transaction) = transactions.next_back() { - if transaction.first_edit_at - prev_transaction.last_edit_at <= self.group_interval + if !prev_transaction.suppress_grouping + && transaction.first_edit_at - prev_transaction.last_edit_at + <= self.group_interval { transaction = prev_transaction; new_len -= 1; @@ -2097,6 +2347,14 @@ impl History { if let Some(transaction) = transactions_to_merge.last() { last_transaction.last_edit_at = transaction.last_edit_at; } + for to_merge in transactions_to_merge { + for (buffer_id, transaction_id) in &to_merge.buffer_transactions { + last_transaction + .buffer_transactions + .entry(*buffer_id) + .or_insert(*transaction_id); + } + } } self.undo_stack.truncate(new_len); @@ -2188,7 +2446,7 @@ impl Excerpt { } fn contains(&self, anchor: &Anchor) -> bool { - self.buffer_id == anchor.buffer_id 
+ Some(self.buffer_id) == anchor.buffer_id && self .range .start @@ -2487,6 +2745,24 @@ impl ToPoint for Point { } } +impl ToPointUtf16 for usize { + fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { + snapshot.offset_to_point_utf16(*self) + } +} + +impl ToPointUtf16 for Point { + fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 { + snapshot.point_to_point_utf16(*self) + } +} + +impl ToPointUtf16 for PointUtf16 { + fn to_point_utf16<'a>(&self, _: &MultiBufferSnapshot) -> PointUtf16 { + *self + } +} + pub fn char_kind(c: char) -> CharKind { if c == '\n' { CharKind::Newline @@ -2564,13 +2840,7 @@ mod tests { let subscription = multibuffer.update(cx, |multibuffer, cx| { let subscription = multibuffer.subscribe(); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_1, - range: Point::new(1, 2)..Point::new(2, 5), - }, - cx, - ); + multibuffer.push_excerpts(buffer_1.clone(), [Point::new(1, 2)..Point::new(2, 5)], cx); assert_eq!( subscription.consume().into_inner(), [Edit { @@ -2579,20 +2849,8 @@ mod tests { }] ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_1, - range: Point::new(3, 3)..Point::new(4, 4), - }, - cx, - ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: Point::new(3, 1)..Point::new(3, 3), - }, - cx, - ); + multibuffer.push_excerpts(buffer_1.clone(), [Point::new(3, 3)..Point::new(4, 4)], cx); + multibuffer.push_excerpts(buffer_2.clone(), [Point::new(3, 1)..Point::new(3, 3)], cx); assert_eq!( subscription.consume().into_inner(), [Edit { @@ -2625,12 +2883,47 @@ mod tests { ); assert_eq!(snapshot.buffer_rows(4).collect::>(), [Some(3)]); assert_eq!(snapshot.buffer_rows(5).collect::>(), []); - assert!(!snapshot.range_contains_excerpt_boundary(Point::new(1, 0)..Point::new(1, 5))); - assert!(snapshot.range_contains_excerpt_boundary(Point::new(1, 0)..Point::new(2, 0))); - assert!(snapshot.range_contains_excerpt_boundary(Point::new(1, 
0)..Point::new(4, 0))); - assert!(!snapshot.range_contains_excerpt_boundary(Point::new(2, 0)..Point::new(3, 0))); - assert!(!snapshot.range_contains_excerpt_boundary(Point::new(4, 0)..Point::new(4, 2))); - assert!(!snapshot.range_contains_excerpt_boundary(Point::new(4, 2)..Point::new(4, 2))); + + assert_eq!( + boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot), + &[ + (0, "bbbb\nccccc".to_string(), true), + (2, "ddd\neeee".to_string(), false), + (4, "jj".to_string(), true), + ] + ); + assert_eq!( + boundaries_in_range(Point::new(0, 0)..Point::new(2, 0), &snapshot), + &[(0, "bbbb\nccccc".to_string(), true)] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(1, 5), &snapshot), + &[] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(2, 0), &snapshot), + &[] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), + &[(2, "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot), + &[(2, "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot), + &[(2, "ddd\neeee".to_string(), false)] + ); + assert_eq!( + boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot), + &[(4, "jj".to_string(), true)] + ); + assert_eq!( + boundaries_in_range(Point::new(4, 2)..Point::new(4, 2), &snapshot), + &[] + ); buffer_1.update(cx, |buffer, cx| { buffer.edit( @@ -2702,6 +2995,61 @@ mod tests { "eeee", // ) ); + + fn boundaries_in_range( + range: Range, + snapshot: &MultiBufferSnapshot, + ) -> Vec<(u32, String, bool)> { + snapshot + .excerpt_boundaries_in_range(range) + .map(|boundary| { + ( + boundary.row, + boundary + .buffer + .text_for_range(boundary.range) + .collect::(), + boundary.starts_new_buffer, + ) + }) + .collect::>() + } + } + + #[gpui::test] + fn test_excerpts_with_context_lines(cx: &mut MutableAppContext) { + let buffer = cx.add_model(|cx| Buffer::new(0, 
sample_text(20, 3, 'a'), cx)); + let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); + let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts_with_context_lines( + buffer.clone(), + vec![ + Point::new(3, 2)..Point::new(4, 2), + Point::new(7, 1)..Point::new(7, 3), + Point::new(15, 0)..Point::new(15, 0), + ], + 2, + cx, + ) + }); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!( + snapshot.text(), + "bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n" + ); + + assert_eq!( + anchor_ranges + .iter() + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(2, 2)..Point::new(3, 2), + Point::new(6, 1)..Point::new(6, 3), + Point::new(12, 0)..Point::new(12, 0) + ] + ); } #[gpui::test] @@ -2740,20 +3088,8 @@ mod tests { let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx)); let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_1, - range: 0..4, - }, - cx, - ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: 0..5, - }, - cx, - ); + multibuffer.push_excerpts(buffer_1.clone(), [0..4], cx); + multibuffer.push_excerpts(buffer_2.clone(), [0..5], cx); multibuffer }); let old_snapshot = multibuffer.read(cx).snapshot(cx); @@ -2802,44 +3138,22 @@ mod tests { // Add an excerpt from buffer 1 that spans this new insertion. buffer_1.update(cx, |buffer, cx| buffer.edit([4..4], "123", cx)); let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_1, - range: 0..7, - }, - cx, - ) + multibuffer + .push_excerpts(buffer_1.clone(), [0..7], cx) + .pop() + .unwrap() }); let snapshot_1 = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot_1.text(), "abcd123"); // Replace the buffer 1 excerpt with new excerpts from buffer 2. 
- let (excerpt_id_2, excerpt_id_3, _) = multibuffer.update(cx, |multibuffer, cx| { + let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { multibuffer.remove_excerpts([&excerpt_id_1], cx); - ( - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: 0..4, - }, - cx, - ), - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: 6..10, - }, - cx, - ), - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: 12..16, - }, - cx, - ), - ) + let mut ids = multibuffer + .push_excerpts(buffer_2.clone(), [0..4, 6..10, 12..16], cx) + .into_iter(); + (ids.next().unwrap(), ids.next().unwrap()) }); let snapshot_2 = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); @@ -2876,14 +3190,10 @@ mod tests { // that intersects the old excerpt. let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| { multibuffer.remove_excerpts([&excerpt_id_3], cx); - multibuffer.insert_excerpt_after( - &excerpt_id_3, - ExcerptProperties { - buffer: &buffer_2, - range: 5..8, - }, - cx, - ) + multibuffer + .insert_excerpts_after(&excerpt_id_3, buffer_2.clone(), [5..8], cx) + .pop() + .unwrap() }); let snapshot_3 = multibuffer.read(cx).snapshot(cx); @@ -3023,14 +3333,15 @@ mod tests { ); let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.insert_excerpt_after( - &prev_excerpt_id, - ExcerptProperties { - buffer: &buffer_handle, - range: start_ix..end_ix, - }, - cx, - ) + multibuffer + .insert_excerpts_after( + &prev_excerpt_id, + buffer_handle.clone(), + [start_ix..end_ix], + cx, + ) + .pop() + .unwrap() }); excerpt_ids.insert(excerpt_ix, excerpt_id); @@ -3249,8 +3560,9 @@ mod tests { start_ix..end_ix ); - let excerpted_buffer_ranges = - multibuffer.read(cx).excerpted_buffers(start_ix..end_ix, cx); + let excerpted_buffer_ranges = multibuffer + .read(cx) + .range_to_buffer_ranges(start_ix..end_ix, cx); let excerpted_buffers_text = excerpted_buffer_ranges 
.into_iter() .map(|(buffer, buffer_range)| { @@ -3334,20 +3646,8 @@ mod tests { let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let group_interval = multibuffer.read(cx).history.group_interval; multibuffer.update(cx, |multibuffer, cx| { - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_1, - range: 0..buffer_1.read(cx).len(), - }, - cx, - ); - multibuffer.push_excerpt( - ExcerptProperties { - buffer: &buffer_2, - range: 0..buffer_2.read(cx).len(), - }, - cx, - ); + multibuffer.push_excerpts(buffer_1.clone(), [0..buffer_1.read(cx).len()], cx); + multibuffer.push_excerpts(buffer_2.clone(), [0..buffer_2.read(cx).len()], cx); }); let mut now = Instant::now(); @@ -3373,12 +3673,29 @@ mod tests { multibuffer.end_transaction_at(now, cx); assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + // Edit buffer 1 through the multibuffer now += 2 * group_interval; multibuffer.start_transaction_at(now, cx); multibuffer.edit([2..2], "C", cx); multibuffer.end_transaction_at(now, cx); assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); + // Edit buffer 1 independently + buffer_1.update(cx, |buffer_1, cx| { + buffer_1.start_transaction_at(now); + buffer_1.edit([3..3], "D", cx); + buffer_1.end_transaction_at(now, cx); + + now += 2 * group_interval; + buffer_1.start_transaction_at(now); + buffer_1.edit([4..4], "E", cx); + buffer_1.end_transaction_at(now, cx); + }); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); + + // An undo in the multibuffer undoes the multibuffer transaction + // and also any individual buffer edits that have occured since + // that transaction. 
multibuffer.undo(cx); assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); @@ -3389,10 +3706,16 @@ mod tests { assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678"); - buffer_1.update(cx, |buffer_1, cx| buffer_1.undo(cx)); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); + // Undo buffer 2 independently. + buffer_2.update(cx, |buffer_2, cx| buffer_2.undo(cx)); + assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\n5678"); + + // An undo in the multibuffer undoes the components of the + // the last multibuffer transaction that are not already undone. + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\n5678"); multibuffer.undo(cx); assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); @@ -3400,17 +3723,11 @@ mod tests { multibuffer.redo(cx); assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); - - multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); - buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx)); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); + assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "C1234\n5678"); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); }); } } diff --git a/crates/editor/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs index 03d16db1efc86ff8e80aff87e871feebdc258682..c51eb2a4c73110e8013a4fbc68ef143251b23f53 100644 --- a/crates/editor/src/multi_buffer/anchor.rs +++ b/crates/editor/src/multi_buffer/anchor.rs @@ -9,7 +9,7 @@ use text::{rope::TextDimension, Point}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { - pub(crate) buffer_id: usize, + pub(crate) buffer_id: Option, pub(crate) excerpt_id: 
ExcerptId, pub(crate) text_anchor: text::Anchor, } @@ -17,7 +17,7 @@ pub struct Anchor { impl Anchor { pub fn min() -> Self { Self { - buffer_id: 0, + buffer_id: None, excerpt_id: ExcerptId::min(), text_anchor: text::Anchor::min(), } @@ -25,7 +25,7 @@ impl Anchor { pub fn max() -> Self { Self { - buffer_id: 0, + buffer_id: None, excerpt_id: ExcerptId::max(), text_anchor: text::Anchor::max(), } @@ -46,11 +46,11 @@ impl Anchor { // Even though the anchor refers to a valid excerpt the underlying buffer might have // changed. In that case, treat the anchor as if it were at the start of that // excerpt. - if self.buffer_id == buffer_id && other.buffer_id == buffer_id { + if self.buffer_id == Some(buffer_id) && other.buffer_id == Some(buffer_id) { self.text_anchor.cmp(&other.text_anchor, buffer_snapshot) - } else if self.buffer_id == buffer_id { + } else if self.buffer_id == Some(buffer_id) { Ok(Ordering::Greater) - } else if other.buffer_id == buffer_id { + } else if other.buffer_id == Some(buffer_id) { Ok(Ordering::Less) } else { Ok(Ordering::Equal) @@ -68,7 +68,7 @@ impl Anchor { if let Some((buffer_id, buffer_snapshot)) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) { - if self.buffer_id == buffer_id { + if self.buffer_id == Some(buffer_id) { return Self { buffer_id: self.buffer_id, excerpt_id: self.excerpt_id.clone(), @@ -85,7 +85,7 @@ impl Anchor { if let Some((buffer_id, buffer_snapshot)) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) { - if self.buffer_id == buffer_id { + if self.buffer_id == Some(buffer_id) { return Self { buffer_id: self.buffer_id, excerpt_id: self.excerpt_id.clone(), diff --git a/crates/find/src/find.rs b/crates/find/src/find.rs index 8844f2cf907f79e7dafb149f92eb5e6376041c39..47d31cf76597f67e24acce6e8576a8b53a52c62a 100644 --- a/crates/find/src/find.rs +++ b/crates/find/src/find.rs @@ -355,11 +355,8 @@ impl FindBar { if let Some(mut index) = self.active_match_index { if let Some(editor) = self.active_editor.as_ref() { 
editor.update(cx, |editor, cx| { - let newest_selection = editor.newest_anchor_selection().cloned(); - if let Some(((_, ranges), newest_selection)) = editor - .highlighted_ranges_for_type::() - .zip(newest_selection) - { + let newest_selection = editor.newest_anchor_selection().clone(); + if let Some((_, ranges)) = editor.highlighted_ranges_for_type::() { let position = newest_selection.head(); let buffer = editor.buffer().read(cx).read(cx); if ranges[index].start.cmp(&position, &buffer).unwrap().is_gt() { @@ -467,7 +464,7 @@ impl FindBar { self.pending_search = Some(cx.spawn(|this, mut cx| async move { match ranges.await { Ok(ranges) => { - if let Some(editor) = cx.read(|cx| editor.upgrade(cx)) { + if let Some(editor) = editor.upgrade(&cx) { this.update(&mut cx, |this, cx| { this.highlighted_editors.insert(editor.downgrade()); editor.update(cx, |editor, cx| { @@ -502,7 +499,7 @@ impl FindBar { fn active_match_index(&mut self, cx: &mut ViewContext) -> Option { let editor = self.active_editor.as_ref()?; let editor = editor.read(cx); - let position = editor.newest_anchor_selection()?.head(); + let position = editor.newest_anchor_selection().head(); let ranges = editor.highlighted_ranges_for_type::()?.1; if ranges.is_empty() { None @@ -655,7 +652,7 @@ mod tests { ) }); let editor = cx.add_view(Default::default(), |cx| { - Editor::new(buffer.clone(), Arc::new(EditorSettings::test), cx) + Editor::new(buffer.clone(), Arc::new(EditorSettings::test), None, cx) }); let find_bar = cx.add_view(Default::default(), |cx| { diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 3a2124de95e3a04e1e360dda59fe3e37c62b253d..1ba05c7b4ac2df25e10915f84d1812db388da74c 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -80,8 +80,14 @@ pub trait UpdateModel { } pub trait UpgradeModelHandle { - fn upgrade_model_handle(&self, handle: WeakModelHandle) - -> Option>; + fn upgrade_model_handle( + &self, + handle: &WeakModelHandle, + ) -> Option>; +} + +pub trait 
UpgradeViewHandle { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option>; } pub trait ReadView { @@ -558,12 +564,18 @@ impl UpdateModel for AsyncAppContext { impl UpgradeModelHandle for AsyncAppContext { fn upgrade_model_handle( &self, - handle: WeakModelHandle, + handle: &WeakModelHandle, ) -> Option> { self.0.borrow_mut().upgrade_model_handle(handle) } } +impl UpgradeViewHandle for AsyncAppContext { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option> { + self.0.borrow_mut().upgrade_view_handle(handle) + } +} + impl ReadModelWith for AsyncAppContext { fn read_model_with( &self, @@ -831,6 +843,17 @@ impl MutableAppContext { .push(handler); } + pub fn add_async_action(&mut self, mut handler: F) + where + A: Action, + V: View, + F: 'static + FnMut(&mut V, &A, &mut ViewContext) -> Option>>, + { + self.add_action(move |view, action, cx| { + handler(view, action, cx).map(|task| task.detach_and_log_err(cx)); + }) + } + pub fn add_global_action(&mut self, mut handler: F) where A: Action, @@ -1721,12 +1744,18 @@ impl UpdateModel for MutableAppContext { impl UpgradeModelHandle for MutableAppContext { fn upgrade_model_handle( &self, - handle: WeakModelHandle, + handle: &WeakModelHandle, ) -> Option> { self.cx.upgrade_model_handle(handle) } } +impl UpgradeViewHandle for MutableAppContext { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option> { + self.cx.upgrade_view_handle(handle) + } +} + impl ReadView for MutableAppContext { fn read_view(&self, handle: &ViewHandle) -> &T { if let Some(view) = self.cx.views.get(&(handle.window_id, handle.view_id)) { @@ -1835,7 +1864,7 @@ impl ReadModel for AppContext { impl UpgradeModelHandle for AppContext { fn upgrade_model_handle( &self, - handle: WeakModelHandle, + handle: &WeakModelHandle, ) -> Option> { if self.models.contains_key(&handle.model_id) { Some(ModelHandle::new(handle.model_id, &self.ref_counts)) @@ -1845,6 +1874,20 @@ impl UpgradeModelHandle for AppContext { } } +impl 
UpgradeViewHandle for AppContext { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option> { + if self.ref_counts.lock().is_entity_alive(handle.view_id) { + Some(ViewHandle::new( + handle.window_id, + handle.view_id, + &self.ref_counts, + )) + } else { + None + } + } +} + impl ReadView for AppContext { fn read_view(&self, handle: &ViewHandle) -> &T { if let Some(view) = self.views.get(&(handle.window_id, handle.view_id)) { @@ -2217,7 +2260,7 @@ impl UpdateModel for ModelContext<'_, M> { impl UpgradeModelHandle for ModelContext<'_, M> { fn upgrade_model_handle( &self, - handle: WeakModelHandle, + handle: &WeakModelHandle, ) -> Option> { self.cx.upgrade_model_handle(handle) } @@ -2547,12 +2590,18 @@ impl ReadModel for ViewContext<'_, V> { impl UpgradeModelHandle for ViewContext<'_, V> { fn upgrade_model_handle( &self, - handle: WeakModelHandle, + handle: &WeakModelHandle, ) -> Option> { self.cx.upgrade_model_handle(handle) } } +impl UpgradeViewHandle for ViewContext<'_, V> { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option> { + self.cx.upgrade_view_handle(handle) + } +} + impl UpdateModel for ViewContext<'_, V> { fn update_model( &mut self, @@ -2654,7 +2703,7 @@ impl ModelHandle { let (mut tx, mut rx) = mpsc::channel(1); let mut cx = cx.cx.borrow_mut(); let subscription = cx.observe(self, move |_, _| { - tx.blocking_send(()).ok(); + tx.try_send(()).ok(); }); let duration = if std::env::var("CI").is_ok() { @@ -2850,7 +2899,7 @@ impl WeakModelHandle { self.model_id } - pub fn upgrade(self, cx: &impl UpgradeModelHandle) -> Option> { + pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option> { cx.upgrade_model_handle(self) } } @@ -2958,7 +3007,7 @@ impl ViewHandle { let (mut tx, mut rx) = mpsc::channel(1); let mut cx = cx.cx.borrow_mut(); let subscription = cx.observe(self, move |_, _| { - tx.blocking_send(()).ok(); + tx.try_send(()).ok(); }); let duration = if std::env::var("CI").is_ok() { @@ -3266,16 +3315,8 @@ impl WeakViewHandle { 
self.view_id } - pub fn upgrade(&self, cx: &AppContext) -> Option> { - if cx.ref_counts.lock().is_entity_alive(self.view_id) { - Some(ViewHandle::new( - self.window_id, - self.view_id, - &cx.ref_counts, - )) - } else { - None - } + pub fn upgrade(&self, cx: &impl UpgradeViewHandle) -> Option> { + cx.upgrade_view_handle(self) } } diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index 8021391978d76e5a24dc954f048871e03c613b45..e9e71e4b72ac348e7adf1db8d0ecfe4db732fdce 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -550,8 +550,11 @@ impl Background { pub async fn simulate_random_delay(&self) { match self { Self::Deterministic { executor, .. } => { - if executor.state.lock().rng.gen_range(0..100) < 20 { - yield_now().await; + if executor.state.lock().rng.gen_bool(0.2) { + let yields = executor.state.lock().rng.gen_range(1..=10); + for _ in 0..yields { + yield_now().await; + } } } _ => panic!("this method can only be called on a deterministic executor"), diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index ba75964039e9c36a7d439a3e2246fd3c9fca9b7c..367596a13f2c08c52cce5d2b9c3bf6e6033b1c8a 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -107,6 +107,10 @@ unsafe fn build_classes() { sel!(scrollWheel:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(cancelOperation:), + cancel_operation as extern "C" fn(&Object, Sel, id), + ); decl.add_method( sel!(makeBackingLayer), @@ -538,6 +542,34 @@ extern "C" fn handle_view_event(this: &Object, _: Sel, native_event: id) { } } +// Allows us to receive `cmd-.` (the shortcut for closing a dialog) +// https://bugs.eclipse.org/bugs/show_bug.cgi?id=300620#c6 +extern "C" fn cancel_operation(this: &Object, _sel: Sel, _sender: id) { + let window_state = unsafe { get_window_state(this) }; + let mut window_state_borrow = window_state.as_ref().borrow_mut(); + + let 
chars = ".".to_string(); + let keystroke = Keystroke { + cmd: true, + ctrl: false, + alt: false, + shift: false, + key: chars.clone(), + }; + let event = Event::KeyDown { + keystroke: keystroke.clone(), + chars: chars.clone(), + is_held: false, + }; + + window_state_borrow.last_fresh_keydown = Some((keystroke, chars)); + if let Some(mut callback) = window_state_borrow.event_callback.take() { + drop(window_state_borrow); + callback(event); + window_state.borrow_mut().event_callback = Some(callback); + } +} + extern "C" fn send_event(this: &Object, _: Sel, native_event: id) { unsafe { let () = msg_send![super(this, class!(NSWindow)), sendEvent: native_event]; diff --git a/crates/gpui/src/presenter.rs b/crates/gpui/src/presenter.rs index 2666a329f0613d26711589d3c05b94412845ea90..1b5adbc994db269bfb30af77196d334ba35362bd 100644 --- a/crates/gpui/src/presenter.rs +++ b/crates/gpui/src/presenter.rs @@ -7,7 +7,8 @@ use crate::{ platform::Event, text_layout::TextLayoutCache, Action, AnyAction, AnyViewHandle, AssetCache, ElementBox, Entity, FontSystem, ModelHandle, - ReadModel, ReadView, Scene, View, ViewHandle, + ReadModel, ReadView, Scene, UpgradeModelHandle, UpgradeViewHandle, View, ViewHandle, + WeakModelHandle, WeakViewHandle, }; use pathfinder_geometry::vector::{vec2f, Vector2F}; use serde_json::json; @@ -270,6 +271,21 @@ impl<'a> ReadModel for LayoutContext<'a> { } } +impl<'a> UpgradeModelHandle for LayoutContext<'a> { + fn upgrade_model_handle( + &self, + handle: &WeakModelHandle, + ) -> Option> { + self.app.upgrade_model_handle(handle) + } +} + +impl<'a> UpgradeViewHandle for LayoutContext<'a> { + fn upgrade_view_handle(&self, handle: &WeakViewHandle) -> Option> { + self.app.upgrade_view_handle(handle) + } +} + pub struct PaintContext<'a> { rendered_views: &'a mut HashMap, pub scene: &'a mut Scene, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 18e620fcd25d8605bc6b605db39bf72c2d902890..f19ff21081af8b2fb9773e8ea3ae6f6496b7c155 100644 
--- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -68,6 +68,7 @@ pub struct Buffer { remote_selections: TreeMap, selections_update_count: usize, diagnostics_update_count: usize, + file_update_count: usize, language_server: Option, completion_triggers: Vec, deferred_ops: OperationQueue, @@ -78,8 +79,10 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, tree: Option, + path: Option>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, + file_update_count: usize, remote_selections: TreeMap, selections_update_count: usize, is_parsing: bool, @@ -111,16 +114,22 @@ pub struct Diagnostic { } #[derive(Clone, Debug)] -pub struct Completion { - pub old_range: Range, +pub struct Completion { + pub old_range: Range, pub new_text: String, pub label: CompletionLabel, pub lsp_completion: lsp::CompletionItem, } +#[derive(Clone, Debug)] +pub struct CodeAction { + pub range: Range, + pub lsp_action: lsp::CodeAction, +} + struct LanguageServerState { server: Arc, - latest_snapshot: watch::Sender>, + latest_snapshot: watch::Sender, pending_snapshots: BTreeMap, next_version: usize, _maintain_server: Task<()>, @@ -141,12 +150,12 @@ pub enum Operation { lamport_timestamp: clock::Lamport, }, UpdateSelections { - replica_id: ReplicaId, selections: Arc<[Selection]>, lamport_timestamp: clock::Lamport, }, UpdateCompletionTriggers { triggers: Vec, + lamport_timestamp: clock::Lamport, }, } @@ -192,24 +201,6 @@ pub trait File { cx: &mut MutableAppContext, ) -> Task>; - fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext) - -> Option>>; - - fn completions( - &self, - buffer_id: u64, - position: Anchor, - language: Option>, - cx: &mut MutableAppContext, - ) -> Task>>>; - - fn apply_additional_edits_for_completion( - &self, - buffer_id: u64, - completion: Completion, - cx: &mut MutableAppContext, - ) -> Task>>; - fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext); fn 
buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext); @@ -234,12 +225,21 @@ pub trait LocalFile: File { ); } -#[cfg(feature = "test-support")] +#[cfg(any(test, feature = "test-support"))] pub struct FakeFile { pub path: Arc, } -#[cfg(feature = "test-support")] +#[cfg(any(test, feature = "test-support"))] +impl FakeFile { + pub fn new(path: impl AsRef) -> Self { + Self { + path: path.as_ref().into(), + } + } +} + +#[cfg(any(test, feature = "test-support"))] impl File for FakeFile { fn as_local(&self) -> Option<&dyn LocalFile> { Some(self) @@ -275,29 +275,6 @@ impl File for FakeFile { cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) }) } - fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option>> { - None - } - - fn completions( - &self, - _: u64, - _: Anchor, - _: Option>, - _: &mut MutableAppContext, - ) -> Task>>> { - Task::ready(Ok(Default::default())) - } - - fn apply_additional_edits_for_completion( - &self, - _: u64, - _: Completion, - _: &mut MutableAppContext, - ) -> Task>> { - Task::ready(Ok(Default::default())) - } - fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {} fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {} @@ -311,7 +288,7 @@ impl File for FakeFile { } } -#[cfg(feature = "test-support")] +#[cfg(any(test, feature = "test-support"))] impl LocalFile for FakeFile { fn abs_path(&self, _: &AppContext) -> PathBuf { self.path.to_path_buf() @@ -378,6 +355,7 @@ pub(crate) struct Diff { base_version: clock::Global, new_text: Arc, changes: Vec<(ChangeTag, usize)>, + start_offset: usize, } #[derive(Clone, Copy)] @@ -425,27 +403,19 @@ impl Buffer { file: Option>, cx: &mut ModelContext, ) -> Result { - let fragments_len = message.fragments.len(); - let buffer = TextBuffer::from_parts( + let buffer = TextBuffer::new( replica_id, message.id, - &message.visible_text, - &message.deleted_text, - message - .undo_map - .into_iter() - .map(proto::deserialize_undo_map_entry), - 
message - .fragments - .into_iter() - .enumerate() - .map(|(i, fragment)| { - proto::deserialize_buffer_fragment(fragment, i, fragments_len) - }), - message.lamport_timestamp, - From::from(message.version), + History::new(Arc::from(message.base_text)), ); let mut this = Self::build(buffer, file); + let ops = message + .operations + .into_iter() + .map(proto::deserialize_operation) + .collect::>>()?; + this.apply_ops(ops, cx)?; + for selection_set in message.selections { this.remote_selections.insert( selection_set.replica_id as ReplicaId, @@ -464,37 +434,24 @@ impl Buffer { DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot), cx, ); - this.completion_triggers = message.completion_triggers; - let deferred_ops = message - .deferred_operations - .into_iter() - .map(proto::deserialize_operation) - .collect::>>()?; - this.apply_ops(deferred_ops, cx)?; - Ok(this) } pub fn to_proto(&self) -> proto::BufferState { + let mut operations = self + .text + .history() + .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))) + .chain(self.deferred_ops.iter().map(proto::serialize_operation)) + .collect::>(); + operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation); proto::BufferState { id: self.remote_id(), file: self.file.as_ref().map(|f| f.to_proto()), - visible_text: self.text.text(), - deleted_text: self.text.deleted_text(), - undo_map: self - .text - .undo_history() - .map(proto::serialize_undo_map_entry) - .collect(), - version: From::from(&self.version), - lamport_timestamp: self.lamport_clock.value, - fragments: self - .text - .fragments() - .map(proto::serialize_buffer_fragment) - .collect(), + base_text: self.base_text().to_string(), + operations, selections: self .remote_selections .iter() @@ -505,16 +462,6 @@ impl Buffer { }) .collect(), diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), - deferred_operations: self - .deferred_ops - .iter() - .map(proto::serialize_operation) - .chain( - self.text - 
.deferred_ops() - .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))), - ) - .collect(), completion_triggers: self.completion_triggers.clone(), } } @@ -557,6 +504,7 @@ impl Buffer { selections_update_count: 0, diagnostics: Default::default(), diagnostics_update_count: 0, + file_update_count: 0, language_server: None, completion_triggers: Default::default(), deferred_ops: OperationQueue::new(), @@ -569,9 +517,11 @@ impl Buffer { BufferSnapshot { text: self.text.snapshot(), tree: self.syntax_tree(), + path: self.file.as_ref().map(|f| f.path().clone()), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), diagnostics_update_count: self.diagnostics_update_count, + file_update_count: self.file_update_count, is_parsing: self.parsing_in_background, language: self.language.clone(), parse_count: self.parse_count, @@ -583,52 +533,6 @@ impl Buffer { self.file.as_deref() } - pub fn format(&mut self, cx: &mut ModelContext) -> Task> { - let file = if let Some(file) = self.file.as_ref() { - file - } else { - return Task::ready(Err(anyhow!("buffer has no file"))); - }; - - if let Some(LanguageServerState { server, .. 
}) = self.language_server.as_ref() { - let server = server.clone(); - let abs_path = file.as_local().unwrap().abs_path(cx); - let version = self.version(); - cx.spawn(|this, mut cx| async move { - let edits = server - .request::(lsp::DocumentFormattingParams { - text_document: lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(&abs_path).unwrap(), - ), - options: Default::default(), - work_done_progress_params: Default::default(), - }) - .await?; - - if let Some(edits) = edits { - this.update(&mut cx, |this, cx| { - if this.version == version { - this.apply_lsp_edits(edits, cx)?; - Ok(()) - } else { - Err(anyhow!("buffer edited since starting to format")) - } - }) - } else { - Ok(()) - } - }) - } else { - let format = file.format_remote(self.remote_id(), cx.as_mut()); - cx.spawn(|_, _| async move { - if let Some(format) = format { - format.await?; - } - Ok(()) - }) - } - } - pub fn save( &mut self, cx: &mut ModelContext, @@ -660,76 +564,21 @@ impl Buffer { language_server: Option>, cx: &mut ModelContext, ) { - self.language_server = if let Some(server) = language_server { + self.language_server = if let Some((server, file)) = + language_server.zip(self.file.as_ref().and_then(|f| f.as_local())) + { + let initial_snapshot = LanguageServerSnapshot { + buffer_snapshot: self.text.snapshot(), + version: 0, + path: file.abs_path(cx).into(), + }; let (latest_snapshot_tx, mut latest_snapshot_rx) = - watch::channel::>(); - - let maintain_changes = cx.background().spawn({ - let server = server.clone(); - async move { - let mut prev_snapshot: Option = None; - while let Some(snapshot) = latest_snapshot_rx.recv().await { - if let Some(snapshot) = snapshot { - let uri = lsp::Url::from_file_path(&snapshot.path).unwrap(); - if let Some(prev_snapshot) = prev_snapshot { - let changes = lsp::DidChangeTextDocumentParams { - text_document: lsp::VersionedTextDocumentIdentifier::new( - uri, - snapshot.version as i32, - ), - content_changes: snapshot - .buffer_snapshot - 
.edits_since::<(PointUtf16, usize)>( - prev_snapshot.buffer_snapshot.version(), - ) - .map(|edit| { - let edit_start = edit.new.start.0; - let edit_end = - edit_start + (edit.old.end.0 - edit.old.start.0); - let new_text = snapshot - .buffer_snapshot - .text_for_range(edit.new.start.1..edit.new.end.1) - .collect(); - lsp::TextDocumentContentChangeEvent { - range: Some(lsp::Range::new( - edit_start.to_lsp_position(), - edit_end.to_lsp_position(), - )), - range_length: None, - text: new_text, - } - }) - .collect(), - }; - server - .notify::(changes) - .await?; - } else { - server - .notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - Default::default(), - snapshot.version as i32, - snapshot.buffer_snapshot.text().to_string(), - ), - }, - ) - .await?; - } - - prev_snapshot = Some(snapshot); - } - } - Ok(()) - } - }); + watch::channel_with::(initial_snapshot.clone()); Some(LanguageServerState { latest_snapshot: latest_snapshot_tx, - pending_snapshots: Default::default(), - next_version: 0, + pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]), + next_version: 1, server: server.clone(), _maintain_server: cx.spawn_weak(|this, mut cx| async move { let mut capabilities = server.capabilities(); @@ -741,9 +590,13 @@ impl Buffer { .and_then(|c| c.trigger_characters) .unwrap_or_default(); this.update(&mut cx, |this, cx| { + let lamport_timestamp = this.text.lamport_clock.tick(); this.completion_triggers = triggers.clone(); this.send_operation( - Operation::UpdateCompletionTriggers { triggers }, + Operation::UpdateCompletionTriggers { + triggers, + lamport_timestamp, + }, cx, ); cx.notify(); @@ -756,14 +609,69 @@ impl Buffer { } } + let maintain_changes = cx.background().spawn(async move { + let initial_snapshot = + latest_snapshot_rx.recv().await.ok_or_else(|| { + anyhow!("buffer dropped before sending DidOpenTextDocument") + })?; + server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: 
lsp::TextDocumentItem::new( + lsp::Url::from_file_path(initial_snapshot.path).unwrap(), + Default::default(), + initial_snapshot.version as i32, + initial_snapshot.buffer_snapshot.text(), + ), + }, + ) + .await?; + + let mut prev_version = initial_snapshot.buffer_snapshot.version().clone(); + while let Some(snapshot) = latest_snapshot_rx.recv().await { + let uri = lsp::Url::from_file_path(&snapshot.path).unwrap(); + let buffer_snapshot = snapshot.buffer_snapshot.clone(); + let content_changes = buffer_snapshot + .edits_since::<(PointUtf16, usize)>(&prev_version) + .map(|edit| { + let edit_start = edit.new.start.0; + let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); + let new_text = buffer_snapshot + .text_for_range(edit.new.start.1..edit.new.end.1) + .collect(); + lsp::TextDocumentContentChangeEvent { + range: Some(lsp::Range::new( + edit_start.to_lsp_position(), + edit_end.to_lsp_position(), + )), + range_length: None, + text: new_text, + } + }) + .collect(); + let changes = lsp::DidChangeTextDocumentParams { + text_document: lsp::VersionedTextDocumentIdentifier::new( + uri, + snapshot.version as i32, + ), + content_changes, + }; + server + .notify::(changes) + .await?; + + prev_version = snapshot.buffer_snapshot.version().clone(); + } + + Ok::<_, anyhow::Error>(()) + }); + maintain_changes.log_err().await; }), }) } else { None }; - - self.update_language_server(cx); } pub fn did_save( @@ -777,6 +685,7 @@ impl Buffer { self.saved_version = version; if let Some(new_file) = new_file { self.file = Some(new_file); + self.file_update_count += 1; } if let Some((state, local_file)) = &self .language_server @@ -799,6 +708,7 @@ impl Buffer { .detach() } cx.emit(Event::Saved); + cx.notify(); } pub fn did_reload( @@ -874,7 +784,9 @@ impl Buffer { } if file_changed { + self.file_update_count += 1; cx.emit(Event::FileHandleChanged); + cx.notify(); } self.file = Some(new_file); task @@ -904,6 +816,10 @@ impl Buffer { self.diagnostics_update_count } + pub fn 
file_update_count(&self) -> usize { + self.file_update_count + } + pub(crate) fn syntax_tree(&self) -> Option { if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { self.interpolate_tree(syntax_tree); @@ -1001,8 +917,8 @@ impl Buffer { pub fn update_diagnostics( &mut self, - version: Option, mut diagnostics: Vec>, + version: Option, cx: &mut ModelContext, ) -> Result<()> where @@ -1019,14 +935,7 @@ impl Buffer { let version = version.map(|version| version as usize); let content = if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) { - language_server - .pending_snapshots - .retain(|&v, _| v >= version); - let snapshot = language_server - .pending_snapshots - .get(&version) - .ok_or_else(|| anyhow!("missing snapshot"))?; - &snapshot.buffer_snapshot + language_server.snapshot_for_version(version)? } else { self.deref() }; @@ -1278,6 +1187,7 @@ impl Buffer { base_version, new_text, changes, + start_offset: 0, } }) } @@ -1285,16 +1195,21 @@ impl Buffer { pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { if self.version == diff.base_version { self.start_transaction(); - let mut offset = 0; + let mut offset = diff.start_offset; for (tag, len) in diff.changes { let range = offset..(offset + len); match tag { ChangeTag::Equal => offset += len, ChangeTag::Delete => { - self.edit(Some(range), "", cx); + self.edit([range], "", cx); } ChangeTag::Insert => { - self.edit(Some(offset..offset), &diff.new_text[range], cx); + self.edit( + [offset..offset], + &diff.new_text + [range.start - diff.start_offset..range.end - diff.start_offset], + cx, + ); offset += len; } } @@ -1349,8 +1264,23 @@ impl Buffer { } } - pub fn avoid_grouping_next_transaction(&mut self) { - self.text.avoid_grouping_next_transaction(); + pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { + self.text.push_transaction(transaction, now); + } + + pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> { + 
self.text.finalize_last_transaction() + } + + pub fn forget_transaction(&mut self, transaction_id: TransactionId) { + self.text.forget_transaction(transaction_id); + } + + pub fn wait_for_edits( + &mut self, + edit_ids: impl IntoIterator, + ) -> impl Future { + self.text.wait_for_edits(edit_ids) } pub fn set_active_selections( @@ -1368,7 +1298,6 @@ impl Buffer { ); self.send_operation( Operation::UpdateSelections { - replica_id: self.text.replica_id(), selections, lamport_timestamp, }, @@ -1386,24 +1315,22 @@ impl Buffer { } else { return; }; - let abs_path = self - .file - .as_ref() - .and_then(|f| f.as_local()) - .map_or(Path::new("/").to_path_buf(), |file| file.abs_path(cx)); + let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) { + file + } else { + return; + }; let version = post_inc(&mut language_server.next_version); let snapshot = LanguageServerSnapshot { buffer_snapshot: self.text.snapshot(), version, - path: Arc::from(abs_path), + path: Arc::from(file.abs_path(cx)), }; language_server .pending_snapshots .insert(version, snapshot.clone()); - let _ = language_server - .latest_snapshot - .blocking_send(Some(snapshot)); + let _ = language_server.latest_snapshot.blocking_send(snapshot); } pub fn edit( @@ -1492,7 +1419,7 @@ impl Buffer { let new_text_len = new_text.len(); let edit = self.text.edit(ranges.iter().cloned(), new_text); - let edit_id = edit.timestamp.local(); + let edit_id = edit.local_timestamp(); if let Some((before_edit, edited)) = autoindent_request { let mut inserted = None; @@ -1521,34 +1448,117 @@ impl Buffer { } self.end_transaction(cx); - self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx); + self.send_operation(Operation::Buffer(edit), cx); Some(edit_id) } - fn apply_lsp_edits( + pub fn edits_from_lsp( &mut self, - edits: Vec, + lsp_edits: impl 'static + Send + IntoIterator, + version: Option, cx: &mut ModelContext, - ) -> Result> { - for edit in &edits { - let range = 
range_from_lsp(edit.range); - if self.clip_point_utf16(range.start, Bias::Left) != range.start - || self.clip_point_utf16(range.end, Bias::Left) != range.end - { - return Err(anyhow!( - "invalid formatting edits received from language server" - )); + ) -> Task, String)>>> { + let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) { + state + .snapshot_for_version(version as usize) + .map(Clone::clone) + } else { + Ok(TextBuffer::deref(self).clone()) + }; + + cx.background().spawn(async move { + let snapshot = snapshot?; + let mut lsp_edits = lsp_edits + .into_iter() + .map(|edit| (range_from_lsp(edit.range), edit.new_text)) + .peekable(); + + let mut edits = Vec::new(); + while let Some((mut range, mut new_text)) = lsp_edits.next() { + // Combine any LSP edits that are adjacent. + // + // Also, combine LSP edits that are separated from each other by only + // a newline. This is important because for some code actions, + // Rust-analyzer rewrites the entire buffer via a series of edits that + // are separated by unchanged newline characters. + // + // In order for the diffing logic below to work properly, any edits that + // cancel each other out must be combined into one. 
+ while let Some((next_range, next_text)) = lsp_edits.peek() { + if next_range.start > range.end { + if next_range.start.row > range.end.row + 1 + || next_range.start.column > 0 + || snapshot.clip_point_utf16( + PointUtf16::new(range.end.row, u32::MAX), + Bias::Left, + ) > range.end + { + break; + } + new_text.push('\n'); + } + range.end = next_range.end; + new_text.push_str(&next_text); + lsp_edits.next(); + } + + if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start + || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end + { + return Err(anyhow!("invalid edits received from language server")); + } + + // For multiline edits, perform a diff of the old and new text so that + // we can identify the changes more precisely, preserving the locations + // of any anchors positioned in the unchanged regions. + if range.end.row > range.start.row { + let mut offset = range.start.to_offset(&snapshot); + let old_text = snapshot.text_for_range(range).collect::(); + + let diff = TextDiff::from_lines(old_text.as_str(), &new_text); + let mut moved_since_edit = true; + for change in diff.iter_all_changes() { + let tag = change.tag(); + let value = change.value(); + match tag { + ChangeTag::Equal => { + offset += value.len(); + moved_since_edit = true; + } + ChangeTag::Delete => { + let start = snapshot.anchor_after(offset); + let end = snapshot.anchor_before(offset + value.len()); + if moved_since_edit { + edits.push((start..end, String::new())); + } else { + edits.last_mut().unwrap().0.end = end; + } + offset += value.len(); + moved_since_edit = false; + } + ChangeTag::Insert => { + if moved_since_edit { + let anchor = snapshot.anchor_after(offset); + edits.push((anchor.clone()..anchor, value.to_string())); + } else { + edits.last_mut().unwrap().1.push_str(value); + } + moved_since_edit = false; + } + } + } + } else if range.end == range.start { + let anchor = snapshot.anchor_after(range.start); + edits.push((anchor.clone()..anchor, new_text)); + } else 
{ + let edit_start = snapshot.anchor_after(range.start); + let edit_end = snapshot.anchor_before(range.end); + edits.push((edit_start..edit_end, new_text)); + } } - } - self.start_transaction(); - let edit_ids = edits - .into_iter() - .rev() - .filter_map(|edit| self.edit([range_from_lsp(edit.range)], edit.new_text, cx)) - .collect(); - self.end_transaction(cx); - Ok(edit_ids) + Ok(edits) + }) } fn did_edit( @@ -1655,18 +1665,17 @@ impl Buffer { ); } Operation::UpdateSelections { - replica_id, selections, lamport_timestamp, } => { - if let Some(set) = self.remote_selections.get(&replica_id) { + if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) { if set.lamport_timestamp > lamport_timestamp { return; } } self.remote_selections.insert( - replica_id, + lamport_timestamp.replica_id, SelectionSet { selections, lamport_timestamp, @@ -1675,8 +1684,12 @@ impl Buffer { self.text.lamport_clock.observe(lamport_timestamp); self.selections_update_count += 1; } - Operation::UpdateCompletionTriggers { triggers } => { + Operation::UpdateCompletionTriggers { + triggers, + lamport_timestamp, + } => { self.completion_triggers = triggers; + self.text.lamport_clock.observe(lamport_timestamp); } } } @@ -1718,7 +1731,7 @@ impl Buffer { } } - pub fn undo_transaction( + pub fn undo_to_transaction( &mut self, transaction_id: TransactionId, cx: &mut ModelContext, @@ -1726,13 +1739,15 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - if let Some(operation) = self.text.undo_transaction(transaction_id) { + let operations = self.text.undo_to_transaction(transaction_id); + let undone = !operations.is_empty(); + for operation in operations { self.send_operation(Operation::Buffer(operation), cx); - self.did_edit(&old_version, was_dirty, cx); - true - } else { - false } + if undone { + self.did_edit(&old_version, was_dirty, cx) + } + undone } pub fn redo(&mut self, cx: &mut ModelContext) -> Option { @@ -1748,7 +1763,7 @@ impl 
Buffer { } } - pub fn redo_transaction( + pub fn redo_to_transaction( &mut self, transaction_id: TransactionId, cx: &mut ModelContext, @@ -1756,158 +1771,15 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - if let Some(operation) = self.text.redo_transaction(transaction_id) { + let operations = self.text.redo_to_transaction(transaction_id); + let redone = !operations.is_empty(); + for operation in operations { self.send_operation(Operation::Buffer(operation), cx); - self.did_edit(&old_version, was_dirty, cx); - true - } else { - false } - } - - pub fn completions( - &self, - position: T, - cx: &mut ModelContext, - ) -> Task>>> - where - T: ToOffset, - { - let file = if let Some(file) = self.file.as_ref() { - file - } else { - return Task::ready(Ok(Default::default())); - }; - let language = self.language.clone(); - - if let Some(file) = file.as_local() { - let server = if let Some(language_server) = self.language_server.as_ref() { - language_server.server.clone() - } else { - return Task::ready(Ok(Default::default())); - }; - let abs_path = file.abs_path(cx); - let position = self.offset_to_point_utf16(position.to_offset(self)); - - cx.spawn(|this, cx| async move { - let completions = server - .request::(lsp::CompletionParams { - text_document_position: lsp::TextDocumentPositionParams::new( - lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(abs_path).unwrap(), - ), - position.to_lsp_position(), - ), - context: Default::default(), - work_done_progress_params: Default::default(), - partial_result_params: Default::default(), - }) - .await?; - - let completions = if let Some(completions) = completions { - match completions { - lsp::CompletionResponse::Array(completions) => completions, - lsp::CompletionResponse::List(list) => list.items, - } - } else { - Default::default() - }; - - this.read_with(&cx, |this, _| { - Ok(completions.into_iter().filter_map(|lsp_completion| { - let (old_range, new_text) = match 
lsp_completion.text_edit.as_ref()? { - lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()), - lsp::CompletionTextEdit::InsertAndReplace(_) => { - log::info!("received an insert and replace completion but we don't yet support that"); - return None - }, - }; - - let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left); - let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ; - if clipped_start == old_range.start && clipped_end == old_range.end { - Some(Completion { - old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end), - new_text, - label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)), - lsp_completion, - }) - } else { - None - } - }).collect()) - }) - }) - } else { - file.completions( - self.remote_id(), - self.anchor_before(position), - language, - cx.as_mut(), - ) - } - } - - pub fn apply_additional_edits_for_completion( - &mut self, - completion: Completion, - push_to_history: bool, - cx: &mut ModelContext, - ) -> Task>> { - let file = if let Some(file) = self.file.as_ref() { - file - } else { - return Task::ready(Ok(Default::default())); - }; - - if file.is_local() { - let server = if let Some(lang) = self.language_server.as_ref() { - lang.server.clone() - } else { - return Task::ready(Ok(Default::default())); - }; - - cx.spawn(|this, mut cx| async move { - let resolved_completion = server - .request::(completion.lsp_completion) - .await?; - if let Some(additional_edits) = resolved_completion.additional_text_edits { - this.update(&mut cx, |this, cx| { - if !push_to_history { - this.avoid_grouping_next_transaction(); - } - this.start_transaction(); - let edit_ids = this.apply_lsp_edits(additional_edits, cx); - if let Some(transaction_id) = this.end_transaction(cx) { - if !push_to_history { - this.text.forget_transaction(transaction_id); - } - } - edit_ids - }) - } else { - 
Ok(Default::default()) - } - }) - } else { - let apply_edits = file.apply_additional_edits_for_completion( - self.remote_id(), - completion, - cx.as_mut(), - ); - cx.spawn(|this, mut cx| async move { - let edit_ids = apply_edits.await?; - this.update(&mut cx, |this, _| this.text.wait_for_edits(&edit_ids)) - .await; - if push_to_history { - this.update(&mut cx, |this, _| { - this.text - .push_transaction(edit_ids.iter().copied(), Instant::now()); - }); - } - Ok(edit_ids) - }) + if redone { + self.did_edit(&old_version, was_dirty, cx) } + redone } pub fn completion_triggers(&self) -> &[String] { @@ -1970,6 +1842,18 @@ impl Entity for Buffer { fn release(&mut self, cx: &mut gpui::MutableAppContext) { if let Some(file) = self.file.as_ref() { file.buffer_removed(self.remote_id(), cx); + if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) { + let request = lang_server + .server + .notify::( + lsp::DidCloseTextDocumentParams { + text_document: lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(file.abs_path(cx)).unwrap(), + ), + }, + ); + cx.foreground().spawn(request).detach_and_log_err(cx); + } } } } @@ -2401,6 +2285,14 @@ impl BufferSnapshot { pub fn selections_update_count(&self) -> usize { self.selections_update_count } + + pub fn path(&self) -> Option<&Arc> { + self.path.as_ref() + } + + pub fn file_update_count(&self) -> usize { + self.file_update_count + } } impl Clone for BufferSnapshot { @@ -2408,10 +2300,12 @@ impl Clone for BufferSnapshot { Self { text: self.text.clone(), tree: self.tree.clone(), + path: self.path.clone(), remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), selections_update_count: self.selections_update_count, diagnostics_update_count: self.diagnostics_update_count, + file_update_count: self.file_update_count, is_parsing: self.is_parsing, language: self.language.clone(), parse_count: self.parse_count, @@ -2686,14 +2580,28 @@ impl operation_queue::Operation for 
Operation { } | Operation::UpdateSelections { lamport_timestamp, .. - } => *lamport_timestamp, - Operation::UpdateCompletionTriggers { .. } => { - unreachable!("updating completion triggers should never be deferred") } + | Operation::UpdateCompletionTriggers { + lamport_timestamp, .. + } => *lamport_timestamp, } } } +impl LanguageServerState { + fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> { + const OLD_VERSIONS_TO_RETAIN: usize = 10; + + self.pending_snapshots + .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version); + let snapshot = self + .pending_snapshots + .get(&version) + .ok_or_else(|| anyhow!("missing snapshot"))?; + Ok(&snapshot.buffer_snapshot) + } +} + impl Default for Diagnostic { fn default() -> Self { Self { @@ -2708,7 +2616,7 @@ impl Default for Diagnostic { } } -impl Completion { +impl Completion { pub fn sort_key(&self) -> (usize, &str) { let kind_key = match self.lsp_completion.kind { Some(lsp::CompletionItemKind::VARIABLE) => 0, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index e4d22346bf623687ef40ae572d876ce4ba2ccd35..bf24118aff1ddbacb87513408b37f79a10e205e9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -7,19 +7,20 @@ pub mod proto; mod tests; use anyhow::{anyhow, Result}; -pub use buffer::Operation; -pub use buffer::*; use collections::HashSet; -pub use diagnostic_set::DiagnosticEntry; use gpui::AppContext; use highlight_map::HighlightMap; use lazy_static::lazy_static; -pub use outline::{Outline, OutlineItem}; use parking_lot::Mutex; use serde::Deserialize; use std::{cell::RefCell, ops::Range, path::Path, str, sync::Arc}; use theme::SyntaxTheme; use tree_sitter::{self, Query}; + +pub use buffer::Operation; +pub use buffer::*; +pub use diagnostic_set::DiagnosticEntry; +pub use outline::{Outline, OutlineItem}; pub use tree_sitter::{Parser, Tree}; thread_local! { @@ -39,10 +40,6 @@ lazy_static! 
{ )); } -pub trait ToPointUtf16 { - fn to_point_utf16(self) -> PointUtf16; -} - pub trait ToLspPosition { fn to_lsp_position(self) -> lsp::Position; } @@ -360,18 +357,15 @@ impl CompletionLabel { #[cfg(any(test, feature = "test-support"))] impl LanguageServerConfig { - pub async fn fake( - executor: Arc, - ) -> (Self, lsp::FakeLanguageServer) { - Self::fake_with_capabilities(Default::default(), executor).await + pub async fn fake(cx: &gpui::TestAppContext) -> (Self, lsp::FakeLanguageServer) { + Self::fake_with_capabilities(Default::default(), cx).await } pub async fn fake_with_capabilities( capabilites: lsp::ServerCapabilities, - executor: Arc, + cx: &gpui::TestAppContext, ) -> (Self, lsp::FakeLanguageServer) { - let (server, fake) = - lsp::LanguageServer::fake_with_capabilities(capabilites, executor).await; + let (server, fake) = lsp::LanguageServer::fake_with_capabilities(capabilites, cx).await; fake.started .store(false, std::sync::atomic::Ordering::SeqCst); let started = fake.started.clone(); @@ -386,18 +380,16 @@ impl LanguageServerConfig { } } -impl ToPointUtf16 for lsp::Position { - fn to_point_utf16(self) -> PointUtf16 { - PointUtf16::new(self.line, self.character) - } -} - impl ToLspPosition for PointUtf16 { fn to_lsp_position(self) -> lsp::Position { lsp::Position::new(self.row, self.column) } } +pub fn point_from_lsp(point: lsp::Position) -> PointUtf16 { + PointUtf16::new(point.line, point.character) +} + pub fn range_from_lsp(range: lsp::Range) -> Range { let start = PointUtf16::new(range.start.line, range.start.character); let end = PointUtf16::new(range.end.line, range.end.character); diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 82787ec5712291041665d8de4280d12598d61866..a4267cfda2f37bba51829222c2f9f4be121bfe54 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,12 +1,13 @@ use crate::{ - diagnostic_set::DiagnosticEntry, Completion, CompletionLabel, Diagnostic, Language, Operation, + 
diagnostic_set::DiagnosticEntry, CodeAction, Completion, CompletionLabel, Diagnostic, Language, + Operation, }; use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::HashSet; use lsp::DiagnosticSeverity; use rpc::proto; -use std::sync::Arc; +use std::{ops::Range, sync::Arc}; use text::*; pub use proto::{Buffer, BufferState, SelectionSet}; @@ -24,14 +25,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { replica_id: undo.id.replica_id as u32, local_timestamp: undo.id.value, lamport_timestamp: lamport_timestamp.value, - ranges: undo - .ranges - .iter() - .map(|r| proto::Range { - start: r.start.0 as u64, - end: r.end.0 as u64, - }) - .collect(), + ranges: undo.ranges.iter().map(serialize_range).collect(), counts: undo .counts .iter() @@ -44,11 +38,10 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { version: From::from(&undo.version), }), Operation::UpdateSelections { - replica_id, selections, lamport_timestamp, } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { - replica_id: *replica_id as u32, + replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, selections: serialize_selections(selections), }), @@ -60,32 +53,27 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, diagnostics: serialize_diagnostics(diagnostics.iter()), }), - Operation::UpdateCompletionTriggers { triggers } => { - proto::operation::Variant::UpdateCompletionTriggers( - proto::operation::UpdateCompletionTriggers { - triggers: triggers.clone(), - }, - ) - } + Operation::UpdateCompletionTriggers { + triggers, + lamport_timestamp, + } => proto::operation::Variant::UpdateCompletionTriggers( + proto::operation::UpdateCompletionTriggers { + replica_id: lamport_timestamp.replica_id as u32, + lamport_timestamp: lamport_timestamp.value, + triggers: triggers.clone(), + }, + ), }), } } pub fn 
serialize_edit_operation(operation: &EditOperation) -> proto::operation::Edit { - let ranges = operation - .ranges - .iter() - .map(|range| proto::Range { - start: range.start.0 as u64, - end: range.end.0 as u64, - }) - .collect(); proto::operation::Edit { replica_id: operation.timestamp.replica_id as u32, local_timestamp: operation.timestamp.local, lamport_timestamp: operation.timestamp.lamport, version: From::from(&operation.version), - ranges, + ranges: operation.ranges.iter().map(serialize_range).collect(), new_text: operation.new_text.clone(), } } @@ -208,11 +196,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { ) }) .collect(), - ranges: undo - .ranges - .into_iter() - .map(|r| FullOffset(r.start as usize)..FullOffset(r.end as usize)) - .collect(), + ranges: undo.ranges.into_iter().map(deserialize_range).collect(), version: undo.version.into(), }, }), @@ -232,7 +216,6 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { .collect::>(); Operation::UpdateSelections { - replica_id: message.replica_id as ReplicaId, lamport_timestamp: clock::Lamport { replica_id: message.replica_id as ReplicaId, value: message.lamport_timestamp, @@ -250,6 +233,10 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { proto::operation::Variant::UpdateCompletionTriggers(message) => { Operation::UpdateCompletionTriggers { triggers: message.triggers, + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, } } }, @@ -257,11 +244,6 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { } pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation { - let ranges = edit - .ranges - .into_iter() - .map(|range| FullOffset(range.start as usize)..FullOffset(range.end as usize)) - .collect(); EditOperation { timestamp: InsertionTimestamp { replica_id: edit.replica_id as ReplicaId, @@ -269,7 +251,7 @@ pub fn 
deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation lamport: edit.lamport_timestamp, }, version: edit.version.into(), - ranges, + ranges: edit.ranges.into_iter().map(deserialize_range).collect(), new_text: edit.new_text, } } @@ -380,7 +362,39 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { }) } -pub fn serialize_completion(completion: &Completion) -> proto::Completion { +pub fn lamport_timestamp_for_operation(operation: &proto::Operation) -> Option { + let replica_id; + let value; + match operation.variant.as_ref()? { + proto::operation::Variant::Edit(op) => { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } + proto::operation::Variant::Undo(op) => { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } + proto::operation::Variant::UpdateDiagnostics(op) => { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } + proto::operation::Variant::UpdateSelections(op) => { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } + proto::operation::Variant::UpdateCompletionTriggers(op) => { + replica_id = op.replica_id; + value = op.lamport_timestamp; + } + } + + Some(clock::Lamport { + replica_id: replica_id as ReplicaId, + value, + }) +} + +pub fn serialize_completion(completion: &Completion) -> proto::Completion { proto::Completion { old_start: Some(serialize_anchor(&completion.old_range.start)), old_end: Some(serialize_anchor(&completion.old_range.end)), @@ -392,7 +406,7 @@ pub fn serialize_completion(completion: &Completion) -> proto::Completio pub fn deserialize_completion( completion: proto::Completion, language: Option<&Arc>, -) -> Result> { +) -> Result { let old_start = completion .old_start .and_then(deserialize_anchor) @@ -411,3 +425,89 @@ pub fn deserialize_completion( lsp_completion, }) } + +pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction { + proto::CodeAction { + start: Some(serialize_anchor(&action.range.start)), + end: 
Some(serialize_anchor(&action.range.end)), + lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(), + } +} + +pub fn deserialize_code_action(action: proto::CodeAction) -> Result { + let start = action + .start + .and_then(deserialize_anchor) + .ok_or_else(|| anyhow!("invalid start"))?; + let end = action + .end + .and_then(deserialize_anchor) + .ok_or_else(|| anyhow!("invalid end"))?; + let lsp_action = serde_json::from_slice(&action.lsp_action)?; + Ok(CodeAction { + range: start..end, + lsp_action, + }) +} + +pub fn serialize_transaction(transaction: &Transaction) -> proto::Transaction { + proto::Transaction { + id: Some(serialize_local_timestamp(transaction.id)), + edit_ids: transaction + .edit_ids + .iter() + .copied() + .map(serialize_local_timestamp) + .collect(), + start: (&transaction.start).into(), + end: (&transaction.end).into(), + ranges: transaction.ranges.iter().map(serialize_range).collect(), + } +} + +pub fn deserialize_transaction(transaction: proto::Transaction) -> Result { + Ok(Transaction { + id: deserialize_local_timestamp( + transaction + .id + .ok_or_else(|| anyhow!("missing transaction id"))?, + ), + edit_ids: transaction + .edit_ids + .into_iter() + .map(deserialize_local_timestamp) + .collect(), + start: transaction.start.into(), + end: transaction.end.into(), + ranges: transaction + .ranges + .into_iter() + .map(deserialize_range) + .collect(), + }) +} + +pub fn serialize_local_timestamp(timestamp: clock::Local) -> proto::LocalTimestamp { + proto::LocalTimestamp { + replica_id: timestamp.replica_id as u32, + value: timestamp.value, + } +} + +pub fn deserialize_local_timestamp(timestamp: proto::LocalTimestamp) -> clock::Local { + clock::Local { + replica_id: timestamp.replica_id as ReplicaId, + value: timestamp.value, + } +} + +pub fn serialize_range(range: &Range) -> proto::Range { + proto::Range { + start: range.start.0 as u64, + end: range.end.0 as u64, + } +} + +pub fn deserialize_range(range: proto::Range) -> Range { + 
FullOffset(range.start as usize)..FullOffset(range.end as usize) +} diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 40dfd06e94fe11ba09aea0e44e31c561bc314092..c7ea90714cdb474bd42018f6a249e5c8029c47c4 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -557,7 +557,7 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut MutableAppConte #[gpui::test] async fn test_diagnostics(mut cx: gpui::TestAppContext) { - let (language_server, mut fake) = lsp::LanguageServer::fake(cx.background()).await; + let (language_server, mut fake) = lsp::LanguageServer::fake(&cx).await; let mut rust_lang = rust_lang(); rust_lang.config.language_server = Some(LanguageServerConfig { disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]), @@ -572,7 +572,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unindent(); let buffer = cx.add_model(|cx| { - Buffer::new(0, text, cx) + Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx) .with_language(Arc::new(rust_lang), cx) .with_language_server(language_server, cx) }); @@ -592,7 +592,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // Receive diagnostics for an earlier version of the buffer. buffer .update_diagnostics( - Some(open_notification.text_document.version), vec![ DiagnosticEntry { range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10), @@ -628,6 +627,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }, }, ], + Some(open_notification.text_document.version), cx, ) .unwrap(); @@ -687,7 +687,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // Ensure overlapping diagnostics are highlighted correctly. 
buffer .update_diagnostics( - Some(open_notification.text_document.version), vec![ DiagnosticEntry { range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10), @@ -711,6 +710,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }, }, ], + Some(open_notification.text_document.version), cx, ) .unwrap(); @@ -777,7 +777,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { buffer.update(&mut cx, |buffer, cx| { buffer .update_diagnostics( - Some(change_notification_2.text_document.version), vec![ DiagnosticEntry { range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11), @@ -802,6 +801,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }, }, ], + Some(change_notification_2.text_document.version), cx, ) .unwrap(); @@ -838,6 +838,223 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_edits_from_lsp_with_past_version(mut cx: gpui::TestAppContext) { + let (language_server, mut fake) = lsp::LanguageServer::fake(&cx).await; + + let text = " + fn a() { + f1(); + } + fn b() { + f2(); + } + fn c() { + f3(); + } + " + .unindent(); + + let buffer = cx.add_model(|cx| { + Buffer::from_file(0, text, Box::new(FakeFile::new("/some/path")), cx) + .with_language(Arc::new(rust_lang()), cx) + .with_language_server(language_server, cx) + }); + + let lsp_document_version = fake + .receive_notification::() + .await + .text_document + .version; + + // Simulate editing the buffer after the language server computes some edits. 
+ buffer.update(&mut cx, |buffer, cx| { + buffer.edit( + [Point::new(0, 0)..Point::new(0, 0)], + "// above first function\n", + cx, + ); + buffer.edit( + [Point::new(2, 0)..Point::new(2, 0)], + " // inside first function\n", + cx, + ); + buffer.edit( + [Point::new(6, 4)..Point::new(6, 4)], + "// inside second function ", + cx, + ); + + assert_eq!( + buffer.text(), + " + // above first function + fn a() { + // inside first function + f1(); + } + fn b() { + // inside second function f2(); + } + fn c() { + f3(); + } + " + .unindent() + ); + }); + + let edits = buffer + .update(&mut cx, |buffer, cx| { + buffer.edits_from_lsp( + vec![ + // replace body of first function + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)), + new_text: " + fn a() { + f10(); + } + " + .unindent(), + }, + // edit inside second function + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)), + new_text: "00".into(), + }, + // edit inside third function via two distinct edits + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)), + new_text: "4000".into(), + }, + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)), + new_text: "".into(), + }, + ], + Some(lsp_document_version), + cx, + ) + }) + .await + .unwrap(); + + buffer.update(&mut cx, |buffer, cx| { + for (range, new_text) in edits { + buffer.edit([range], new_text, cx); + } + assert_eq!( + buffer.text(), + " + // above first function + fn a() { + // inside first function + f10(); + } + fn b() { + // inside second function f200(); + } + fn c() { + f4000(); + } + " + .unindent() + ); + }); +} + +#[gpui::test] +async fn test_edits_from_lsp_with_edits_on_adjacent_lines(mut cx: gpui::TestAppContext) { + let text = " + use a::b; + use a::c; + + fn f() { + b(); + c(); + } + " + .unindent(); + + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + + // Simulate the language 
server sending us a small edit in the form of a very large diff. + // Rust-analyzer does this when performing a merge-imports code action. + let edits = buffer + .update(&mut cx, |buffer, cx| { + buffer.edits_from_lsp( + [ + // Replace the first use statement without editing the semicolon. + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)), + new_text: "a::{b, c}".into(), + }, + // Reinsert the remainder of the file between the semicolon and the final + // newline of the file. + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)), + new_text: "\n\n".into(), + }, + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)), + new_text: " + fn f() { + b(); + c(); + }" + .unindent(), + }, + // Delete everything after the first newline of the file. + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)), + new_text: "".into(), + }, + ], + None, + cx, + ) + }) + .await + .unwrap(); + + buffer.update(&mut cx, |buffer, cx| { + let edits = edits + .into_iter() + .map(|(range, text)| { + ( + range.start.to_point(&buffer)..range.end.to_point(&buffer), + text, + ) + }) + .collect::>(); + + assert_eq!( + edits, + [ + (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()), + (Point::new(1, 0)..Point::new(2, 0), "".into()) + ] + ); + + for (range, new_text) in edits { + buffer.edit([range], new_text, cx); + } + assert_eq!( + buffer.text(), + " + use a::{b, c}; + + fn f() { + b(); + c(); + } + " + .unindent() + ); + }); +} + #[gpui::test] async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { cx.add_model(|cx| { @@ -851,7 +1068,6 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { buffer.set_language(Some(Arc::new(rust_lang())), cx); buffer .update_diagnostics( - None, vec![ DiagnosticEntry { range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10), @@ -870,6 +1086,7 @@ async fn 
test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { }, }, ], + None, cx, ) .unwrap(); @@ -1073,15 +1290,15 @@ fn test_random_collaboration(cx: &mut MutableAppContext, mut rng: StdRng) { for buffer in &buffers { let buffer = buffer.read(cx).snapshot(); + let actual_remote_selections = buffer + .remote_selections_in_range(Anchor::min()..Anchor::max()) + .map(|(replica_id, selections)| (replica_id, selections.collect::>())) + .collect::>(); let expected_remote_selections = active_selections .iter() .filter(|(replica_id, _)| **replica_id != buffer.replica_id()) .map(|(replica_id, selections)| (*replica_id, selections.iter().collect::>())) .collect::>(); - let actual_remote_selections = buffer - .remote_selections_in_range(Anchor::min()..Anchor::max()) - .map(|(replica_id, selections)| (replica_id, selections.collect::>())) - .collect::>(); assert_eq!(actual_remote_selections, expected_remote_selections); } } diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 3d453054f84fa354248293702bf5cadcf555e7c6..fca48970d21712284f129e0b260492cabfe75365 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -27,5 +27,6 @@ smol = "1.2" gpui = { path = "../gpui", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } async-pipe = { git = "https://github.com/routerify/async-pipe-rs", rev = "feeb77e83142a9ff837d0767652ae41bfc5d8e47" } -simplelog = "0.9" +ctor = "0.1" +env_logger = "0.8" unindent = "0.1.7" diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 567bc435ead7bbb49c5c500fecd186f4f3f79bee..73f4fe698b8182dccdcdcaa49a9d699bd357ee94 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -56,6 +56,18 @@ struct Request<'a, T> { params: T, } +#[cfg(any(test, feature = "test-support"))] +#[derive(Deserialize)] +struct AnyRequest<'a> { + id: usize, + #[serde(borrow)] + jsonrpc: &'a str, + #[serde(borrow)] + method: &'a str, + #[serde(borrow)] + params: &'a RawValue, +} + #[derive(Serialize, 
Deserialize)] struct AnyResponse<'a> { id: usize, @@ -238,6 +250,21 @@ impl LanguageServer { link_support: Some(true), ..Default::default() }), + code_action: Some(CodeActionClientCapabilities { + code_action_literal_support: Some(CodeActionLiteralSupport { + code_action_kind: CodeActionKindLiteralSupport { + value_set: vec![ + CodeActionKind::REFACTOR.as_str().into(), + CodeActionKind::QUICKFIX.as_str().into(), + ], + }, + }), + data_support: Some(true), + resolve_support: Some(CodeActionCapabilityResolveSupport { + properties: vec!["edit".to_string()], + }), + ..Default::default() + }), completion: Some(CompletionClientCapabilities { completion_item: Some(CompletionItemCapability { snippet_support: Some(true), @@ -454,48 +481,41 @@ impl Drop for Subscription { #[cfg(any(test, feature = "test-support"))] pub struct FakeLanguageServer { - buffer: Vec, - stdin: smol::io::BufReader, - stdout: smol::io::BufWriter, + handlers: Arc< + Mutex< + HashMap< + &'static str, + Box (Vec, barrier::Sender)>, + >, + >, + >, + outgoing_tx: channel::Sender>, + incoming_rx: channel::Receiver>, pub started: Arc, } -#[cfg(any(test, feature = "test-support"))] -pub struct RequestId { - id: usize, - _type: std::marker::PhantomData, -} - #[cfg(any(test, feature = "test-support"))] impl LanguageServer { - pub async fn fake(executor: Arc) -> (Arc, FakeLanguageServer) { - Self::fake_with_capabilities(Default::default(), executor).await + pub async fn fake(cx: &gpui::TestAppContext) -> (Arc, FakeLanguageServer) { + Self::fake_with_capabilities(Default::default(), cx).await } pub async fn fake_with_capabilities( capabilities: ServerCapabilities, - executor: Arc, + cx: &gpui::TestAppContext, ) -> (Arc, FakeLanguageServer) { - let stdin = async_pipe::pipe(); - let stdout = async_pipe::pipe(); - let mut fake = FakeLanguageServer { - stdin: smol::io::BufReader::new(stdin.1), - stdout: smol::io::BufWriter::new(stdout.0), - buffer: Vec::new(), - started: 
Arc::new(std::sync::atomic::AtomicBool::new(true)), - }; + let (stdin_writer, stdin_reader) = async_pipe::pipe(); + let (stdout_writer, stdout_reader) = async_pipe::pipe(); - let server = Self::new_internal(stdin.0, stdout.1, Path::new("/"), executor).unwrap(); + let mut fake = FakeLanguageServer::new(cx, stdin_reader, stdout_writer); + fake.handle_request::(move |_| InitializeResult { + capabilities, + ..Default::default() + }); - let (init_id, _) = fake.receive_request::().await; - fake.respond( - init_id, - InitializeResult { - capabilities, - ..Default::default() - }, - ) - .await; + let server = + Self::new_internal(stdin_writer, stdout_reader, Path::new("/"), cx.background()) + .unwrap(); fake.receive_notification::() .await; @@ -505,6 +525,75 @@ impl LanguageServer { #[cfg(any(test, feature = "test-support"))] impl FakeLanguageServer { + fn new( + cx: &gpui::TestAppContext, + stdin: async_pipe::PipeReader, + stdout: async_pipe::PipeWriter, + ) -> Self { + use futures::StreamExt as _; + + let (incoming_tx, incoming_rx) = channel::unbounded(); + let (outgoing_tx, mut outgoing_rx) = channel::unbounded(); + let this = Self { + outgoing_tx: outgoing_tx.clone(), + incoming_rx, + handlers: Default::default(), + started: Arc::new(std::sync::atomic::AtomicBool::new(true)), + }; + + // Receive incoming messages + let handlers = this.handlers.clone(); + cx.background() + .spawn(async move { + let mut buffer = Vec::new(); + let mut stdin = smol::io::BufReader::new(stdin); + while Self::receive(&mut stdin, &mut buffer).await.is_ok() { + if let Ok(request) = serde_json::from_slice::(&mut buffer) { + assert_eq!(request.jsonrpc, JSON_RPC_VERSION); + + let handler = handlers.lock().remove(request.method); + if let Some(handler) = handler { + let (response, sent) = + handler(request.id, request.params.get().as_bytes()); + log::debug!("handled lsp request. 
method:{}", request.method); + outgoing_tx.send(response).await.unwrap(); + drop(sent); + } else { + log::debug!("unhandled lsp request. method:{}", request.method); + outgoing_tx + .send( + serde_json::to_vec(&AnyResponse { + id: request.id, + error: Some(Error { + message: "no handler".to_string(), + }), + result: None, + }) + .unwrap(), + ) + .await + .unwrap(); + } + } else { + incoming_tx.send(buffer.clone()).await.unwrap(); + } + } + }) + .detach(); + + // Send outgoing messages + cx.background() + .spawn(async move { + let mut stdout = smol::io::BufWriter::new(stdout); + while let Some(notification) = outgoing_rx.next().await { + Self::send(&mut stdout, ¬ification).await; + } + }) + .detach(); + + this + } + pub async fn notify(&mut self, params: T::Params) { if !self.started.load(std::sync::atomic::Ordering::SeqCst) { panic!("can't simulate an LSP notification before the server has been started"); @@ -515,51 +604,53 @@ impl FakeLanguageServer { params, }) .unwrap(); - self.send(message).await; + self.outgoing_tx.send(message).await.unwrap(); } - pub async fn respond<'a, T: request::Request>( - &mut self, - request_id: RequestId, - result: T::Result, - ) { - let result = serde_json::to_string(&result).unwrap(); - let message = serde_json::to_vec(&AnyResponse { - id: request_id.id, - error: None, - result: Some(&RawValue::from_string(result).unwrap()), - }) - .unwrap(); - self.send(message).await; - } + pub async fn receive_notification(&mut self) -> T::Params { + use futures::StreamExt as _; - pub async fn receive_request(&mut self) -> (RequestId, T::Params) { loop { - self.receive().await; - if let Ok(request) = serde_json::from_slice::>(&self.buffer) { - assert_eq!(request.method, T::METHOD); - assert_eq!(request.jsonrpc, JSON_RPC_VERSION); - return ( - RequestId { - id: request.id, - _type: std::marker::PhantomData, - }, - request.params, - ); + let bytes = self.incoming_rx.next().await.unwrap(); + if let Ok(notification) = 
serde_json::from_slice::>(&bytes) { + assert_eq!(notification.method, T::METHOD); + return notification.params; } else { log::info!( "skipping message in fake language server {:?}", - std::str::from_utf8(&self.buffer) + std::str::from_utf8(&bytes) ); } } } - pub async fn receive_notification(&mut self) -> T::Params { - self.receive().await; - let notification = serde_json::from_slice::>(&self.buffer).unwrap(); - assert_eq!(notification.method, T::METHOD); - notification.params + pub fn handle_request(&mut self, handler: F) -> barrier::Receiver + where + T: 'static + request::Request, + F: 'static + Send + FnOnce(T::Params) -> T::Result, + { + let (responded_tx, responded_rx) = barrier::channel(); + let prev_handler = self.handlers.lock().insert( + T::METHOD, + Box::new(|id, params| { + let result = handler(serde_json::from_slice::(params).unwrap()); + let result = serde_json::to_string(&result).unwrap(); + let result = serde_json::from_str::<&RawValue>(&result).unwrap(); + let response = AnyResponse { + id, + error: None, + result: Some(result), + }; + (serde_json::to_vec(&response).unwrap(), responded_tx) + }), + ); + if prev_handler.is_some() { + panic!( + "registered a new handler for LSP method '{}' before the previous handler was called", + T::METHOD + ); + } + responded_rx } pub async fn start_progress(&mut self, token: impl Into) { @@ -578,39 +669,37 @@ impl FakeLanguageServer { .await; } - async fn send(&mut self, message: Vec) { - self.stdout + async fn send(stdout: &mut smol::io::BufWriter, message: &[u8]) { + stdout .write_all(CONTENT_LEN_HEADER.as_bytes()) .await .unwrap(); - self.stdout + stdout .write_all((format!("{}", message.len())).as_bytes()) .await .unwrap(); - self.stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap(); - self.stdout.write_all(&message).await.unwrap(); - self.stdout.flush().await.unwrap(); + stdout.write_all("\r\n\r\n".as_bytes()).await.unwrap(); + stdout.write_all(&message).await.unwrap(); + stdout.flush().await.unwrap(); } - 
async fn receive(&mut self) { - self.buffer.clear(); - self.stdin - .read_until(b'\n', &mut self.buffer) - .await - .unwrap(); - self.stdin - .read_until(b'\n', &mut self.buffer) - .await - .unwrap(); - let message_len: usize = std::str::from_utf8(&self.buffer) + async fn receive( + stdin: &mut smol::io::BufReader, + buffer: &mut Vec, + ) -> Result<()> { + buffer.clear(); + stdin.read_until(b'\n', buffer).await?; + stdin.read_until(b'\n', buffer).await?; + let message_len: usize = std::str::from_utf8(buffer) .unwrap() .strip_prefix(CONTENT_LEN_HEADER) .unwrap() .trim_end() .parse() .unwrap(); - self.buffer.resize(message_len, 0); - self.stdin.read_exact(&mut self.buffer).await.unwrap(); + buffer.resize(message_len, 0); + stdin.read_exact(buffer).await?; + Ok(()) } } @@ -618,10 +707,16 @@ impl FakeLanguageServer { mod tests { use super::*; use gpui::TestAppContext; - use simplelog::SimpleLogger; use unindent::Unindent; use util::test::temp_tree; + #[ctor::ctor] + fn init_logger() { + if std::env::var("RUST_LOG").is_ok() { + env_logger::init(); + } + } + #[gpui::test] async fn test_rust_analyzer(cx: TestAppContext) { let lib_source = r#" @@ -643,14 +738,9 @@ mod tests { })); let lib_file_uri = Url::from_file_path(root_dir.path().join("src/lib.rs")).unwrap(); - let server = cx.read(|cx| { - LanguageServer::new( - Path::new("rust-analyzer"), - root_dir.path(), - cx.background().clone(), - ) - .unwrap() - }); + let server = + LanguageServer::new(Path::new("rust-analyzer"), root_dir.path(), cx.background()) + .unwrap(); server.next_idle_notification().await; server @@ -687,9 +777,7 @@ mod tests { #[gpui::test] async fn test_fake(cx: TestAppContext) { - SimpleLogger::init(log::LevelFilter::Info, Default::default()).unwrap(); - - let (server, mut fake) = LanguageServer::fake(cx.background()).await; + let (server, mut fake) = LanguageServer::fake(&cx).await; let (message_tx, message_rx) = channel::unbounded(); let (diagnostics_tx, diagnostics_rx) = channel::unbounded(); @@ 
-741,9 +829,9 @@ mod tests { "file://b/c" ); + fake.handle_request::(|_| ()); + drop(server); - let (shutdown_request, _) = fake.receive_request::().await; - fake.respond(shutdown_request, ()).await; fake.receive_notification::().await; } diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index d302be874f810f460e17bd110bea0b82eb3aad0b..1130282e0e9257195995818229922894c3da1455 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -7,7 +7,11 @@ edition = "2021" path = "src/project.rs" [features] -test-support = ["language/test-support", "text/test-support"] +test-support = [ + "client/test-support", + "language/test-support", + "text/test-support", +] [dependencies] text = { path = "../text" } @@ -45,6 +49,5 @@ lsp = { path = "../lsp", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rpc = { path = "../rpc", features = ["test-support"] } rand = "0.8.3" -simplelog = "0.9" tempdir = { version = "0.3.7" } unindent = "0.1.7" diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 8ef076abc33ce5db6172356718cf8a81b9e3ab1f..7ba2076fcb6515dac4951ddd83824365ebd52a98 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -13,6 +13,11 @@ use text::Rope; #[async_trait::async_trait] pub trait Fs: Send + Sync { + async fn create_dir(&self, path: &Path) -> Result<()>; + async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()>; + async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()>; + async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()>; + async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()>; async fn load(&self, path: &Path) -> Result; async fn save(&self, path: &Path, text: &Rope) -> Result<()>; async fn canonicalize(&self, path: &Path) -> Result; @@ -32,6 +37,24 @@ pub trait Fs: Send + Sync { fn as_fake(&self) -> &FakeFs; } +#[derive(Copy, Clone, Default)] +pub 
struct CreateOptions { + pub overwrite: bool, + pub ignore_if_exists: bool, +} + +#[derive(Copy, Clone, Default)] +pub struct RenameOptions { + pub overwrite: bool, + pub ignore_if_exists: bool, +} + +#[derive(Copy, Clone, Default)] +pub struct RemoveOptions { + pub recursive: bool, + pub ignore_if_not_exists: bool, +} + #[derive(Clone, Debug)] pub struct Metadata { pub inode: u64, @@ -44,6 +67,60 @@ pub struct RealFs; #[async_trait::async_trait] impl Fs for RealFs { + async fn create_dir(&self, path: &Path) -> Result<()> { + Ok(smol::fs::create_dir_all(path).await?) + } + + async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> { + let mut open_options = smol::fs::OpenOptions::new(); + open_options.write(true).create(true); + if options.overwrite { + open_options.truncate(true); + } else if !options.ignore_if_exists { + open_options.create_new(true); + } + open_options.open(path).await?; + Ok(()) + } + + async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> { + if !options.overwrite && smol::fs::metadata(target).await.is_ok() { + if options.ignore_if_exists { + return Ok(()); + } else { + return Err(anyhow!("{target:?} already exists")); + } + } + + smol::fs::rename(source, target).await?; + Ok(()) + } + + async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { + let result = if options.recursive { + smol::fs::remove_dir_all(path).await + } else { + smol::fs::remove_dir(path).await + }; + match result { + Ok(()) => Ok(()), + Err(err) if err.kind() == io::ErrorKind::NotFound && options.ignore_if_not_exists => { + Ok(()) + } + Err(err) => Err(err)?, + } + } + + async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> { + match smol::fs::remove_file(path).await { + Ok(()) => Ok(()), + Err(err) if err.kind() == io::ErrorKind::NotFound && options.ignore_if_not_exists => { + Ok(()) + } + Err(err) => Err(err)?, + } + } + async fn load(&self, path: &Path) -> Result { 
let mut file = smol::fs::File::open(path).await?; let mut text = String::new(); @@ -162,15 +239,19 @@ impl FakeFsState { } } - async fn emit_event(&mut self, paths: &[&Path]) { + async fn emit_event(&mut self, paths: I) + where + I: IntoIterator, + T: Into, + { use postage::prelude::Sink as _; let events = paths - .iter() + .into_iter() .map(|path| fsevent::Event { event_id: 0, flags: fsevent::StreamFlags::empty(), - path: path.to_path_buf(), + path: path.into(), }) .collect(); @@ -292,46 +373,163 @@ impl FakeFs { } .boxed() } +} - pub async fn remove(&self, path: &Path) -> Result<()> { +#[cfg(any(test, feature = "test-support"))] +#[async_trait::async_trait] +impl Fs for FakeFs { + async fn create_dir(&self, path: &Path) -> Result<()> { + self.executor.simulate_random_delay().await; + let state = &mut *self.state.lock().await; + let mut ancestor_path = PathBuf::new(); + let mut created_dir_paths = Vec::new(); + for component in path.components() { + ancestor_path.push(component); + let entry = state + .entries + .entry(ancestor_path.clone()) + .or_insert_with(|| { + let inode = state.next_inode; + state.next_inode += 1; + created_dir_paths.push(ancestor_path.clone()); + FakeFsEntry { + metadata: Metadata { + inode, + mtime: SystemTime::now(), + is_dir: true, + is_symlink: false, + }, + content: None, + } + }); + if !entry.metadata.is_dir { + return Err(anyhow!( + "cannot create directory because {:?} is a file", + ancestor_path + )); + } + } + state.emit_event(&created_dir_paths).await; + + Ok(()) + } + + async fn create_file(&self, path: &Path, options: CreateOptions) -> Result<()> { + self.executor.simulate_random_delay().await; let mut state = self.state.lock().await; state.validate_path(path)?; - state.entries.retain(|path, _| !path.starts_with(path)); + if let Some(entry) = state.entries.get_mut(path) { + if entry.metadata.is_dir || entry.metadata.is_symlink { + return Err(anyhow!( + "cannot create file because {:?} is a dir or a symlink", + path + )); + } + 
+ if options.overwrite { + entry.metadata.mtime = SystemTime::now(); + entry.content = Some(Default::default()); + } else if !options.ignore_if_exists { + return Err(anyhow!( + "cannot create file because {:?} already exists", + path + )); + } + } else { + let inode = state.next_inode; + state.next_inode += 1; + let entry = FakeFsEntry { + metadata: Metadata { + inode, + mtime: SystemTime::now(), + is_dir: false, + is_symlink: false, + }, + content: Some(Default::default()), + }; + state.entries.insert(path.to_path_buf(), entry); + } state.emit_event(&[path]).await; + Ok(()) } - pub async fn rename(&self, source: &Path, target: &Path) -> Result<()> { + async fn rename(&self, source: &Path, target: &Path, options: RenameOptions) -> Result<()> { let mut state = self.state.lock().await; state.validate_path(source)?; state.validate_path(target)?; - if state.entries.contains_key(target) { - Err(anyhow!("target path already exists")) - } else { - let mut removed = Vec::new(); - state.entries.retain(|path, entry| { - if let Ok(relative_path) = path.strip_prefix(source) { - removed.push((relative_path.to_path_buf(), entry.clone())); - false - } else { - true + + if !options.overwrite && state.entries.contains_key(target) { + if options.ignore_if_exists { + return Ok(()); + } else { + return Err(anyhow!("{target:?} already exists")); + } + } + + let mut removed = Vec::new(); + state.entries.retain(|path, entry| { + if let Ok(relative_path) = path.strip_prefix(source) { + removed.push((relative_path.to_path_buf(), entry.clone())); + false + } else { + true + } + }); + + for (relative_path, entry) in removed { + let new_path = target.join(relative_path); + state.entries.insert(new_path, entry); + } + + state.emit_event(&[source, target]).await; + Ok(()) + } + + async fn remove_dir(&self, path: &Path, options: RemoveOptions) -> Result<()> { + let mut state = self.state.lock().await; + state.validate_path(path)?; + if let Some(entry) = state.entries.get(path) { + if 
!entry.metadata.is_dir { + return Err(anyhow!("cannot remove {path:?} because it is not a dir")); + } + + if !options.recursive { + let descendants = state + .entries + .keys() + .filter(|path| path.starts_with(path)) + .count(); + if descendants > 1 { + return Err(anyhow!("{path:?} is not empty")); } - }); + } + + state.entries.retain(|path, _| !path.starts_with(path)); + state.emit_event(&[path]).await; + } else if !options.ignore_if_not_exists { + return Err(anyhow!("{path:?} does not exist")); + } - for (relative_path, entry) in removed { - let new_path = target.join(relative_path); - state.entries.insert(new_path, entry); + Ok(()) + } + + async fn remove_file(&self, path: &Path, options: RemoveOptions) -> Result<()> { + let mut state = self.state.lock().await; + state.validate_path(path)?; + if let Some(entry) = state.entries.get(path) { + if entry.metadata.is_dir { + return Err(anyhow!("cannot remove {path:?} because it is not a file")); } - state.emit_event(&[source, target]).await; - Ok(()) + state.entries.remove(path); + state.emit_event(&[path]).await; + } else if !options.ignore_if_not_exists { + return Err(anyhow!("{path:?} does not exist")); } + Ok(()) } -} -#[cfg(any(test, feature = "test-support"))] -#[async_trait::async_trait] -impl Fs for FakeFs { async fn load(&self, path: &Path) -> Result { self.executor.simulate_random_delay().await; let state = self.state.lock().await; diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index a9a9470005bd09a1364aef7c45d6f39b1c26e0d8..4c54bbef6d30cc2fbec98771ff9d0c7b0824345b 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -13,9 +13,11 @@ use gpui::{ WeakModelHandle, }; use language::{ + point_from_lsp, proto::{deserialize_anchor, serialize_anchor}, - range_from_lsp, Bias, Buffer, Diagnostic, DiagnosticEntry, File as _, Language, - LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, + range_from_lsp, AnchorRangeExt, Bias, Buffer, CodeAction, Completion, 
CompletionLabel, + Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, PointUtf16, ToLspPosition, + ToOffset, ToPointUtf16, Transaction, }; use lsp::{DiagnosticSeverity, LanguageServer}; use postage::{prelude::Stream, watch}; @@ -25,6 +27,7 @@ use std::{ ops::Range, path::{Path, PathBuf}, sync::{atomic::AtomicBool, Arc}, + time::Instant, }; use util::{post_inc, ResultExt, TryFutureExt as _}; @@ -107,6 +110,9 @@ pub struct Definition { pub target_range: Range, } +#[derive(Default)] +pub struct ProjectTransaction(pub HashMap, language::Transaction>); + impl DiagnosticSummary { fn new<'a, T: 'a>(diagnostics: impl IntoIterator>) -> Self { let mut this = Self { @@ -274,31 +280,43 @@ impl Project { user_store, fs, subscriptions: vec![ - client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project), - client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree), - client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), - client.subscribe_to_entity( + client.add_entity_message_handler(remote_id, cx, Self::handle_unshare_project), + client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator), + client.add_entity_message_handler( + remote_id, + cx, + Self::handle_remove_collaborator, + ), + client.add_entity_message_handler(remote_id, cx, Self::handle_share_worktree), + client.add_entity_message_handler( + remote_id, + cx, + Self::handle_unregister_worktree, + ), + client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree), + client.add_entity_message_handler( remote_id, cx, Self::handle_update_diagnostic_summary, ), - client.subscribe_to_entity( + client.add_entity_message_handler( remote_id, cx, Self::handle_disk_based_diagnostics_updating, ), - 
client.subscribe_to_entity( + client.add_entity_message_handler( remote_id, cx, Self::handle_disk_based_diagnostics_updated, ), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer_file), - client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_reloaded), - client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), + client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer), + client.add_entity_message_handler( + remote_id, + cx, + Self::handle_update_buffer_file, + ), + client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_reloaded), + client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved), ], client, client_state: ProjectClientState::Remote { @@ -316,6 +334,15 @@ impl Project { })) } + #[cfg(any(test, feature = "test-support"))] + pub fn test(fs: Arc, cx: &mut gpui::TestAppContext) -> ModelHandle { + let languages = Arc::new(LanguageRegistry::new()); + let http_client = client::test::FakeHttpClient::with_404_response(); + let client = client::Client::new(http_client.clone()); + let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); + cx.update(|cx| Project::local(client, user_store, languages, fs, cx)) + } + fn set_remote_id(&mut self, remote_id: Option, cx: &mut ModelContext) { if let ProjectClientState::Local { remote_id_tx, .. 
} = &mut self.client_state { *remote_id_tx.borrow_mut() = remote_id; @@ -325,22 +352,24 @@ impl Project { if let Some(remote_id) = remote_id { let client = &self.client; self.subscriptions.extend([ - client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), - client.subscribe_to_entity(remote_id, cx, Self::handle_format_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_get_completions), - client.subscribe_to_entity( + client.add_entity_request_handler(remote_id, cx, Self::handle_open_buffer), + client.add_entity_message_handler(remote_id, cx, Self::handle_close_buffer), + client.add_entity_message_handler(remote_id, cx, Self::handle_add_collaborator), + client.add_entity_message_handler(remote_id, cx, Self::handle_remove_collaborator), + client.add_entity_message_handler(remote_id, cx, Self::handle_update_worktree), + client.add_entity_message_handler(remote_id, cx, Self::handle_update_buffer), + client.add_entity_request_handler(remote_id, cx, Self::handle_save_buffer), + client.add_entity_message_handler(remote_id, cx, Self::handle_buffer_saved), + client.add_entity_request_handler(remote_id, cx, Self::handle_format_buffers), + client.add_entity_request_handler(remote_id, cx, Self::handle_get_completions), + client.add_entity_request_handler( remote_id, cx, Self::handle_apply_additional_edits_for_completion, ), - client.subscribe_to_entity(remote_id, cx, Self::handle_get_definition), + 
client.add_entity_request_handler(remote_id, cx, Self::handle_get_code_actions), + client.add_entity_request_handler(remote_id, cx, Self::handle_apply_code_action), + client.add_entity_request_handler(remote_id, cx, Self::handle_get_definition), ]); } } @@ -596,9 +625,44 @@ impl Project { }) .await?; let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?; - this.update(&mut cx, |this, cx| { - this.deserialize_remote_buffer(buffer, cx) - }) + this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx)) + }) + } + + fn open_local_buffer_from_lsp_path( + &mut self, + abs_path: lsp::Url, + lang_name: String, + lang_server: Arc, + cx: &mut ModelContext, + ) -> Task>> { + cx.spawn(|this, mut cx| async move { + let abs_path = abs_path + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + let (worktree, relative_path) = if let Some(result) = + this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx)) + { + result + } else { + let worktree = this + .update(&mut cx, |this, cx| { + this.create_local_worktree(&abs_path, true, cx) + }) + .await?; + this.update(&mut cx, |this, cx| { + this.language_servers + .insert((worktree.read(cx).id(), lang_name), lang_server); + }); + (worktree, PathBuf::new()) + }; + + let project_path = ProjectPath { + worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()), + path: relative_path.into(), + }; + this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx)) + .await }) } @@ -730,7 +794,7 @@ impl Project { if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) { if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) { buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(None, diagnostics, cx).log_err(); + buffer.update_diagnostics(diagnostics, None, cx).log_err(); }); } } @@ -815,7 +879,7 @@ impl Project { // Process all the LSP events. 
cx.spawn_weak(|this, mut cx| async move { while let Ok(message) = diagnostics_rx.recv().await { - let this = cx.read(|cx| this.upgrade(cx))?; + let this = this.upgrade(&cx)?; match message { LspEvent::DiagnosticsStart => { this.update(&mut cx, |this, cx| { @@ -975,7 +1039,7 @@ impl Project { .map_or(false, |file| *file.path() == project_path.path) { buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(version, diagnostics.clone(), cx) + buffer.update_diagnostics(diagnostics.clone(), version, cx) })?; break; } @@ -991,7 +1055,107 @@ impl Project { Ok(()) } - pub fn definition( + pub fn format( + &self, + buffers: HashSet>, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + let mut local_buffers = Vec::new(); + let mut remote_buffers = None; + for buffer_handle in buffers { + let buffer = buffer_handle.read(cx); + let worktree; + if let Some(file) = File::from_dyn(buffer.file()) { + worktree = file.worktree.clone(); + if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) { + let lang_server; + if let Some(lang) = buffer.language() { + if let Some(server) = self + .language_servers + .get(&(worktree.read(cx).id(), lang.name().to_string())) + { + lang_server = server.clone(); + } else { + return Task::ready(Ok(Default::default())); + }; + } else { + return Task::ready(Ok(Default::default())); + } + + local_buffers.push((buffer_handle, buffer_abs_path, lang_server)); + } else { + remote_buffers.get_or_insert(Vec::new()).push(buffer_handle); + } + } else { + return Task::ready(Ok(Default::default())); + } + } + + let remote_buffers = self.remote_id().zip(remote_buffers); + let client = self.client.clone(); + + cx.spawn(|this, mut cx| async move { + let mut project_transaction = ProjectTransaction::default(); + + if let Some((project_id, remote_buffers)) = remote_buffers { + let response = client + .request(proto::FormatBuffers { + project_id, + buffer_ids: remote_buffers + .iter() + .map(|buffer| buffer.read_with(&cx, |buffer, _| 
buffer.remote_id())) + .collect(), + }) + .await? + .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + project_transaction = this + .update(&mut cx, |this, cx| { + this.deserialize_project_transaction(response, push_to_history, cx) + }) + .await?; + } + + for (buffer, buffer_abs_path, lang_server) in local_buffers { + let lsp_edits = lang_server + .request::(lsp::DocumentFormattingParams { + text_document: lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(&buffer_abs_path).unwrap(), + ), + options: Default::default(), + work_done_progress_params: Default::default(), + }) + .await?; + + if let Some(lsp_edits) = lsp_edits { + let edits = buffer + .update(&mut cx, |buffer, cx| { + buffer.edits_from_lsp(lsp_edits, None, cx) + }) + .await?; + buffer.update(&mut cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + for (range, text) in edits { + buffer.edit([range], text, cx); + } + if buffer.end_transaction(cx).is_some() { + let transaction = buffer.finalize_last_transaction().unwrap().clone(); + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + project_transaction.0.insert(cx.handle(), transaction); + } + }); + } + } + + Ok(project_transaction) + }) + } + + pub fn definition( &self, source_buffer_handle: &ModelHandle, position: T, @@ -1005,11 +1169,12 @@ impl Project { worktree = file.worktree.clone(); buffer_abs_path = file.as_local().map(|f| f.abs_path(cx)); } else { - return Task::ready(Err(anyhow!("buffer does not belong to any worktree"))); + return Task::ready(Ok(Default::default())); }; + let position = position.to_point_utf16(source_buffer); + if worktree.read(cx).as_local().is_some() { - let point = source_buffer.offset_to_point_utf16(position.to_offset(source_buffer)); let buffer_abs_path = buffer_abs_path.unwrap(); let lang_name; let lang_server; @@ -1021,10 +1186,10 @@ impl Project { { lang_server = server.clone(); } else { - return Task::ready(Err(anyhow!("buffer does not 
have a language server"))); + return Task::ready(Ok(Default::default())); }; } else { - return Task::ready(Err(anyhow!("buffer does not have a language"))); + return Task::ready(Ok(Default::default())); } cx.spawn(|this, mut cx| async move { @@ -1034,7 +1199,7 @@ impl Project { text_document: lsp::TextDocumentIdentifier::new( lsp::Url::from_file_path(&buffer_abs_path).unwrap(), ), - position: lsp::Position::new(point.row, point.column), + position: lsp::Position::new(position.row, position.column), }, work_done_progress_params: Default::default(), partial_result_params: Default::default(), @@ -1061,42 +1226,23 @@ impl Project { } for (target_uri, target_range) in unresolved_locations { - let abs_path = target_uri - .to_file_path() - .map_err(|_| anyhow!("invalid target path"))?; - - let (worktree, relative_path) = if let Some(result) = - this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx)) - { - result - } else { - let worktree = this - .update(&mut cx, |this, cx| { - this.create_local_worktree(&abs_path, true, cx) - }) - .await?; - this.update(&mut cx, |this, cx| { - this.language_servers.insert( - (worktree.read(cx).id(), lang_name.clone()), - lang_server.clone(), - ); - }); - (worktree, PathBuf::new()) - }; - - let project_path = ProjectPath { - worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()), - path: relative_path.into(), - }; let target_buffer_handle = this - .update(&mut cx, |this, cx| this.open_buffer(project_path, cx)) + .update(&mut cx, |this, cx| { + this.open_local_buffer_from_lsp_path( + target_uri, + lang_name.clone(), + lang_server.clone(), + cx, + ) + }) .await?; + cx.read(|cx| { let target_buffer = target_buffer_handle.read(cx); let target_start = target_buffer - .clip_point_utf16(target_range.start.to_point_utf16(), Bias::Left); + .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left); let target_end = target_buffer - .clip_point_utf16(target_range.end.to_point_utf16(), Bias::Left); + 
.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left); definitions.push(Definition { target_buffer: target_buffer_handle, target_range: target_buffer.anchor_after(target_start) @@ -1120,7 +1266,7 @@ impl Project { this.update(&mut cx, |this, cx| { let mut definitions = Vec::new(); for definition in response.definitions { - let target_buffer = this.deserialize_remote_buffer( + let target_buffer = this.deserialize_buffer( definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?, cx, )?; @@ -1141,6 +1287,479 @@ impl Project { Ok(definitions) }) }) + } else { + Task::ready(Ok(Default::default())) + } + } + + pub fn completions( + &self, + source_buffer_handle: &ModelHandle, + position: T, + cx: &mut ModelContext, + ) -> Task>> { + let source_buffer_handle = source_buffer_handle.clone(); + let source_buffer = source_buffer_handle.read(cx); + let buffer_id = source_buffer.remote_id(); + let language = source_buffer.language().cloned(); + let worktree; + let buffer_abs_path; + if let Some(file) = File::from_dyn(source_buffer.file()) { + worktree = file.worktree.clone(); + buffer_abs_path = file.as_local().map(|f| f.abs_path(cx)); + } else { + return Task::ready(Ok(Default::default())); + }; + + let position = position.to_point_utf16(source_buffer); + let anchor = source_buffer.anchor_after(position); + + if worktree.read(cx).as_local().is_some() { + let buffer_abs_path = buffer_abs_path.unwrap(); + let lang_server = if let Some(server) = source_buffer.language_server().cloned() { + server + } else { + return Task::ready(Ok(Default::default())); + }; + + cx.spawn(|_, cx| async move { + let completions = lang_server + .request::(lsp::CompletionParams { + text_document_position: lsp::TextDocumentPositionParams::new( + lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(buffer_abs_path).unwrap(), + ), + position.to_lsp_position(), + ), + context: Default::default(), + work_done_progress_params: Default::default(), + partial_result_params: 
Default::default(), + }) + .await?; + + let completions = if let Some(completions) = completions { + match completions { + lsp::CompletionResponse::Array(completions) => completions, + lsp::CompletionResponse::List(list) => list.items, + } + } else { + Default::default() + }; + + source_buffer_handle.read_with(&cx, |this, _| { + Ok(completions.into_iter().filter_map(|lsp_completion| { + let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? { + lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()), + lsp::CompletionTextEdit::InsertAndReplace(_) => { + log::info!("received an insert and replace completion but we don't yet support that"); + return None + }, + }; + + let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left); + let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left) ; + if clipped_start == old_range.start && clipped_end == old_range.end { + Some(Completion { + old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end), + new_text, + label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)).unwrap_or_else(|| CompletionLabel::plain(&lsp_completion)), + lsp_completion, + }) + } else { + None + } + }).collect()) + }) + + }) + } else if let Some(project_id) = self.remote_id() { + let rpc = self.client.clone(); + cx.foreground().spawn(async move { + let response = rpc + .request(proto::GetCompletions { + project_id, + buffer_id, + position: Some(language::proto::serialize_anchor(&anchor)), + }) + .await?; + response + .completions + .into_iter() + .map(|completion| { + language::proto::deserialize_completion(completion, language.as_ref()) + }) + .collect() + }) + } else { + Task::ready(Ok(Default::default())) + } + } + + pub fn apply_additional_edits_for_completion( + &self, + buffer_handle: ModelHandle, + completion: Completion, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task>> { + let buffer = buffer_handle.read(cx); + let 
buffer_id = buffer.remote_id(); + + if self.is_local() { + let lang_server = if let Some(language_server) = buffer.language_server() { + language_server.clone() + } else { + return Task::ready(Err(anyhow!("buffer does not have a language server"))); + }; + + cx.spawn(|_, mut cx| async move { + let resolved_completion = lang_server + .request::(completion.lsp_completion) + .await?; + if let Some(edits) = resolved_completion.additional_text_edits { + let edits = buffer_handle + .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx)) + .await?; + buffer_handle.update(&mut cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + for (range, text) in edits { + buffer.edit([range], text, cx); + } + let transaction = if buffer.end_transaction(cx).is_some() { + let transaction = buffer.finalize_last_transaction().unwrap().clone(); + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + Some(transaction) + } else { + None + }; + Ok(transaction) + }) + } else { + Ok(None) + } + }) + } else if let Some(project_id) = self.remote_id() { + let client = self.client.clone(); + cx.spawn(|_, mut cx| async move { + let response = client + .request(proto::ApplyCompletionAdditionalEdits { + project_id, + buffer_id, + completion: Some(language::proto::serialize_completion(&completion)), + }) + .await?; + + if let Some(transaction) = response.transaction { + let transaction = language::proto::deserialize_transaction(transaction)?; + buffer_handle + .update(&mut cx, |buffer, _| { + buffer.wait_for_edits(transaction.edit_ids.iter().copied()) + }) + .await; + if push_to_history { + buffer_handle.update(&mut cx, |buffer, _| { + buffer.push_transaction(transaction.clone(), Instant::now()); + }); + } + Ok(Some(transaction)) + } else { + Ok(None) + } + }) + } else { + Task::ready(Err(anyhow!("project does not have a remote id"))) + } + } + + pub fn code_actions( + &self, + buffer_handle: &ModelHandle, + range: Range, + cx: &mut 
ModelContext, + ) -> Task>> { + let buffer_handle = buffer_handle.clone(); + let buffer = buffer_handle.read(cx); + let buffer_id = buffer.remote_id(); + let worktree; + let buffer_abs_path; + if let Some(file) = File::from_dyn(buffer.file()) { + worktree = file.worktree.clone(); + buffer_abs_path = file.as_local().map(|f| f.abs_path(cx)); + } else { + return Task::ready(Ok(Default::default())); + }; + let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); + + if worktree.read(cx).as_local().is_some() { + let buffer_abs_path = buffer_abs_path.unwrap(); + let lang_name; + let lang_server; + if let Some(lang) = buffer.language() { + lang_name = lang.name().to_string(); + if let Some(server) = self + .language_servers + .get(&(worktree.read(cx).id(), lang_name.clone())) + { + lang_server = server.clone(); + } else { + return Task::ready(Ok(Default::default())); + }; + } else { + return Task::ready(Ok(Default::default())); + } + + let actions = + lang_server.request::(lsp::CodeActionParams { + text_document: lsp::TextDocumentIdentifier::new( + lsp::Url::from_file_path(buffer_abs_path).unwrap(), + ), + range: lsp::Range::new( + range.start.to_point_utf16(buffer).to_lsp_position(), + range.end.to_point_utf16(buffer).to_lsp_position(), + ), + work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + context: lsp::CodeActionContext { + diagnostics: Default::default(), + only: Some(vec![ + lsp::CodeActionKind::QUICKFIX, + lsp::CodeActionKind::REFACTOR, + lsp::CodeActionKind::REFACTOR_EXTRACT, + ]), + }, + }); + cx.foreground().spawn(async move { + Ok(actions + .await? 
+ .unwrap_or_default() + .into_iter() + .filter_map(|entry| { + if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry { + Some(CodeAction { + range: range.clone(), + lsp_action, + }) + } else { + None + } + }) + .collect()) + }) + } else if let Some(project_id) = self.remote_id() { + let rpc = self.client.clone(); + cx.foreground().spawn(async move { + let response = rpc + .request(proto::GetCodeActions { + project_id, + buffer_id, + start: Some(language::proto::serialize_anchor(&range.start)), + end: Some(language::proto::serialize_anchor(&range.end)), + }) + .await?; + response + .actions + .into_iter() + .map(language::proto::deserialize_code_action) + .collect() + }) + } else { + Task::ready(Ok(Default::default())) + } + } + + pub fn apply_code_action( + &self, + buffer_handle: ModelHandle, + mut action: CodeAction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + if self.is_local() { + let buffer = buffer_handle.read(cx); + let lang_name = if let Some(lang) = buffer.language() { + lang.name().to_string() + } else { + return Task::ready(Ok(Default::default())); + }; + let lang_server = if let Some(language_server) = buffer.language_server() { + language_server.clone() + } else { + return Task::ready(Err(anyhow!("buffer does not have a language server"))); + }; + let range = action.range.to_point_utf16(buffer); + let fs = self.fs.clone(); + + cx.spawn(|this, mut cx| async move { + if let Some(lsp_range) = action + .lsp_action + .data + .as_mut() + .and_then(|d| d.get_mut("codeActionParams")) + .and_then(|d| d.get_mut("range")) + { + *lsp_range = serde_json::to_value(&lsp::Range::new( + range.start.to_lsp_position(), + range.end.to_lsp_position(), + )) + .unwrap(); + action.lsp_action = lang_server + .request::(action.lsp_action) + .await?; + } else { + let actions = this + .update(&mut cx, |this, cx| { + this.code_actions(&buffer_handle, action.range, cx) + }) + .await?; + action.lsp_action = actions + .into_iter() + .find(|a| 
a.lsp_action.title == action.lsp_action.title) + .ok_or_else(|| anyhow!("code action is outdated"))? + .lsp_action; + } + + let mut operations = Vec::new(); + if let Some(edit) = action.lsp_action.edit { + if let Some(document_changes) = edit.document_changes { + match document_changes { + lsp::DocumentChanges::Edits(edits) => operations + .extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit)), + lsp::DocumentChanges::Operations(ops) => operations = ops, + } + } else if let Some(changes) = edit.changes { + operations.extend(changes.into_iter().map(|(uri, edits)| { + lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { + text_document: lsp::OptionalVersionedTextDocumentIdentifier { + uri, + version: None, + }, + edits: edits.into_iter().map(lsp::OneOf::Left).collect(), + }) + })); + } + } + + let mut project_transaction = ProjectTransaction::default(); + for operation in operations { + match operation { + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => { + let abs_path = op + .uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + + if let Some(parent_path) = abs_path.parent() { + fs.create_dir(parent_path).await?; + } + if abs_path.ends_with("/") { + fs.create_dir(&abs_path).await?; + } else { + fs.create_file( + &abs_path, + op.options.map(Into::into).unwrap_or_default(), + ) + .await?; + } + } + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => { + let source_abs_path = op + .old_uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + let target_abs_path = op + .new_uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + fs.rename( + &source_abs_path, + &target_abs_path, + op.options.map(Into::into).unwrap_or_default(), + ) + .await?; + } + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => { + let abs_path = op + .uri + .to_file_path() + .map_err(|_| anyhow!("can't convert URI to path"))?; + let options = 
op.options.map(Into::into).unwrap_or_default(); + if abs_path.ends_with("/") { + fs.remove_dir(&abs_path, options).await?; + } else { + fs.remove_file(&abs_path, options).await?; + } + } + lsp::DocumentChangeOperation::Edit(op) => { + let buffer_to_edit = this + .update(&mut cx, |this, cx| { + this.open_local_buffer_from_lsp_path( + op.text_document.uri, + lang_name.clone(), + lang_server.clone(), + cx, + ) + }) + .await?; + + let edits = buffer_to_edit + .update(&mut cx, |buffer, cx| { + let edits = op.edits.into_iter().map(|edit| match edit { + lsp::OneOf::Left(edit) => edit, + lsp::OneOf::Right(edit) => edit.text_edit, + }); + buffer.edits_from_lsp(edits, op.text_document.version, cx) + }) + .await?; + + let transaction = buffer_to_edit.update(&mut cx, |buffer, cx| { + buffer.finalize_last_transaction(); + buffer.start_transaction(); + for (range, text) in edits { + buffer.edit([range], text, cx); + } + let transaction = if buffer.end_transaction(cx).is_some() { + let transaction = + buffer.finalize_last_transaction().unwrap().clone(); + if !push_to_history { + buffer.forget_transaction(transaction.id); + } + Some(transaction) + } else { + None + }; + + transaction + }); + if let Some(transaction) = transaction { + project_transaction.0.insert(buffer_to_edit, transaction); + } + } + } + } + + Ok(project_transaction) + }) + } else if let Some(project_id) = self.remote_id() { + let client = self.client.clone(); + let request = proto::ApplyCodeAction { + project_id, + buffer_id: buffer_handle.read(cx).remote_id(), + action: Some(language::proto::serialize_code_action(&action)), + }; + cx.spawn(|this, mut cx| async move { + let response = client + .request(request) + .await? 
+ .transaction + .ok_or_else(|| anyhow!("missing transaction"))?; + this.update(&mut cx, |this, cx| { + this.deserialize_project_transaction(response, push_to_history, cx) + }) + .await + }) } else { Task::ready(Err(anyhow!("project does not have a remote id"))) } @@ -1392,469 +2011,541 @@ impl Project { // RPC message handlers - fn handle_unshare_project( - &mut self, + async fn handle_unshare_project( + this: ModelHandle, _: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - if let ProjectClientState::Remote { - sharing_has_stopped, - .. - } = &mut self.client_state - { - *sharing_has_stopped = true; - self.collaborators.clear(); - cx.notify(); - Ok(()) - } else { - unreachable!() - } + this.update(&mut cx, |this, cx| { + if let ProjectClientState::Remote { + sharing_has_stopped, + .. + } = &mut this.client_state + { + *sharing_has_stopped = true; + this.collaborators.clear(); + cx.notify(); + } else { + unreachable!() + } + }); + + Ok(()) } - fn handle_add_collaborator( - &mut self, + async fn handle_add_collaborator( + this: ModelHandle, mut envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let user_store = self.user_store.clone(); + let user_store = this.read_with(&cx, |this, _| this.user_store.clone()); let collaborator = envelope .payload .collaborator .take() .ok_or_else(|| anyhow!("empty collaborator"))?; - cx.spawn(|this, mut cx| { - async move { - let collaborator = - Collaborator::from_proto(collaborator, &user_store, &mut cx).await?; - this.update(&mut cx, |this, cx| { - this.collaborators - .insert(collaborator.peer_id, collaborator); - cx.notify(); - }); - Ok(()) - } - .log_err() - }) - .detach(); + let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?; + this.update(&mut cx, |this, cx| { + this.collaborators + .insert(collaborator.peer_id, collaborator); + cx.notify(); + }); Ok(()) } - fn handle_remove_collaborator( - 
&mut self, + async fn handle_remove_collaborator( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let peer_id = PeerId(envelope.payload.peer_id); - let replica_id = self - .collaborators - .remove(&peer_id) - .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? - .replica_id; - self.shared_buffers.remove(&peer_id); - for (_, buffer) in &self.open_buffers { - if let Some(buffer) = buffer.upgrade(cx) { - buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx)); + this.update(&mut cx, |this, cx| { + let peer_id = PeerId(envelope.payload.peer_id); + let replica_id = this + .collaborators + .remove(&peer_id) + .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? + .replica_id; + this.shared_buffers.remove(&peer_id); + for (_, buffer) in &this.open_buffers { + if let Some(buffer) = buffer.upgrade(cx) { + buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx)); + } } - } - cx.notify(); - Ok(()) + cx.notify(); + Ok(()) + }) } - fn handle_share_worktree( - &mut self, + async fn handle_share_worktree( + this: ModelHandle, envelope: TypedEnvelope, client: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?; - let replica_id = self.replica_id(); - let worktree = envelope - .payload - .worktree - .ok_or_else(|| anyhow!("invalid worktree"))?; - let (worktree, load_task) = Worktree::remote(remote_id, replica_id, worktree, client, cx); - self.add_worktree(&worktree, cx); - load_task.detach(); - Ok(()) + this.update(&mut cx, |this, cx| { + let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?; + let replica_id = this.replica_id(); + let worktree = envelope + .payload + .worktree + .ok_or_else(|| anyhow!("invalid worktree"))?; + let (worktree, load_task) = + Worktree::remote(remote_id, replica_id, worktree, client, cx); + this.add_worktree(&worktree, cx); + 
load_task.detach(); + Ok(()) + }) } - fn handle_unregister_worktree( - &mut self, + async fn handle_unregister_worktree( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - self.remove_worktree(worktree_id, cx); - Ok(()) + this.update(&mut cx, |this, cx| { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + this.remove_worktree(worktree_id, cx); + Ok(()) + }) } - fn handle_update_worktree( - &mut self, + async fn handle_update_worktree( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - if let Some(worktree) = self.worktree_for_id(worktree_id, cx) { - worktree.update(cx, |worktree, cx| { - let worktree = worktree.as_remote_mut().unwrap(); - worktree.update_from_remote(envelope, cx) - })?; - } - Ok(()) + this.update(&mut cx, |this, cx| { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + if let Some(worktree) = this.worktree_for_id(worktree_id, cx) { + worktree.update(cx, |worktree, cx| { + let worktree = worktree.as_remote_mut().unwrap(); + worktree.update_from_remote(envelope, cx) + })?; + } + Ok(()) + }) } - fn handle_update_diagnostic_summary( - &mut self, + async fn handle_update_diagnostic_summary( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - if let Some(worktree) = self.worktree_for_id(worktree_id, cx) { - if let Some(summary) = envelope.payload.summary { - let project_path = ProjectPath { - worktree_id, - path: Path::new(&summary.path).into(), - }; - worktree.update(cx, |worktree, _| { - worktree - .as_remote_mut() - .unwrap() - 
.update_diagnostic_summary(project_path.path.clone(), &summary); - }); - cx.emit(Event::DiagnosticsUpdated(project_path)); - } - } - Ok(()) + this.update(&mut cx, |this, cx| { + let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); + if let Some(worktree) = this.worktree_for_id(worktree_id, cx) { + if let Some(summary) = envelope.payload.summary { + let project_path = ProjectPath { + worktree_id, + path: Path::new(&summary.path).into(), + }; + worktree.update(cx, |worktree, _| { + worktree + .as_remote_mut() + .unwrap() + .update_diagnostic_summary(project_path.path.clone(), &summary); + }); + cx.emit(Event::DiagnosticsUpdated(project_path)); + } + } + Ok(()) + }) } - fn handle_disk_based_diagnostics_updating( - &mut self, + async fn handle_disk_based_diagnostics_updating( + this: ModelHandle, _: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - self.disk_based_diagnostics_started(cx); + this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx)); Ok(()) } - fn handle_disk_based_diagnostics_updated( - &mut self, + async fn handle_disk_based_diagnostics_updated( + this: ModelHandle, _: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - self.disk_based_diagnostics_finished(cx); + this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx)); Ok(()) } - pub fn handle_update_buffer( - &mut self, + async fn handle_update_buffer( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let payload = envelope.payload.clone(); - let buffer_id = payload.buffer_id as usize; - let ops = payload - .operations - .into_iter() - .map(|op| language::proto::deserialize_operation(op)) - .collect::, _>>()?; - if let Some(buffer) = self.open_buffers.get_mut(&buffer_id) { - if let Some(buffer) = buffer.upgrade(cx) { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + 
this.update(&mut cx, |this, cx| { + let payload = envelope.payload.clone(); + let buffer_id = payload.buffer_id as usize; + let ops = payload + .operations + .into_iter() + .map(|op| language::proto::deserialize_operation(op)) + .collect::, _>>()?; + if let Some(buffer) = this.open_buffers.get_mut(&buffer_id) { + if let Some(buffer) = buffer.upgrade(cx) { + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + } } - } - Ok(()) + Ok(()) + }) } - pub fn handle_update_buffer_file( - &mut self, + async fn handle_update_buffer_file( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let payload = envelope.payload.clone(); - let buffer_id = payload.buffer_id as usize; - let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?; - let worktree = self - .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx) - .ok_or_else(|| anyhow!("no such worktree"))?; - let file = File::from_proto(file, worktree.clone(), cx)?; - let buffer = self - .open_buffers - .get_mut(&buffer_id) - .and_then(|b| b.upgrade(cx)) - .ok_or_else(|| anyhow!("no such buffer"))?; - buffer.update(cx, |buffer, cx| { - buffer.file_updated(Box::new(file), cx).detach(); - }); - - Ok(()) + this.update(&mut cx, |this, cx| { + let payload = envelope.payload.clone(); + let buffer_id = payload.buffer_id as usize; + let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?; + let worktree = this + .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx) + .ok_or_else(|| anyhow!("no such worktree"))?; + let file = File::from_proto(file, worktree.clone(), cx)?; + let buffer = this + .open_buffers + .get_mut(&buffer_id) + .and_then(|b| b.upgrade(cx)) + .ok_or_else(|| anyhow!("no such buffer"))?; + buffer.update(cx, |buffer, cx| { + buffer.file_updated(Box::new(file), cx).detach(); + }); + Ok(()) + }) } - pub fn handle_save_buffer( - &mut self, + async fn handle_save_buffer( + this: ModelHandle, envelope: 
TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - let sender_id = envelope.original_sender_id()?; - let project_id = self.remote_id().ok_or_else(|| anyhow!("not connected"))?; - let buffer = self - .shared_buffers - .get(&sender_id) - .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) - .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; - let receipt = envelope.receipt(); + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { let buffer_id = envelope.payload.buffer_id; - let save = cx.spawn(|_, mut cx| async move { - buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await - }); - - cx.background() - .spawn( - async move { - let (version, mtime) = save.await?; - - rpc.respond( - receipt, - proto::BufferSaved { - project_id, - buffer_id, - version: (&version).into(), - mtime: Some(mtime.into()), - }, - )?; + let sender_id = envelope.original_sender_id()?; + let (project_id, save) = this.update(&mut cx, |this, cx| { + let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?; + let buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?; + Ok::<_, anyhow::Error>((project_id, buffer.update(cx, |buffer, cx| buffer.save(cx)))) + })?; - Ok(()) - } - .log_err(), - ) - .detach(); - Ok(()) + let (version, mtime) = save.await?; + Ok(proto::BufferSaved { + project_id, + buffer_id, + version: (&version).into(), + mtime: Some(mtime.into()), + }) } - pub fn handle_format_buffer( - &mut self, - envelope: TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - let receipt = envelope.receipt(); + async fn handle_format_buffers( + this: ModelHandle, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { let sender_id = envelope.original_sender_id()?; - let buffer = self - .shared_buffers - .get(&sender_id) - 
.and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) - .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; - cx.spawn(|_, mut cx| async move { - let format = buffer.update(&mut cx, |buffer, cx| buffer.format(cx)).await; - // We spawn here in order to enqueue the sending of `Ack` *after* transmission of edits - // associated with formatting. - cx.spawn(|_| async move { - match format { - Ok(()) => rpc.respond(receipt, proto::Ack {})?, - Err(error) => rpc.respond_with_error( - receipt, - proto::Error { - message: error.to_string(), - }, - )?, - } - Ok::<_, anyhow::Error>(()) - }) - .await - .log_err(); + let format = this.update(&mut cx, |this, cx| { + let shared_buffers = this + .shared_buffers + .get(&sender_id) + .ok_or_else(|| anyhow!("peer has no buffers"))?; + let mut buffers = HashSet::default(); + for buffer_id in &envelope.payload.buffer_ids { + buffers.insert( + shared_buffers + .get(buffer_id) + .cloned() + .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?, + ); + } + Ok::<_, anyhow::Error>(this.format(buffers, false, cx)) + })?; + + let project_transaction = format.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) + }); + Ok(proto::FormatBuffersResponse { + transaction: Some(project_transaction), }) - .detach(); - Ok(()) } - fn handle_get_completions( - &mut self, + async fn handle_get_completions( + this: ModelHandle, envelope: TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - let receipt = envelope.receipt(); + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { let sender_id = envelope.original_sender_id()?; - let buffer = self - .shared_buffers - .get(&sender_id) - .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) - .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; let position = envelope .payload 
.position .and_then(language::proto::deserialize_anchor) .ok_or_else(|| anyhow!("invalid position"))?; - cx.spawn(|_, mut cx| async move { - match buffer - .update(&mut cx, |buffer, cx| buffer.completions(position, cx)) - .await - { - Ok(completions) => rpc.respond( - receipt, - proto::GetCompletionsResponse { - completions: completions - .iter() - .map(language::proto::serialize_completion) - .collect(), - }, - ), - Err(error) => rpc.respond_with_error( - receipt, - proto::Error { - message: error.to_string(), - }, - ), - } + let completions = this.update(&mut cx, |this, cx| { + let buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; + Ok::<_, anyhow::Error>(this.completions(&buffer, position, cx)) + })?; + + Ok(proto::GetCompletionsResponse { + completions: completions + .await? + .iter() + .map(language::proto::serialize_completion) + .collect(), }) - .detach_and_log_err(cx); - Ok(()) } - fn handle_apply_additional_edits_for_completion( - &mut self, + async fn handle_apply_additional_edits_for_completion( + this: ModelHandle, envelope: TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - let receipt = envelope.receipt(); + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id()?; + let apply_additional_edits = this.update(&mut cx, |this, cx| { + let buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; + let language = buffer.read(cx).language(); + let completion = language::proto::deserialize_completion( + envelope + .payload + .completion + .ok_or_else(|| anyhow!("invalid completion"))?, + language, + )?; + Ok::<_, anyhow::Error>( + 
this.apply_additional_edits_for_completion(buffer, completion, false, cx), + ) + })?; + + Ok(proto::ApplyCompletionAdditionalEditsResponse { + transaction: apply_additional_edits + .await? + .as_ref() + .map(language::proto::serialize_transaction), + }) + } + + async fn handle_get_code_actions( + this: ModelHandle, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { + let sender_id = envelope.original_sender_id()?; + let start = envelope + .payload + .start + .and_then(language::proto::deserialize_anchor) + .ok_or_else(|| anyhow!("invalid start"))?; + let end = envelope + .payload + .end + .and_then(language::proto::deserialize_anchor) + .ok_or_else(|| anyhow!("invalid end"))?; + let code_actions = this.update(&mut cx, |this, cx| { + let buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; + Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx)) + })?; + + Ok(proto::GetCodeActionsResponse { + actions: code_actions + .await? 
+ .iter() + .map(language::proto::serialize_code_action) + .collect(), + }) + } + + async fn handle_apply_code_action( + this: ModelHandle, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { let sender_id = envelope.original_sender_id()?; - let buffer = self - .shared_buffers - .get(&sender_id) - .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) - .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; - let language = buffer.read(cx).language(); - let completion = language::proto::deserialize_completion( + let action = language::proto::deserialize_code_action( envelope .payload - .completion - .ok_or_else(|| anyhow!("invalid position"))?, - language, + .action + .ok_or_else(|| anyhow!("invalid action"))?, )?; - cx.spawn(|_, mut cx| async move { - match buffer - .update(&mut cx, |buffer, cx| { - buffer.apply_additional_edits_for_completion(completion, false, cx) - }) - .await - { - Ok(edit_ids) => rpc.respond( - receipt, - proto::ApplyCompletionAdditionalEditsResponse { - additional_edits: edit_ids - .into_iter() - .map(|edit_id| proto::AdditionalEdit { - replica_id: edit_id.replica_id as u32, - local_timestamp: edit_id.value, - }) - .collect(), - }, - ), - Err(error) => rpc.respond_with_error( - receipt, - proto::Error { - message: error.to_string(), - }, - ), - } + let apply_code_action = this.update(&mut cx, |this, cx| { + let buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; + Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx)) + })?; + + let project_transaction = apply_code_action.await?; + let project_transaction = this.update(&mut cx, |this, cx| { + this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx) + }); + Ok(proto::ApplyCodeActionResponse { + transaction: 
Some(project_transaction), }) - .detach_and_log_err(cx); - Ok(()) } - pub fn handle_get_definition( - &mut self, + async fn handle_get_definition( + this: ModelHandle, envelope: TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - let receipt = envelope.receipt(); + _: Arc, + mut cx: AsyncAppContext, + ) -> Result { let sender_id = envelope.original_sender_id()?; - let source_buffer = self - .shared_buffers - .get(&sender_id) - .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) - .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; let position = envelope .payload .position .and_then(deserialize_anchor) .ok_or_else(|| anyhow!("invalid position"))?; - if !source_buffer.read(cx).can_resolve(&position) { - return Err(anyhow!("cannot resolve position")); - } + let definitions = this.update(&mut cx, |this, cx| { + let source_buffer = this + .shared_buffers + .get(&sender_id) + .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) + .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?; + if source_buffer.read(cx).can_resolve(&position) { + Ok(this.definition(&source_buffer, position, cx)) + } else { + Err(anyhow!("cannot resolve position")) + } + })?; - let definitions = self.definition(&source_buffer, position, cx); - cx.spawn(|this, mut cx| async move { - let definitions = definitions.await?; + let definitions = definitions.await?; + + this.update(&mut cx, |this, cx| { let mut response = proto::GetDefinitionResponse { definitions: Default::default(), }; - this.update(&mut cx, |this, cx| { - for definition in definitions { - let buffer = - this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx); - response.definitions.push(proto::Definition { - target_start: Some(serialize_anchor(&definition.target_range.start)), - target_end: Some(serialize_anchor(&definition.target_range.end)), - buffer: Some(buffer), - }); - } - }); - 
rpc.respond(receipt, response)?; - Ok::<_, anyhow::Error>(()) + for definition in definitions { + let buffer = + this.serialize_buffer_for_peer(&definition.target_buffer, sender_id, cx); + response.definitions.push(proto::Definition { + target_start: Some(serialize_anchor(&definition.target_range.start)), + target_end: Some(serialize_anchor(&definition.target_range.end)), + buffer: Some(buffer), + }); + } + Ok(response) }) - .detach_and_log_err(cx); - - Ok(()) } - pub fn handle_open_buffer( - &mut self, + async fn handle_open_buffer( + this: ModelHandle, envelope: TypedEnvelope, - rpc: Arc, - cx: &mut ModelContext, - ) -> anyhow::Result<()> { - let receipt = envelope.receipt(); + _: Arc, + mut cx: AsyncAppContext, + ) -> anyhow::Result { let peer_id = envelope.original_sender_id()?; let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let open_buffer = self.open_buffer( - ProjectPath { - worktree_id, - path: PathBuf::from(envelope.payload.path).into(), - }, - cx, - ); - cx.spawn(|this, mut cx| { - async move { - let buffer = open_buffer.await?; - let buffer = this.update(&mut cx, |this, cx| { - this.serialize_buffer_for_peer(&buffer, peer_id, cx) - }); - rpc.respond( - receipt, - proto::OpenBufferResponse { - buffer: Some(buffer), - }, - ) + let open_buffer = this.update(&mut cx, |this, cx| { + this.open_buffer( + ProjectPath { + worktree_id, + path: PathBuf::from(envelope.payload.path).into(), + }, + cx, + ) + }); + + let buffer = open_buffer.await?; + this.update(&mut cx, |this, cx| { + Ok(proto::OpenBufferResponse { + buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)), + }) + }) + } + + fn serialize_project_transaction_for_peer( + &mut self, + project_transaction: ProjectTransaction, + peer_id: PeerId, + cx: &AppContext, + ) -> proto::ProjectTransaction { + let mut serialized_transaction = proto::ProjectTransaction { + buffers: Default::default(), + transactions: Default::default(), + }; + for (buffer, transaction) in 
project_transaction.0 { + serialized_transaction + .buffers + .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx)); + serialized_transaction + .transactions + .push(language::proto::serialize_transaction(&transaction)); + } + serialized_transaction + } + + fn deserialize_project_transaction( + &mut self, + message: proto::ProjectTransaction, + push_to_history: bool, + cx: &mut ModelContext, + ) -> Task> { + let mut project_transaction = ProjectTransaction::default(); + for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) { + let buffer = match self.deserialize_buffer(buffer, cx) { + Ok(buffer) => buffer, + Err(error) => return Task::ready(Err(error)), + }; + let transaction = match language::proto::deserialize_transaction(transaction) { + Ok(transaction) => transaction, + Err(error) => return Task::ready(Err(error)), + }; + project_transaction.0.insert(buffer, transaction); + } + + cx.spawn_weak(|_, mut cx| async move { + for (buffer, transaction) in &project_transaction.0 { + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_edits(transaction.edit_ids.iter().copied()) + }) + .await; + + if push_to_history { + buffer.update(&mut cx, |buffer, _| { + buffer.push_transaction(transaction.clone(), Instant::now()); + }); + } } - .log_err() + + Ok(project_transaction) }) - .detach(); - Ok(()) } fn serialize_buffer_for_peer( @@ -1878,7 +2569,7 @@ impl Project { } } - fn deserialize_remote_buffer( + fn deserialize_buffer( &mut self, buffer: proto::Buffer, cx: &mut ModelContext, @@ -1911,67 +2602,74 @@ impl Project { } } - pub fn handle_close_buffer( - &mut self, + async fn handle_close_buffer( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> anyhow::Result<()> { - if let Some(shared_buffers) = self.shared_buffers.get_mut(&envelope.original_sender_id()?) 
{ - shared_buffers.remove(&envelope.payload.buffer_id); - cx.notify(); - } - Ok(()) + this.update(&mut cx, |this, cx| { + if let Some(shared_buffers) = + this.shared_buffers.get_mut(&envelope.original_sender_id()?) + { + shared_buffers.remove(&envelope.payload.buffer_id); + cx.notify(); + } + Ok(()) + }) } - pub fn handle_buffer_saved( - &mut self, + async fn handle_buffer_saved( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { - let payload = envelope.payload.clone(); - let buffer = self - .open_buffers - .get(&(payload.buffer_id as usize)) - .and_then(|buffer| buffer.upgrade(cx)); - if let Some(buffer) = buffer { - buffer.update(cx, |buffer, cx| { - let version = payload.version.try_into()?; - let mtime = payload - .mtime - .ok_or_else(|| anyhow!("missing mtime"))? - .into(); - buffer.did_save(version, mtime, None, cx); - Result::<_, anyhow::Error>::Ok(()) - })?; - } - Ok(()) + let version = envelope.payload.version.try_into()?; + let mtime = envelope + .payload + .mtime + .ok_or_else(|| anyhow!("missing mtime"))? + .into(); + + this.update(&mut cx, |this, cx| { + let buffer = this + .open_buffers + .get(&(envelope.payload.buffer_id as usize)) + .and_then(|buffer| buffer.upgrade(cx)); + if let Some(buffer) = buffer { + buffer.update(cx, |buffer, cx| { + buffer.did_save(version, mtime, None, cx); + }); + } + Ok(()) + }) } - pub fn handle_buffer_reloaded( - &mut self, + async fn handle_buffer_reloaded( + this: ModelHandle, envelope: TypedEnvelope, _: Arc, - cx: &mut ModelContext, + mut cx: AsyncAppContext, ) -> Result<()> { let payload = envelope.payload.clone(); - let buffer = self - .open_buffers - .get(&(payload.buffer_id as usize)) - .and_then(|buffer| buffer.upgrade(cx)); - if let Some(buffer) = buffer { - buffer.update(cx, |buffer, cx| { - let version = payload.version.try_into()?; - let mtime = payload - .mtime - .ok_or_else(|| anyhow!("missing mtime"))? 
- .into(); - buffer.did_reload(version, mtime, cx); - Result::<_, anyhow::Error>::Ok(()) - })?; - } - Ok(()) + let version = payload.version.try_into()?; + let mtime = payload + .mtime + .ok_or_else(|| anyhow!("missing mtime"))? + .into(); + this.update(&mut cx, |this, cx| { + let buffer = this + .open_buffers + .get(&(payload.buffer_id as usize)) + .and_then(|buffer| buffer.upgrade(cx)); + if let Some(buffer) = buffer { + buffer.update(cx, |buffer, cx| { + buffer.did_reload(version, mtime, cx); + }); + } + Ok(()) + }) } pub fn match_paths<'a>( @@ -2151,13 +2849,40 @@ impl> From<(WorktreeId, P)> for ProjectPath { } } +impl From for fs::CreateOptions { + fn from(options: lsp::CreateFileOptions) -> Self { + Self { + overwrite: options.overwrite.unwrap_or(false), + ignore_if_exists: options.ignore_if_exists.unwrap_or(false), + } + } +} + +impl From for fs::RenameOptions { + fn from(options: lsp::RenameFileOptions) -> Self { + Self { + overwrite: options.overwrite.unwrap_or(false), + ignore_if_exists: options.ignore_if_exists.unwrap_or(false), + } + } +} + +impl From for fs::RemoveOptions { + fn from(options: lsp::DeleteFileOptions) -> Self { + Self { + recursive: options.recursive.unwrap_or(false), + ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false), + } + } +} + #[cfg(test)] mod tests { use super::{Event, *}; use client::test::FakeHttpClient; use fs::RealFs; use futures::StreamExt; - use gpui::{test::subscribe, TestAppContext}; + use gpui::test::subscribe; use language::{ tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, @@ -2194,7 +2919,7 @@ mod tests { ) .unwrap(); - let project = build_project(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), &mut cx); let (tree, _) = project .update(&mut cx, |project, cx| { @@ -2234,8 +2959,7 @@ mod tests { #[gpui::test] async fn test_language_server_diagnostics(mut cx: gpui::TestAppContext) { - let (language_server_config, mut 
fake_server) = - LanguageServerConfig::fake(cx.background()).await; + let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await; let progress_token = language_server_config .disk_based_diagnostics_progress_token .clone() @@ -2376,7 +3100,7 @@ mod tests { } })); - let project = build_project(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), &mut cx); let (tree, _) = project .update(&mut cx, |project, cx| { project.find_or_create_local_worktree(&dir.path(), false, cx) @@ -2399,8 +3123,7 @@ mod tests { #[gpui::test] async fn test_definition(mut cx: gpui::TestAppContext) { - let (language_server_config, mut fake_server) = - LanguageServerConfig::fake(cx.background()).await; + let (language_server_config, mut fake_server) = LanguageServerConfig::fake(&cx).await; let mut languages = LanguageRegistry::new(); languages.add(Arc::new(Language::new( @@ -2417,6 +3140,7 @@ mod tests { "a.rs": "const fn a() { A }", "b.rs": "const y: i32 = crate::a()", })); + let dir_path = dir.path().to_path_buf(); let http_client = FakeHttpClient::with_404_response(); let client = Client::new(http_client.clone()); @@ -2441,7 +3165,6 @@ mod tests { cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) .await; - // Cause worktree to start the fake language server let buffer = project .update(&mut cx, |project, cx| { project.open_buffer( @@ -2454,28 +3177,26 @@ mod tests { }) .await .unwrap(); - let definitions = - project.update(&mut cx, |project, cx| project.definition(&buffer, 22, cx)); - let (request_id, request) = fake_server - .receive_request::() - .await; - let request_params = request.text_document_position_params; - assert_eq!( - request_params.text_document.uri.to_file_path().unwrap(), - dir.path().join("b.rs") - ); - assert_eq!(request_params.position, lsp::Position::new(0, 22)); - fake_server - .respond( - request_id, - Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( - 
lsp::Url::from_file_path(dir.path().join("a.rs")).unwrap(), - lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), - ))), - ) - .await; - let mut definitions = definitions.await.unwrap(); + fake_server.handle_request::(move |params| { + let params = params.text_document_position_params; + assert_eq!( + params.text_document.uri.to_file_path().unwrap(), + dir_path.join("b.rs") + ); + assert_eq!(params.position, lsp::Position::new(0, 22)); + + Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( + lsp::Url::from_file_path(dir_path.join("a.rs")).unwrap(), + lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), + ))) + }); + + let mut definitions = project + .update(&mut cx, |project, cx| project.definition(&buffer, 22, cx)) + .await + .unwrap(); + assert_eq!(definitions.len(), 1); let definition = definitions.pop().unwrap(); cx.update(|cx| { @@ -2533,7 +3254,7 @@ mod tests { ) .await; - let project = build_project(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), &mut cx); let worktree_id = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree("/dir", false, cx) @@ -2571,7 +3292,7 @@ mod tests { ) .await; - let project = build_project(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), &mut cx); let worktree_id = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree("/dir/file1", false, cx) @@ -2613,7 +3334,7 @@ mod tests { } })); - let project = build_project(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), &mut cx); let rpc = project.read_with(&cx, |p, _| p.client.clone()); let (tree, _) = project @@ -2759,7 +3480,7 @@ mod tests { ) .await; - let project = build_project(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), &mut cx); let worktree_id = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree("/the-dir", false, cx) @@ -2809,7 +3530,7 @@ mod tests { "file3": "ghi", })); - let project = build_project(Arc::new(RealFs), 
&mut cx); + let project = Project::test(Arc::new(RealFs), &mut cx); let (worktree, _) = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree(dir.path(), false, cx) @@ -2943,7 +3664,7 @@ mod tests { let initial_contents = "aaa\nbbbbb\nc\n"; let dir = temp_tree(json!({ "the-file": initial_contents })); - let project = build_project(Arc::new(RealFs), &mut cx); + let project = Project::test(Arc::new(RealFs), &mut cx); let (worktree, _) = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree(dir.path(), false, cx) @@ -3054,7 +3775,7 @@ mod tests { ) .await; - let project = build_project(fs.clone(), &mut cx); + let project = Project::test(fs.clone(), &mut cx); let (worktree, _) = project .update(&mut cx, |p, cx| { p.find_or_create_local_worktree("/the-dir", false, cx) @@ -3294,12 +4015,4 @@ mod tests { ] ); } - - fn build_project(fs: Arc, cx: &mut TestAppContext) -> ModelHandle { - let languages = Arc::new(LanguageRegistry::new()); - let http_client = FakeHttpClient::with_404_response(); - let client = client::Client::new(http_client.clone()); - let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); - cx.update(|cx| Project::local(client, user_store, languages, fs, cx)) - } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 643c26aa719855a6b3d34647ce6679b653d8ed69..79e3a7e528b0b310978d46cb26f64f5bf2be546d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -14,9 +14,7 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, }; -use language::{ - Anchor, Buffer, Completion, DiagnosticEntry, Language, Operation, PointUtf16, Rope, -}; +use language::{Buffer, DiagnosticEntry, Operation, PointUtf16, Rope}; use lazy_static::lazy_static; use parking_lot::Mutex; use postage::{ @@ -293,7 +291,7 @@ impl Worktree { let this = worktree_handle.downgrade(); cx.spawn(|mut cx| async move { while let Some(_) = 
snapshot_rx.recv().await { - if let Some(this) = cx.read(|cx| this.upgrade(cx)) { + if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| this.poll_snapshot(cx)); } else { break; @@ -518,7 +516,7 @@ impl LocalWorktree { cx.spawn_weak(|this, mut cx| async move { while let Ok(scan_state) = scan_states_rx.recv().await { - if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { + if let Some(handle) = this.upgrade(&cx) { let to_send = handle.update(&mut cx, |this, cx| { last_scan_state_tx.blocking_send(scan_state).ok(); this.poll_snapshot(cx); @@ -820,7 +818,7 @@ impl RemoteWorktree { ) -> Result<()> { let mut tx = self.updates_tx.clone(); let payload = envelope.payload.clone(); - cx.background() + cx.foreground() .spawn(async move { tx.send(payload).await.expect("receiver runs to completion"); }) @@ -1387,96 +1385,6 @@ impl language::File for File { }) } - fn format_remote( - &self, - buffer_id: u64, - cx: &mut MutableAppContext, - ) -> Option>> { - let worktree = self.worktree.read(cx); - let worktree = worktree.as_remote()?; - let rpc = worktree.client.clone(); - let project_id = worktree.project_id; - Some(cx.foreground().spawn(async move { - rpc.request(proto::FormatBuffer { - project_id, - buffer_id, - }) - .await?; - Ok(()) - })) - } - - fn completions( - &self, - buffer_id: u64, - position: Anchor, - language: Option>, - cx: &mut MutableAppContext, - ) -> Task>>> { - let worktree = self.worktree.read(cx); - let worktree = if let Some(worktree) = worktree.as_remote() { - worktree - } else { - return Task::ready(Err(anyhow!( - "remote completions requested on a local worktree" - ))); - }; - let rpc = worktree.client.clone(); - let project_id = worktree.project_id; - cx.foreground().spawn(async move { - let response = rpc - .request(proto::GetCompletions { - project_id, - buffer_id, - position: Some(language::proto::serialize_anchor(&position)), - }) - .await?; - response - .completions - .into_iter() - .map(|completion| { - 
language::proto::deserialize_completion(completion, language.as_ref()) - }) - .collect() - }) - } - - fn apply_additional_edits_for_completion( - &self, - buffer_id: u64, - completion: Completion, - cx: &mut MutableAppContext, - ) -> Task>> { - let worktree = self.worktree.read(cx); - let worktree = if let Some(worktree) = worktree.as_remote() { - worktree - } else { - return Task::ready(Err(anyhow!( - "remote additional edits application requested on a local worktree" - ))); - }; - let rpc = worktree.client.clone(); - let project_id = worktree.project_id; - cx.foreground().spawn(async move { - let response = rpc - .request(proto::ApplyCompletionAdditionalEdits { - project_id, - buffer_id, - completion: Some(language::proto::serialize_completion(&completion)), - }) - .await?; - - Ok(response - .additional_edits - .into_iter() - .map(|edit| clock::Local { - replica_id: edit.replica_id as ReplicaId, - value: edit.local_timestamp, - }) - .collect()) - }) - } - fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext) { self.worktree.update(cx, |worktree, cx| { worktree.send_buffer_update(buffer_id, operation, cx); @@ -2216,7 +2124,7 @@ struct UpdateIgnoreStatusJob { } pub trait WorktreeHandle { - #[cfg(test)] + #[cfg(any(test, feature = "test-support"))] fn flush_fs_events<'a>( &self, cx: &'a gpui::TestAppContext, @@ -2230,7 +2138,7 @@ impl WorktreeHandle for ModelHandle { // // This function mutates the worktree's directory and waits for those mutations to be picked up, // to ensure that all redundant FS events have already been processed. 
- #[cfg(test)] + #[cfg(any(test, feature = "test-support"))] fn flush_fs_events<'a>( &self, cx: &'a gpui::TestAppContext, @@ -2238,14 +2146,22 @@ impl WorktreeHandle for ModelHandle { use smol::future::FutureExt; let filename = "fs-event-sentinel"; - let root_path = cx.read(|cx| self.read(cx).as_local().unwrap().abs_path().clone()); let tree = self.clone(); + let (fs, root_path) = self.read_with(cx, |tree, _| { + let tree = tree.as_local().unwrap(); + (tree.fs.clone(), tree.abs_path().clone()) + }); + async move { - std::fs::write(root_path.join(filename), "").unwrap(); + fs.create_file(&root_path.join(filename), Default::default()) + .await + .unwrap(); tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_some()) .await; - std::fs::remove_file(root_path.join(filename)).unwrap(); + fs.remove_file(&root_path.join(filename), Default::default()) + .await + .unwrap(); tree.condition(&cx, |tree, _| tree.entry_for_path(filename).is_none()) .await; diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 2f2364fc245d5d8534c0dbe842624869e8beb46d..cc28e507e97ab517f632bd4b303c082bf0722708 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -39,27 +39,32 @@ message Envelope { SaveBuffer save_buffer = 31; BufferSaved buffer_saved = 32; BufferReloaded buffer_reloaded = 33; - FormatBuffer format_buffer = 34; - GetCompletions get_completions = 35; - GetCompletionsResponse get_completions_response = 36; - ApplyCompletionAdditionalEdits apply_completion_additional_edits = 37; - ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 38; - - GetChannels get_channels = 39; - GetChannelsResponse get_channels_response = 40; - JoinChannel join_channel = 41; - JoinChannelResponse join_channel_response = 42; - LeaveChannel leave_channel = 43; - SendChannelMessage send_channel_message = 44; - SendChannelMessageResponse send_channel_message_response = 45; - ChannelMessageSent channel_message_sent = 46; - 
GetChannelMessages get_channel_messages = 47; - GetChannelMessagesResponse get_channel_messages_response = 48; - - UpdateContacts update_contacts = 49; - - GetUsers get_users = 50; - GetUsersResponse get_users_response = 51; + FormatBuffers format_buffers = 34; + FormatBuffersResponse format_buffers_response = 35; + GetCompletions get_completions = 36; + GetCompletionsResponse get_completions_response = 37; + ApplyCompletionAdditionalEdits apply_completion_additional_edits = 38; + ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 39; + GetCodeActions get_code_actions = 40; + GetCodeActionsResponse get_code_actions_response = 41; + ApplyCodeAction apply_code_action = 42; + ApplyCodeActionResponse apply_code_action_response = 43; + + GetChannels get_channels = 44; + GetChannelsResponse get_channels_response = 45; + JoinChannel join_channel = 46; + JoinChannelResponse join_channel_response = 47; + LeaveChannel leave_channel = 48; + SendChannelMessage send_channel_message = 49; + SendChannelMessageResponse send_channel_message_response = 50; + ChannelMessageSent channel_message_sent = 51; + GetChannelMessages get_channel_messages = 52; + GetChannelMessagesResponse get_channel_messages_response = 53; + + UpdateContacts update_contacts = 54; + + GetUsers get_users = 55; + GetUsersResponse get_users_response = 56; } } @@ -202,9 +207,13 @@ message BufferReloaded { Timestamp mtime = 4; } -message FormatBuffer { +message FormatBuffers { uint64 project_id = 1; - uint64 buffer_id = 2; + repeated uint64 buffer_ids = 2; +} + +message FormatBuffersResponse { + ProjectTransaction transaction = 1; } message GetCompletions { @@ -224,12 +233,7 @@ message ApplyCompletionAdditionalEdits { } message ApplyCompletionAdditionalEditsResponse { - repeated AdditionalEdit additional_edits = 1; -} - -message AdditionalEdit { - uint32 replica_id = 1; - uint32 local_timestamp = 2; + Transaction transaction = 1; } message Completion { @@ -239,6 +243,51 @@ message 
Completion { bytes lsp_completion = 4; } +message GetCodeActions { + uint64 project_id = 1; + uint64 buffer_id = 2; + Anchor start = 3; + Anchor end = 4; +} + +message GetCodeActionsResponse { + repeated CodeAction actions = 1; +} + +message ApplyCodeAction { + uint64 project_id = 1; + uint64 buffer_id = 2; + CodeAction action = 3; +} + +message ApplyCodeActionResponse { + ProjectTransaction transaction = 1; +} + +message CodeAction { + Anchor start = 1; + Anchor end = 2; + bytes lsp_action = 3; +} + +message ProjectTransaction { + repeated Buffer buffers = 1; + repeated Transaction transactions = 2; +} + +message Transaction { + LocalTimestamp id = 1; + repeated LocalTimestamp edit_ids = 2; + repeated VectorClockEntry start = 3; + repeated VectorClockEntry end = 4; + repeated Range ranges = 5; +} + +message LocalTimestamp { + uint32 replica_id = 1; + uint32 value = 2; +} + message UpdateDiagnosticSummary { uint64 project_id = 1; uint64 worktree_id = 2; @@ -366,16 +415,11 @@ message Buffer { message BufferState { uint64 id = 1; optional File file = 2; - string visible_text = 3; - string deleted_text = 4; - repeated BufferFragment fragments = 5; - repeated UndoMapEntry undo_map = 6; - repeated VectorClockEntry version = 7; - repeated SelectionSet selections = 8; - repeated Diagnostic diagnostics = 9; - uint32 lamport_timestamp = 10; - repeated Operation deferred_operations = 11; - repeated string completion_triggers = 12; + string base_text = 3; + repeated Operation operations = 4; + repeated SelectionSet selections = 5; + repeated Diagnostic diagnostics = 6; + repeated string completion_triggers = 7; } message BufferFragment { @@ -474,7 +518,9 @@ message Operation { } message UpdateCompletionTriggers { - repeated string triggers = 1; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated string triggers = 3; } } diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 
77e9bb4db4d1d8ffc33faa30016b9c92a6276432..0a614e0bed4cab518418c42b4e12b28b3f69b08c 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -179,7 +179,16 @@ impl Peer { let channel = response_channels.lock().as_mut()?.remove(&responding_to); if let Some(mut tx) = channel { let mut requester_resumed = barrier::channel(); - tx.send((incoming, requester_resumed.0)).await.ok(); + if let Err(error) = tx.send((incoming, requester_resumed.0)).await { + log::debug!( + "received RPC but request future was dropped {:?}", + error.0 .0 + ); + } + // Drop response channel before awaiting on the barrier. This allows the + // barrier to get dropped even if the request's future is dropped before it + // has a chance to observe the response. + drop(tx); requester_resumed.1.recv().await; } else { log::warn!("received RPC response to unknown request {}", responding_to); @@ -337,7 +346,7 @@ mod tests { use async_tungstenite::tungstenite::Message as WebSocketMessage; use gpui::TestAppContext; - #[gpui::test(iterations = 10)] + #[gpui::test(iterations = 50)] async fn test_request_response(cx: TestAppContext) { let executor = cx.foreground(); @@ -478,7 +487,7 @@ mod tests { } } - #[gpui::test(iterations = 10)] + #[gpui::test(iterations = 50)] async fn test_order_of_response_and_incoming(cx: TestAppContext) { let executor = cx.foreground(); let server = Peer::new(); @@ -576,7 +585,119 @@ mod tests { ); } - #[gpui::test(iterations = 10)] + #[gpui::test(iterations = 50)] + async fn test_dropping_request_before_completion(cx: TestAppContext) { + let executor = cx.foreground(); + let server = Peer::new(); + let client = Peer::new(); + + let (client_to_server_conn, server_to_client_conn, _) = + Connection::in_memory(cx.background()); + let (client_to_server_conn_id, io_task1, mut client_incoming) = + client.add_connection(client_to_server_conn).await; + let (server_to_client_conn_id, io_task2, mut server_incoming) = + server.add_connection(server_to_client_conn).await; + + 
executor.spawn(io_task1).detach(); + executor.spawn(io_task2).detach(); + + executor + .spawn(async move { + let request1 = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + let request2 = server_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + + server + .send( + server_to_client_conn_id, + proto::Error { + message: "message 1".to_string(), + }, + ) + .unwrap(); + server + .send( + server_to_client_conn_id, + proto::Error { + message: "message 2".to_string(), + }, + ) + .unwrap(); + server.respond(request1.receipt(), proto::Ack {}).unwrap(); + server.respond(request2.receipt(), proto::Ack {}).unwrap(); + + // Prevent the connection from being dropped + server_incoming.next().await; + }) + .detach(); + + let events = Arc::new(Mutex::new(Vec::new())); + + let request1 = client.request(client_to_server_conn_id, proto::Ping {}); + let request1_task = executor.spawn(request1); + let request2 = client.request(client_to_server_conn_id, proto::Ping {}); + let request2_task = executor.spawn({ + let events = events.clone(); + async move { + request2.await.unwrap(); + events.lock().push("response 2".to_string()); + } + }); + + executor + .spawn({ + let events = events.clone(); + async move { + let incoming1 = client_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + events.lock().push(incoming1.payload.message); + let incoming2 = client_incoming + .next() + .await + .unwrap() + .into_any() + .downcast::>() + .unwrap(); + events.lock().push(incoming2.payload.message); + + // Prevent the connection from being dropped + client_incoming.next().await; + } + }) + .detach(); + + // Allow the request to make some progress before dropping it. 
+ cx.background().simulate_random_delay().await; + drop(request1_task); + + request2_task.await; + assert_eq!( + &*events.lock(), + &[ + "message 1".to_string(), + "message 2".to_string(), + "response 2".to_string() + ] + ); + } + + #[gpui::test(iterations = 50)] async fn test_disconnect(cx: TestAppContext) { let executor = cx.foreground(); @@ -611,7 +732,7 @@ mod tests { .is_err()); } - #[gpui::test(iterations = 10)] + #[gpui::test(iterations = 50)] async fn test_io_error(cx: TestAppContext) { let executor = cx.foreground(); let (client_conn, mut server_conn, _) = Connection::in_memory(cx.background()); diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index a94079526516d468537de754ea2fbdcec13d3878..9aa9eb61b3e3a1eec866f0f43c224c443b98c360 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -122,6 +122,8 @@ macro_rules! entity_messages { messages!( Ack, AddProjectCollaborator, + ApplyCodeAction, + ApplyCodeActionResponse, ApplyCompletionAdditionalEdits, ApplyCompletionAdditionalEditsResponse, BufferReloaded, @@ -131,11 +133,14 @@ messages!( DiskBasedDiagnosticsUpdated, DiskBasedDiagnosticsUpdating, Error, - FormatBuffer, + FormatBuffers, + FormatBuffersResponse, GetChannelMessages, GetChannelMessagesResponse, GetChannels, GetChannelsResponse, + GetCodeActions, + GetCodeActionsResponse, GetCompletions, GetCompletionsResponse, GetDefinition, @@ -171,13 +176,15 @@ messages!( ); request_messages!( + (ApplyCodeAction, ApplyCodeActionResponse), ( ApplyCompletionAdditionalEdits, ApplyCompletionAdditionalEditsResponse ), - (FormatBuffer, Ack), + (FormatBuffers, FormatBuffersResponse), (GetChannelMessages, GetChannelMessagesResponse), (GetChannels, GetChannelsResponse), + (GetCodeActions, GetCodeActionsResponse), (GetCompletions, GetCompletionsResponse), (GetDefinition, GetDefinitionResponse), (GetUsers, GetUsersResponse), @@ -197,13 +204,15 @@ request_messages!( entity_messages!( project_id, AddProjectCollaborator, + ApplyCodeAction, 
ApplyCompletionAdditionalEdits, BufferReloaded, BufferSaved, CloseBuffer, DiskBasedDiagnosticsUpdated, DiskBasedDiagnosticsUpdating, - FormatBuffer, + FormatBuffers, + GetCodeActions, GetCompletions, GetDefinition, JoinProject, diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index 969ec86a83c6ee12bb925179412db2b5ad281a09..6d7ae5bc84080de55d2a1c84c572a0723698ed08 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -5,4 +5,4 @@ pub mod proto; pub use conn::Connection; pub use peer::*; -pub const PROTOCOL_VERSION: u32 = 5; +pub const PROTOCOL_VERSION: u32 = 6; diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index bc1ef781f498d0277956d25be3cbe1c15340bf59..c1d36ef3c6075280291f1d3cbd84fed6c458672d 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -13,7 +13,7 @@ use futures::{future::BoxFuture, FutureExt, StreamExt}; use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use postage::{mpsc, prelude::Sink as _}; use rpc::{ - proto::{self, AnyTypedEnvelope, EnvelopedMessage}, + proto::{self, AnyTypedEnvelope, EnvelopedMessage, RequestMessage}, Connection, ConnectionId, Peer, TypedEnvelope, }; use sha1::{Digest as _, Sha1}; @@ -43,7 +43,6 @@ pub struct Server { const MESSAGE_COUNT_PER_PAGE: usize = 100; const MAX_MESSAGE_LEN: usize = 1024; -const NO_SUCH_PROJECT: &'static str = "no such project"; impl Server { pub fn new( @@ -60,42 +59,44 @@ impl Server { }; server - .add_handler(Server::ping) - .add_handler(Server::register_project) - .add_handler(Server::unregister_project) - .add_handler(Server::share_project) - .add_handler(Server::unshare_project) - .add_handler(Server::join_project) - .add_handler(Server::leave_project) - .add_handler(Server::register_worktree) - .add_handler(Server::unregister_worktree) - .add_handler(Server::share_worktree) - .add_handler(Server::update_worktree) - .add_handler(Server::update_diagnostic_summary) - .add_handler(Server::disk_based_diagnostics_updating) - 
.add_handler(Server::disk_based_diagnostics_updated) - .add_handler(Server::get_definition) - .add_handler(Server::open_buffer) - .add_handler(Server::close_buffer) - .add_handler(Server::update_buffer) - .add_handler(Server::update_buffer_file) - .add_handler(Server::buffer_reloaded) - .add_handler(Server::buffer_saved) - .add_handler(Server::save_buffer) - .add_handler(Server::format_buffer) - .add_handler(Server::get_completions) - .add_handler(Server::apply_additional_edits_for_completion) - .add_handler(Server::get_channels) - .add_handler(Server::get_users) - .add_handler(Server::join_channel) - .add_handler(Server::leave_channel) - .add_handler(Server::send_channel_message) - .add_handler(Server::get_channel_messages); + .add_request_handler(Server::ping) + .add_request_handler(Server::register_project) + .add_message_handler(Server::unregister_project) + .add_request_handler(Server::share_project) + .add_message_handler(Server::unshare_project) + .add_request_handler(Server::join_project) + .add_message_handler(Server::leave_project) + .add_request_handler(Server::register_worktree) + .add_message_handler(Server::unregister_worktree) + .add_request_handler(Server::share_worktree) + .add_message_handler(Server::update_worktree) + .add_message_handler(Server::update_diagnostic_summary) + .add_message_handler(Server::disk_based_diagnostics_updating) + .add_message_handler(Server::disk_based_diagnostics_updated) + .add_request_handler(Server::get_definition) + .add_request_handler(Server::open_buffer) + .add_message_handler(Server::close_buffer) + .add_request_handler(Server::update_buffer) + .add_message_handler(Server::update_buffer_file) + .add_message_handler(Server::buffer_reloaded) + .add_message_handler(Server::buffer_saved) + .add_request_handler(Server::save_buffer) + .add_request_handler(Server::format_buffers) + .add_request_handler(Server::get_completions) + .add_request_handler(Server::apply_additional_edits_for_completion) + 
.add_request_handler(Server::get_code_actions) + .add_request_handler(Server::apply_code_action) + .add_request_handler(Server::get_channels) + .add_request_handler(Server::get_users) + .add_request_handler(Server::join_channel) + .add_message_handler(Server::leave_channel) + .add_request_handler(Server::send_channel_message) + .add_request_handler(Server::get_channel_messages); Arc::new(server) } - fn add_handler(&mut self, handler: F) -> &mut Self + fn add_message_handler(&mut self, handler: F) -> &mut Self where F: 'static + Send + Sync + Fn(Arc, TypedEnvelope) -> Fut, Fut: 'static + Send + Future>, @@ -114,6 +115,35 @@ impl Server { self } + fn add_request_handler(&mut self, handler: F) -> &mut Self + where + F: 'static + Send + Sync + Fn(Arc, TypedEnvelope) -> Fut, + Fut: 'static + Send + Future>, + M: RequestMessage, + { + self.add_message_handler(move |server, envelope| { + let receipt = envelope.receipt(); + let response = (handler)(server.clone(), envelope); + async move { + match response.await { + Ok(response) => { + server.peer.respond(receipt, response)?; + Ok(()) + } + Err(error) => { + server.peer.respond_with_error( + receipt, + proto::Error { + message: error.to_string(), + }, + )?; + Err(error) + } + } + } + }) + } + pub fn handle_connection( self: &Arc, connection: Connection, @@ -212,25 +242,20 @@ impl Server { Ok(()) } - async fn ping(self: Arc, request: TypedEnvelope) -> tide::Result<()> { - self.peer.respond(request.receipt(), proto::Ack {})?; - Ok(()) + async fn ping(self: Arc, _: TypedEnvelope) -> tide::Result { + Ok(proto::Ack {}) } async fn register_project( mut self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let project_id = { let mut state = self.state_mut(); let user_id = state.user_id_for_connection(request.sender_id)?; state.register_project(request.sender_id, user_id) }; - self.peer.respond( - request.receipt(), - proto::RegisterProjectResponse { project_id }, - )?; - Ok(()) + 
Ok(proto::RegisterProjectResponse { project_id }) } async fn unregister_project( @@ -239,8 +264,7 @@ impl Server { ) -> tide::Result<()> { let project = self .state_mut() - .unregister_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!("no such project"))?; + .unregister_project(request.payload.project_id, request.sender_id)?; self.update_contacts_for_users(project.authorized_user_ids().iter())?; Ok(()) } @@ -248,11 +272,10 @@ impl Server { async fn share_project( mut self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { self.state_mut() .share_project(request.payload.project_id, request.sender_id); - self.peer.respond(request.receipt(), proto::Ack {})?; - Ok(()) + Ok(proto::Ack {}) } async fn unshare_project( @@ -275,11 +298,11 @@ impl Server { async fn join_project( mut self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let project_id = request.payload.project_id; let user_id = self.state().user_id_for_connection(request.sender_id)?; - let response_data = self + let (response, connection_ids, contact_user_ids) = self .state_mut() .join_project(request.sender_id, user_id, project_id) .and_then(|joined| { @@ -326,37 +349,23 @@ impl Server { let connection_ids = joined.project.connection_ids(); let contact_user_ids = joined.project.authorized_user_ids(); Ok((response, connection_ids, contact_user_ids)) - }); - - match response_data { - Ok((response, connection_ids, contact_user_ids)) => { - broadcast(request.sender_id, connection_ids, |conn_id| { - self.peer.send( - conn_id, - proto::AddProjectCollaborator { - project_id, - collaborator: Some(proto::Collaborator { - peer_id: request.sender_id.0, - replica_id: response.replica_id, - user_id: user_id.to_proto(), - }), - }, - ) - })?; - self.peer.respond(request.receipt(), response)?; - self.update_contacts_for_users(&contact_user_ids)?; - } - Err(error) => { - self.peer.respond_with_error( - request.receipt(), - proto::Error { - 
message: error.to_string(), - }, - )?; - } - } + })?; - Ok(()) + broadcast(request.sender_id, connection_ids, |conn_id| { + self.peer.send( + conn_id, + proto::AddProjectCollaborator { + project_id, + collaborator: Some(proto::Collaborator { + peer_id: request.sender_id.0, + replica_id: response.replica_id, + user_id: user_id.to_proto(), + }), + }, + ) + })?; + self.update_contacts_for_users(&contact_user_ids)?; + Ok(response) } async fn leave_project( @@ -365,70 +374,49 @@ impl Server { ) -> tide::Result<()> { let sender_id = request.sender_id; let project_id = request.payload.project_id; - let worktree = self.state_mut().leave_project(sender_id, project_id); - if let Some(worktree) = worktree { - broadcast(sender_id, worktree.connection_ids, |conn_id| { - self.peer.send( - conn_id, - proto::RemoveProjectCollaborator { - project_id, - peer_id: sender_id.0, - }, - ) - })?; - self.update_contacts_for_users(&worktree.authorized_user_ids)?; - } + let worktree = self.state_mut().leave_project(sender_id, project_id)?; + + broadcast(sender_id, worktree.connection_ids, |conn_id| { + self.peer.send( + conn_id, + proto::RemoveProjectCollaborator { + project_id, + peer_id: sender_id.0, + }, + ) + })?; + self.update_contacts_for_users(&worktree.authorized_user_ids)?; + Ok(()) } async fn register_worktree( mut self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let receipt = request.receipt(); + ) -> tide::Result { let host_user_id = self.state().user_id_for_connection(request.sender_id)?; let mut contact_user_ids = HashSet::default(); contact_user_ids.insert(host_user_id); for github_login in request.payload.authorized_logins { - match self.app_state.db.create_user(&github_login, false).await { - Ok(contact_user_id) => { - contact_user_ids.insert(contact_user_id); - } - Err(err) => { - let message = err.to_string(); - self.peer - .respond_with_error(receipt, proto::Error { message })?; - return Ok(()); - } - } + let contact_user_id = 
self.app_state.db.create_user(&github_login, false).await?; + contact_user_ids.insert(contact_user_id); } let contact_user_ids = contact_user_ids.into_iter().collect::>(); - let ok = self.state_mut().register_worktree( + self.state_mut().register_worktree( request.payload.project_id, request.payload.worktree_id, + request.sender_id, Worktree { authorized_user_ids: contact_user_ids.clone(), root_name: request.payload.root_name, share: None, weak: false, }, - ); - - if ok { - self.peer.respond(receipt, proto::Ack {})?; - self.update_contacts_for_users(&contact_user_ids)?; - } else { - self.peer.respond_with_error( - receipt, - proto::Error { - message: NO_SUCH_PROJECT.to_string(), - }, - )?; - } - - Ok(()) + )?; + self.update_contacts_for_users(&contact_user_ids)?; + Ok(proto::Ack {}) } async fn unregister_worktree( @@ -456,7 +444,7 @@ impl Server { async fn share_worktree( mut self: Arc, mut request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let worktree = request .payload .worktree @@ -479,46 +467,32 @@ impl Server { request.sender_id, entries, diagnostic_summaries, - ); - if let Some(shared_worktree) = shared_worktree { - broadcast( - request.sender_id, - shared_worktree.connection_ids, - |connection_id| { - self.peer.forward_send( - request.sender_id, - connection_id, - request.payload.clone(), - ) - }, - )?; - self.peer.respond(request.receipt(), proto::Ack {})?; - self.update_contacts_for_users(&shared_worktree.authorized_user_ids)?; - } else { - self.peer.respond_with_error( - request.receipt(), - proto::Error { - message: "no such worktree".to_string(), - }, - )?; - } - Ok(()) + )?; + + broadcast( + request.sender_id, + shared_worktree.connection_ids, + |connection_id| { + self.peer + .forward_send(request.sender_id, connection_id, request.payload.clone()) + }, + )?; + self.update_contacts_for_users(&shared_worktree.authorized_user_ids)?; + + Ok(proto::Ack {}) } async fn update_worktree( mut self: Arc, request: TypedEnvelope, ) -> 
tide::Result<()> { - let connection_ids = self - .state_mut() - .update_worktree( - request.sender_id, - request.payload.project_id, - request.payload.worktree_id, - &request.payload.removed_entries, - &request.payload.updated_entries, - ) - .ok_or_else(|| anyhow!("no such worktree"))?; + let connection_ids = self.state_mut().update_worktree( + request.sender_id, + request.payload.project_id, + request.payload.worktree_id, + &request.payload.removed_entries, + &request.payload.updated_entries, + )?; broadcast(request.sender_id, connection_ids, |connection_id| { self.peer @@ -532,19 +506,17 @@ impl Server { mut self: Arc, request: TypedEnvelope, ) -> tide::Result<()> { - let receiver_ids = request + let summary = request .payload .summary .clone() - .and_then(|summary| { - self.state_mut().update_diagnostic_summary( - request.payload.project_id, - request.payload.worktree_id, - request.sender_id, - summary, - ) - }) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .ok_or_else(|| anyhow!("invalid summary"))?; + let receiver_ids = self.state_mut().update_diagnostic_summary( + request.payload.project_id, + request.payload.worktree_id, + request.sender_id, + summary, + )?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer @@ -559,8 +531,7 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -574,8 +545,7 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, 
receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -586,37 +556,29 @@ impl Server { async fn get_definition( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let receipt = request.receipt(); + ) -> tide::Result { let host_connection_id = self .state() - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))? + .read_project(request.payload.project_id, request.sender_id)? .host_connection_id; - let response = self + Ok(self .peer .forward_request(request.sender_id, host_connection_id, request.payload) - .await?; - self.peer.respond(receipt, response)?; - Ok(()) + .await?) } async fn open_buffer( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let receipt = request.receipt(); + ) -> tide::Result { let host_connection_id = self .state() - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))? + .read_project(request.payload.project_id, request.sender_id)? .host_connection_id; - let response = self + Ok(self .peer .forward_request(request.sender_id, host_connection_id, request.payload) - .await?; - self.peer.respond(receipt, response)?; - Ok(()) + .await?) } async fn close_buffer( @@ -625,8 +587,7 @@ impl Server { ) -> tide::Result<()> { let host_connection_id = self .state() - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))? + .read_project(request.payload.project_id, request.sender_id)? 
.host_connection_id; self.peer .forward_send(request.sender_id, host_connection_id, request.payload)?; @@ -636,121 +597,111 @@ impl Server { async fn save_buffer( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let host; - let guests; + let mut guests; { let state = self.state(); - let project = state - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + let project = state.read_project(request.payload.project_id, request.sender_id)?; host = project.host_connection_id; guests = project.guest_connection_ids() } - let sender = request.sender_id; - let receipt = request.receipt(); let response = self .peer - .forward_request(sender, host, request.payload.clone()) + .forward_request(request.sender_id, host, request.payload.clone()) .await?; + guests.retain(|guest_connection_id| *guest_connection_id != request.sender_id); broadcast(host, guests, |conn_id| { - let response = response.clone(); - if conn_id == sender { - self.peer.respond(receipt, response) - } else { - self.peer.forward_send(host, conn_id, response) - } + self.peer.forward_send(host, conn_id, response.clone()) })?; - Ok(()) + Ok(response) } - async fn format_buffer( + async fn format_buffers( self: Arc, - request: TypedEnvelope, - ) -> tide::Result<()> { - let host; - { - let state = self.state(); - let project = state - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; - host = project.host_connection_id; - } - - let sender = request.sender_id; - let receipt = request.receipt(); - let response = self + request: TypedEnvelope, + ) -> tide::Result { + let host = self + .state() + .read_project(request.payload.project_id, request.sender_id)? 
+ .host_connection_id; + Ok(self .peer - .forward_request(sender, host, request.payload.clone()) - .await?; - self.peer.respond(receipt, response)?; - - Ok(()) + .forward_request(request.sender_id, host, request.payload.clone()) + .await?) } async fn get_completions( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let host; - { - let state = self.state(); - let project = state - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; - host = project.host_connection_id; - } - - let sender = request.sender_id; - let receipt = request.receipt(); - let response = self + ) -> tide::Result { + let host = self + .state() + .read_project(request.payload.project_id, request.sender_id)? + .host_connection_id; + Ok(self .peer - .forward_request(sender, host, request.payload.clone()) - .await?; - self.peer.respond(receipt, response)?; - Ok(()) + .forward_request(request.sender_id, host, request.payload.clone()) + .await?) } async fn apply_additional_edits_for_completion( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let host; - { - let state = self.state(); - let project = state - .read_project(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; - host = project.host_connection_id; - } + ) -> tide::Result { + let host = self + .state() + .read_project(request.payload.project_id, request.sender_id)? + .host_connection_id; + Ok(self + .peer + .forward_request(request.sender_id, host, request.payload.clone()) + .await?) + } - let sender = request.sender_id; - let receipt = request.receipt(); - let response = self + async fn get_code_actions( + self: Arc, + request: TypedEnvelope, + ) -> tide::Result { + let host = self + .state() + .read_project(request.payload.project_id, request.sender_id)? 
+ .host_connection_id; + Ok(self .peer - .forward_request(sender, host, request.payload.clone()) - .await?; - self.peer.respond(receipt, response)?; - Ok(()) + .forward_request(request.sender_id, host, request.payload.clone()) + .await?) + } + + async fn apply_code_action( + self: Arc, + request: TypedEnvelope, + ) -> tide::Result { + let host = self + .state() + .read_project(request.payload.project_id, request.sender_id)? + .host_connection_id; + Ok(self + .peer + .forward_request(request.sender_id, host, request.payload.clone()) + .await?) } async fn update_buffer( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) })?; - self.peer.respond(request.receipt(), proto::Ack {})?; - Ok(()) + Ok(proto::Ack {}) } async fn update_buffer_file( @@ -759,8 +710,7 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -774,8 +724,7 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, 
request.payload.clone()) @@ -789,8 +738,7 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .project_connection_ids(request.payload.project_id, request.sender_id) - .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + .project_connection_ids(request.payload.project_id, request.sender_id)?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -801,29 +749,24 @@ impl Server { async fn get_channels( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let user_id = self.state().user_id_for_connection(request.sender_id)?; let channels = self.app_state.db.get_accessible_channels(user_id).await?; - self.peer.respond( - request.receipt(), - proto::GetChannelsResponse { - channels: channels - .into_iter() - .map(|chan| proto::Channel { - id: chan.id.to_proto(), - name: chan.name, - }) - .collect(), - }, - )?; - Ok(()) + Ok(proto::GetChannelsResponse { + channels: channels + .into_iter() + .map(|chan| proto::Channel { + id: chan.id.to_proto(), + name: chan.name, + }) + .collect(), + }) } async fn get_users( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let receipt = request.receipt(); + ) -> tide::Result { let user_ids = request.payload.user_ids.into_iter().map(UserId::from_proto); let users = self .app_state @@ -837,9 +780,7 @@ impl Server { github_login: user.github_login, }) .collect(); - self.peer - .respond(receipt, proto::GetUsersResponse { users })?; - Ok(()) + Ok(proto::GetUsersResponse { users }) } fn update_contacts_for_users<'a>( @@ -867,7 +808,7 @@ impl Server { async fn join_channel( mut self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let user_id = self.state().user_id_for_connection(request.sender_id)?; let channel_id = ChannelId::from_proto(request.payload.channel_id); if !self @@ -894,14 +835,10 @@ impl Server { nonce: Some(msg.nonce.as_u128().into()), }) 
.collect::>(); - self.peer.respond( - request.receipt(), - proto::JoinChannelResponse { - done: messages.len() < MESSAGE_COUNT_PER_PAGE, - messages, - }, - )?; - Ok(()) + Ok(proto::JoinChannelResponse { + done: messages.len() < MESSAGE_COUNT_PER_PAGE, + messages, + }) } async fn leave_channel( @@ -928,54 +865,30 @@ impl Server { async fn send_channel_message( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { - let receipt = request.receipt(); + ) -> tide::Result { let channel_id = ChannelId::from_proto(request.payload.channel_id); let user_id; let connection_ids; { let state = self.state(); user_id = state.user_id_for_connection(request.sender_id)?; - if let Some(ids) = state.channel_connection_ids(channel_id) { - connection_ids = ids; - } else { - return Ok(()); - } + connection_ids = state.channel_connection_ids(channel_id)?; } // Validate the message body. let body = request.payload.body.trim().to_string(); if body.len() > MAX_MESSAGE_LEN { - self.peer.respond_with_error( - receipt, - proto::Error { - message: "message is too long".to_string(), - }, - )?; - return Ok(()); + return Err(anyhow!("message is too long"))?; } if body.is_empty() { - self.peer.respond_with_error( - receipt, - proto::Error { - message: "message can't be blank".to_string(), - }, - )?; - return Ok(()); + return Err(anyhow!("message can't be blank"))?; } let timestamp = OffsetDateTime::now_utc(); - let nonce = if let Some(nonce) = request.payload.nonce { - nonce - } else { - self.peer.respond_with_error( - receipt, - proto::Error { - message: "nonce can't be blank".to_string(), - }, - )?; - return Ok(()); - }; + let nonce = request + .payload + .nonce + .ok_or_else(|| anyhow!("nonce can't be blank"))?; let message_id = self .app_state @@ -999,19 +912,15 @@ impl Server { }, ) })?; - self.peer.respond( - receipt, - proto::SendChannelMessageResponse { - message: Some(message), - }, - )?; - Ok(()) + Ok(proto::SendChannelMessageResponse { + message: Some(message), + }) } async fn 
get_channel_messages( self: Arc, request: TypedEnvelope, - ) -> tide::Result<()> { + ) -> tide::Result { let user_id = self.state().user_id_for_connection(request.sender_id)?; let channel_id = ChannelId::from_proto(request.payload.channel_id); if !self @@ -1041,14 +950,11 @@ impl Server { nonce: Some(msg.nonce.as_u128().into()), }) .collect::>(); - self.peer.respond( - request.receipt(), - proto::GetChannelMessagesResponse { - done: messages.len() < MESSAGE_COUNT_PER_PAGE, - messages, - }, - )?; - Ok(()) + + Ok(proto::GetChannelMessagesResponse { + done: messages.len() < MESSAGE_COUNT_PER_PAGE, + messages, + }) } fn state<'a>(self: &'a Arc) -> RwLockReadGuard<'a, Store> { @@ -1183,14 +1089,18 @@ mod tests { self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, EstablishConnectionError, UserStore, }, - editor::{Editor, EditorSettings, Input, MultiBuffer}, + editor::{ + self, ConfirmCodeAction, ConfirmCompletion, Editor, EditorSettings, Input, MultiBuffer, + Redo, ToggleCodeActions, Undo, + }, fs::{FakeFs, Fs as _}, language::{ tree_sitter_rust, AnchorRangeExt, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, }, lsp, - project::{DiagnosticSummary, Project, ProjectPath}, + project::{worktree::WorktreeHandle, DiagnosticSummary, Project, ProjectPath}, + workspace::{Workspace, WorkspaceParams}, }; #[cfg(test)] @@ -1301,7 +1211,7 @@ mod tests { .unwrap(); let editor_b = cx_b.add_view(window_b, |cx| { - Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), cx) + Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), None, cx) }); // TODO @@ -1560,11 +1470,20 @@ mod tests { buffer_b.read_with(&cx_b, |buf, _| assert!(!buf.is_dirty())); buffer_c.condition(&cx_c, |buf, _| !buf.is_dirty()).await; + // Ensure worktree observes a/file1's change event *before* the rename occurs, otherwise + // when interpreting the change event it will mistakenly think that the file 
has been + // deleted (because its path has changed) and will subsequently fail to detect the rename. + worktree_a.flush_fs_events(&cx_a).await; + // Make changes on host's file system, see those changes on guest worktrees. - fs.rename("/a/file1".as_ref(), "/a/file1-renamed".as_ref()) - .await - .unwrap(); - fs.rename("/a/file2".as_ref(), "/a/file3".as_ref()) + fs.rename( + "/a/file1".as_ref(), + "/a/file1-renamed".as_ref(), + Default::default(), + ) + .await + .unwrap(); + fs.rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default()) .await .unwrap(); fs.insert_file(Path::new("/a/file4"), "4".into()) @@ -1572,38 +1491,29 @@ mod tests { .unwrap(); worktree_a - .condition(&cx_a, |tree, _| tree.file_count() == 4) - .await; - worktree_b - .condition(&cx_b, |tree, _| tree.file_count() == 4) - .await; - worktree_c - .condition(&cx_c, |tree, _| tree.file_count() == 4) - .await; - worktree_a.read_with(&cx_a, |tree, _| { - assert_eq!( + .condition(&cx_a, |tree, _| { tree.paths() .map(|p| p.to_string_lossy()) - .collect::>(), - &[".zed.toml", "file1-renamed", "file3", "file4"] - ) - }); - worktree_b.read_with(&cx_b, |tree, _| { - assert_eq!( + .collect::>() + == [".zed.toml", "file1-renamed", "file3", "file4"] + }) + .await; + worktree_b + .condition(&cx_b, |tree, _| { tree.paths() .map(|p| p.to_string_lossy()) - .collect::>(), - &[".zed.toml", "file1-renamed", "file3", "file4"] - ) - }); - worktree_c.read_with(&cx_c, |tree, _| { - assert_eq!( + .collect::>() + == [".zed.toml", "file1-renamed", "file3", "file4"] + }) + .await; + worktree_c + .condition(&cx_c, |tree, _| { tree.paths() .map(|p| p.to_string_lossy()) - .collect::>(), - &[".zed.toml", "file1-renamed", "file3", "file4"] - ) - }); + .collect::>() + == [".zed.toml", "file1-renamed", "file3", "file4"] + }) + .await; // Ensure buffer files are updated as well. 
buffer_a @@ -1798,7 +1708,7 @@ mod tests { }); } - #[gpui::test(iterations = 100)] + #[gpui::test(iterations = 10)] async fn test_editing_while_guest_opens_buffer( mut cx_a: TestAppContext, mut cx_b: TestAppContext, @@ -2038,7 +1948,7 @@ mod tests { // Set up a fake language server. let (language_server_config, mut fake_language_server) = - LanguageServerConfig::fake(cx_a.background()).await; + LanguageServerConfig::fake(&cx_a).await; Arc::get_mut(&mut lang_registry) .unwrap() .add(Arc::new(Language::new( @@ -2270,7 +2180,7 @@ mod tests { }), ..Default::default() }, - cx_a.background(), + &cx_a, ) .await; Arc::get_mut(&mut lang_registry) @@ -2349,6 +2259,7 @@ mod tests { Editor::for_buffer( cx.add_model(|cx| MultiBuffer::singleton(buffer_b.clone(), cx)), Arc::new(|cx| EditorSettings::test(cx)), + Some(project_b.clone()), cx, ) }); @@ -2361,52 +2272,46 @@ mod tests { }); // Receive a completion request as the host's language server. - let (request_id, params) = fake_language_server - .receive_request::() - .await; - assert_eq!( - params.text_document_position.text_document.uri, - lsp::Url::from_file_path("/a/main.rs").unwrap(), - ); - assert_eq!( - params.text_document_position.position, - lsp::Position::new(0, 14), - ); - // Return some completions from the host's language server. 
- fake_language_server - .respond( - request_id, - Some(lsp::CompletionResponse::Array(vec![ - lsp::CompletionItem { - label: "first_method(…)".into(), - detail: Some("fn(&mut self, B) -> C".into()), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - new_text: "first_method($1)".to_string(), - range: lsp::Range::new( - lsp::Position::new(0, 14), - lsp::Position::new(0, 14), - ), - })), - insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), - ..Default::default() - }, - lsp::CompletionItem { - label: "second_method(…)".into(), - detail: Some("fn(&mut self, C) -> D".into()), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - new_text: "second_method()".to_string(), - range: lsp::Range::new( - lsp::Position::new(0, 14), - lsp::Position::new(0, 14), - ), - })), - insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), - ..Default::default() - }, - ])), - ) - .await; + fake_language_server.handle_request::(|params| { + assert_eq!( + params.text_document_position.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!( + params.text_document_position.position, + lsp::Position::new(0, 14), + ); + + Some(lsp::CompletionResponse::Array(vec![ + lsp::CompletionItem { + label: "first_method(…)".into(), + detail: Some("fn(&mut self, B) -> C".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "first_method($1)".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + }, + lsp::CompletionItem { + label: "second_method(…)".into(), + detail: Some("fn(&mut self, C) -> D".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "second_method()".to_string(), + range: lsp::Range::new( + lsp::Position::new(0, 14), + lsp::Position::new(0, 14), + ), + })), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + 
..Default::default() + }, + ])) + }); // Open the buffer on the host. let buffer_a = project_a @@ -2422,48 +2327,37 @@ mod tests { // Confirm a completion on the guest. editor_b.next_notification(&cx_b).await; editor_b.update(&mut cx_b, |editor, cx| { - assert!(editor.has_completions()); - editor.confirm_completion(Some(0), cx); + assert!(editor.context_menu_visible()); + editor.confirm_completion(&ConfirmCompletion(Some(0)), cx); assert_eq!(editor.text(cx), "fn main() { a.first_method() }"); }); + // Return a resolved completion from the host's language server. + // The resolved completion has an additional text edit. + fake_language_server.handle_request::(|params| { + assert_eq!(params.label, "first_method(…)"); + lsp::CompletionItem { + label: "first_method(…)".into(), + detail: Some("fn(&mut self, B) -> C".into()), + text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { + new_text: "first_method($1)".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 14), lsp::Position::new(0, 14)), + })), + additional_text_edits: Some(vec![lsp::TextEdit { + new_text: "use d::SomeTrait;\n".to_string(), + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), + }]), + insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), + ..Default::default() + } + }); + buffer_a .condition(&cx_a, |buffer, _| { buffer.text() == "fn main() { a.first_method() }" }) .await; - // Receive a request resolve the selected completion on the host's language server. - let (request_id, params) = fake_language_server - .receive_request::() - .await; - assert_eq!(params.label, "first_method(…)"); - - // Return a resolved completion from the host's language server. - // The resolved completion has an additional text edit. 
- fake_language_server - .respond( - request_id, - lsp::CompletionItem { - label: "first_method(…)".into(), - detail: Some("fn(&mut self, B) -> C".into()), - text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { - new_text: "first_method($1)".to_string(), - range: lsp::Range::new( - lsp::Position::new(0, 14), - lsp::Position::new(0, 14), - ), - })), - additional_text_edits: Some(vec![lsp::TextEdit { - new_text: "use d::SomeTrait;\n".to_string(), - range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)), - }]), - insert_text_format: Some(lsp::InsertTextFormat::SNIPPET), - ..Default::default() - }, - ) - .await; - // The additional edit is applied. buffer_b .condition(&cx_b, |buffer, _| { @@ -2484,7 +2378,7 @@ mod tests { // Set up a fake language server. let (language_server_config, mut fake_language_server) = - LanguageServerConfig::fake(cx_a.background()).await; + LanguageServerConfig::fake(&cx_a).await; Arc::get_mut(&mut lang_registry) .unwrap() .add(Arc::new(Language::new( @@ -2554,25 +2448,23 @@ mod tests { .await .unwrap(); - let format = buffer_b.update(&mut cx_b, |buffer, cx| buffer.format(cx)); - let (request_id, _) = fake_language_server - .receive_request::() - .await; - fake_language_server - .respond( - request_id, - Some(vec![ - lsp::TextEdit { - range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)), - new_text: "h".to_string(), - }, - lsp::TextEdit { - range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 7)), - new_text: "y".to_string(), - }, - ]), - ) - .await; + let format = project_b.update(&mut cx_b, |project, cx| { + project.format(HashSet::from_iter([buffer_b.clone()]), true, cx) + }); + + fake_language_server.handle_request::(|_| { + Some(vec![ + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)), + new_text: "h".to_string(), + }, + lsp::TextEdit { + range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 7)), + new_text: 
"y".to_string(), + }, + ]) + }); + format.await.unwrap(); assert_eq!( buffer_b.read_with(&cx_b, |buffer, _| buffer.text()), @@ -2603,7 +2495,7 @@ mod tests { // Set up a fake language server. let (language_server_config, mut fake_language_server) = - LanguageServerConfig::fake(cx_a.background()).await; + LanguageServerConfig::fake(&cx_a).await; Arc::get_mut(&mut lang_registry) .unwrap() .add(Arc::new(Language::new( @@ -2659,26 +2551,22 @@ mod tests { .await .unwrap(); - // Open the file to be formatted on client B. + // Open the file on client B. let buffer_b = cx_b .background() .spawn(project_b.update(&mut cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))) .await .unwrap(); + // Request the definition of a symbol as the guest. let definitions_1 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 23, cx)); - let (request_id, _) = fake_language_server - .receive_request::() - .await; - fake_language_server - .respond( - request_id, - Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( - lsp::Url::from_file_path("/root-2/b.rs").unwrap(), - lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), - ))), - ) - .await; + fake_language_server.handle_request::(|_| { + Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( + lsp::Url::from_file_path("/root-2/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + ))) + }); + let definitions_1 = definitions_1.await.unwrap(); cx_b.read(|cx| { assert_eq!(definitions_1.len(), 1); @@ -2697,18 +2585,13 @@ mod tests { // Try getting more definitions for the same buffer, ensuring the buffer gets reused from // the previous call to `definition`. 
let definitions_2 = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b, 33, cx)); - let (request_id, _) = fake_language_server - .receive_request::() - .await; - fake_language_server - .respond( - request_id, - Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( - lsp::Url::from_file_path("/root-2/b.rs").unwrap(), - lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)), - ))), - ) - .await; + fake_language_server.handle_request::(|_| { + Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( + lsp::Url::from_file_path("/root-2/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)), + ))) + }); + let definitions_2 = definitions_2.await.unwrap(); cx_b.read(|cx| { assert_eq!(definitions_2.len(), 1); @@ -2758,7 +2641,7 @@ mod tests { // Set up a fake language server. let (language_server_config, mut fake_language_server) = - LanguageServerConfig::fake(cx_a.background()).await; + LanguageServerConfig::fake(&cx_a).await; Arc::get_mut(&mut lang_registry) .unwrap() .add(Arc::new(Language::new( @@ -2832,18 +2715,12 @@ mod tests { definitions = project_b.update(&mut cx_b, |p, cx| p.definition(&buffer_b1, 23, cx)); } - let (request_id, _) = fake_language_server - .receive_request::() - .await; - fake_language_server - .respond( - request_id, - Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( - lsp::Url::from_file_path("/root/b.rs").unwrap(), - lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), - ))), - ) - .await; + fake_language_server.handle_request::(|_| { + Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new( + lsp::Url::from_file_path("/root/b.rs").unwrap(), + lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), + ))) + }); let buffer_b2 = buffer_b2.await.unwrap(); let definitions = definitions.await.unwrap(); @@ -2851,6 +2728,247 @@ mod tests { assert_eq!(definitions[0].target_buffer, buffer_b2); } + #[gpui::test(iterations = 10)] + async fn 
test_collaborating_with_code_actions( + mut cx_a: TestAppContext, + mut cx_b: TestAppContext, + ) { + cx_a.foreground().forbid_parking(); + let mut lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new(cx_a.background())); + let mut path_openers_b = Vec::new(); + cx_b.update(|cx| editor::init(cx, &mut path_openers_b)); + + // Set up a fake language server. + let (language_server_config, mut fake_language_server) = + LanguageServerConfig::fake_with_capabilities( + lsp::ServerCapabilities { + ..Default::default() + }, + &cx_a, + ) + .await; + Arc::get_mut(&mut lang_registry) + .unwrap() + .add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + language_server: Some(language_server_config), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); + + // Connect to a server as 2 clients. + let mut server = TestServer::start(cx_a.foreground()).await; + let client_a = server.create_client(&mut cx_a, "user_a").await; + let client_b = server.create_client(&mut cx_b, "user_b").await; + + // Share a project as client A + fs.insert_tree( + "/a", + json!({ + ".zed.toml": r#"collaborators = ["user_b"]"#, + "main.rs": "mod other;\nfn main() { let foo = other::foo(); }", + "other.rs": "pub fn foo() -> usize { 4 }", + }), + ) + .await; + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let (worktree_a, _) = project_a + .update(&mut cx_a, |p, cx| { + p.find_or_create_local_worktree("/a", false, cx) + }) + .await + .unwrap(); + worktree_a + .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await; + let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id()); + project_a + .update(&mut cx_a, |p, cx| p.share(cx)) + .await + .unwrap(); + + // Join the worktree 
as client B. + let project_b = Project::remote( + project_id, + client_b.clone(), + client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), + &mut cx_b.to_async(), + ) + .await + .unwrap(); + let mut params = cx_b.update(WorkspaceParams::test); + params.languages = lang_registry.clone(); + params.client = client_b.client.clone(); + params.user_store = client_b.user_store.clone(); + params.project = project_b; + params.path_openers = path_openers_b.into(); + + let (_window_b, workspace_b) = cx_b.add_window(|cx| Workspace::new(¶ms, cx)); + let editor_b = workspace_b + .update(&mut cx_b, |workspace, cx| { + workspace.open_path((worktree_id, "main.rs").into(), cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + fake_language_server + .handle_request::(|params| { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!(params.range.start, lsp::Position::new(0, 0)); + assert_eq!(params.range.end, lsp::Position::new(0, 0)); + None + }) + .next() + .await; + + // Move cursor to a location that contains code actions. 
+ editor_b.update(&mut cx_b, |editor, cx| { + editor.select_ranges([Point::new(1, 31)..Point::new(1, 31)], None, cx); + cx.focus(&editor_b); + }); + fake_language_server.handle_request::(|params| { + assert_eq!( + params.text_document.uri, + lsp::Url::from_file_path("/a/main.rs").unwrap(), + ); + assert_eq!(params.range.start, lsp::Position::new(1, 31)); + assert_eq!(params.range.end, lsp::Position::new(1, 31)); + + Some(vec![lsp::CodeActionOrCommand::CodeAction( + lsp::CodeAction { + title: "Inline into all callers".to_string(), + edit: Some(lsp::WorkspaceEdit { + changes: Some( + [ + ( + lsp::Url::from_file_path("/a/main.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(1, 22), + lsp::Position::new(1, 34), + ), + "4".to_string(), + )], + ), + ( + lsp::Url::from_file_path("/a/other.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(0, 27), + ), + "".to_string(), + )], + ), + ] + .into_iter() + .collect(), + ), + ..Default::default() + }), + data: Some(json!({ + "codeActionParams": { + "range": { + "start": {"line": 1, "column": 31}, + "end": {"line": 1, "column": 31}, + } + } + })), + ..Default::default() + }, + )]) + }); + + // Toggle code actions and wait for them to display. + editor_b.update(&mut cx_b, |editor, cx| { + editor.toggle_code_actions(&ToggleCodeActions(false), cx); + }); + editor_b + .condition(&cx_b, |editor, _| editor.context_menu_visible()) + .await; + + // Confirming the code action will trigger a resolve request. 
+ let confirm_action = workspace_b + .update(&mut cx_b, |workspace, cx| { + Editor::confirm_code_action(workspace, &ConfirmCodeAction(Some(0)), cx) + }) + .unwrap(); + fake_language_server.handle_request::(|_| { + lsp::CodeAction { + title: "Inline into all callers".to_string(), + edit: Some(lsp::WorkspaceEdit { + changes: Some( + [ + ( + lsp::Url::from_file_path("/a/main.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(1, 22), + lsp::Position::new(1, 34), + ), + "4".to_string(), + )], + ), + ( + lsp::Url::from_file_path("/a/other.rs").unwrap(), + vec![lsp::TextEdit::new( + lsp::Range::new( + lsp::Position::new(0, 0), + lsp::Position::new(0, 27), + ), + "".to_string(), + )], + ), + ] + .into_iter() + .collect(), + ), + ..Default::default() + }), + ..Default::default() + } + }); + + // After the action is confirmed, an editor containing both modified files is opened. + confirm_action.await.unwrap(); + let code_action_editor = workspace_b.read_with(&cx_b, |workspace, cx| { + workspace + .active_item(cx) + .unwrap() + .downcast::() + .unwrap() + }); + code_action_editor.update(&mut cx_b, |editor, cx| { + assert_eq!(editor.text(cx), "\nmod other;\nfn main() { let foo = 4; }"); + editor.undo(&Undo, cx); + assert_eq!( + editor.text(cx), + "pub fn foo() -> usize { 4 }\nmod other;\nfn main() { let foo = other::foo(); }" + ); + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "\nmod other;\nfn main() { let foo = 4; }"); + }); + } + #[gpui::test(iterations = 10)] async fn test_basic_chat(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { cx_a.foreground().forbid_parking(); diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index 6e11f431aca72c2ee44c255bdf36103cf2d1d744..5cb0a0e1db028631c468b1ab3519114483a12f14 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -122,10 +122,10 @@ impl Store { let mut result = RemovedConnectionState::default(); for project_id in 
connection.projects.clone() { - if let Some(project) = self.unregister_project(project_id, connection_id) { + if let Ok(project) = self.unregister_project(project_id, connection_id) { result.contact_ids.extend(project.authorized_user_ids()); result.hosted_projects.insert(project_id, project); - } else if let Some(project) = self.leave_project(connection_id, project_id) { + } else if let Ok(project) = self.leave_project(connection_id, project_id) { result .guest_project_ids .insert(project_id, project.connection_ids); @@ -254,9 +254,14 @@ impl Store { &mut self, project_id: u64, worktree_id: u64, + connection_id: ConnectionId, worktree: Worktree, - ) -> bool { - if let Some(project) = self.projects.get_mut(&project_id) { + ) -> tide::Result<()> { + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; + if project.host_connection_id == connection_id { for authorized_user_id in &worktree.authorized_user_ids { self.visible_projects_by_user_id .entry(*authorized_user_id) @@ -270,9 +275,9 @@ impl Store { #[cfg(test)] self.check_invariants(); - true + Ok(()) } else { - false + Err(anyhow!("no such project"))? } } @@ -280,7 +285,7 @@ impl Store { &mut self, project_id: u64, connection_id: ConnectionId, - ) -> Option { + ) -> tide::Result { match self.projects.entry(project_id) { hash_map::Entry::Occupied(e) => { if e.get().host_connection_id == connection_id { @@ -292,12 +297,12 @@ impl Store { } } - Some(e.remove()) + Ok(e.remove()) } else { - None + Err(anyhow!("no such project"))? 
} } - hash_map::Entry::Vacant(_) => None, + hash_map::Entry::Vacant(_) => Err(anyhow!("no such project"))?, } } @@ -398,20 +403,26 @@ impl Store { connection_id: ConnectionId, entries: HashMap, diagnostic_summaries: BTreeMap, - ) -> Option { - let project = self.projects.get_mut(&project_id)?; - let worktree = project.worktrees.get_mut(&worktree_id)?; + ) -> tide::Result { + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; + let worktree = project + .worktrees + .get_mut(&worktree_id) + .ok_or_else(|| anyhow!("no such worktree"))?; if project.host_connection_id == connection_id && project.share.is_some() { worktree.share = Some(WorktreeShare { entries, diagnostic_summaries, }); - Some(SharedWorktree { + Ok(SharedWorktree { authorized_user_ids: project.authorized_user_ids(), connection_ids: project.guest_connection_ids(), }) } else { - None + Err(anyhow!("no such worktree"))? } } @@ -421,19 +432,25 @@ impl Store { worktree_id: u64, connection_id: ConnectionId, summary: proto::DiagnosticSummary, - ) -> Option> { - let project = self.projects.get_mut(&project_id)?; - let worktree = project.worktrees.get_mut(&worktree_id)?; + ) -> tide::Result> { + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; + let worktree = project + .worktrees + .get_mut(&worktree_id) + .ok_or_else(|| anyhow!("no such worktree"))?; if project.host_connection_id == connection_id { if let Some(share) = worktree.share.as_mut() { share .diagnostic_summaries .insert(summary.path.clone().into(), summary); - return Some(project.connection_ids()); + return Ok(project.connection_ids()); } } - None + Err(anyhow!("no such worktree"))? 
} pub fn join_project( @@ -481,10 +498,19 @@ impl Store { &mut self, connection_id: ConnectionId, project_id: u64, - ) -> Option { - let project = self.projects.get_mut(&project_id)?; - let share = project.share.as_mut()?; - let (replica_id, _) = share.guests.remove(&connection_id)?; + ) -> tide::Result { + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; + let share = project + .share + .as_mut() + .ok_or_else(|| anyhow!("project is not shared"))?; + let (replica_id, _) = share + .guests + .remove(&connection_id) + .ok_or_else(|| anyhow!("cannot leave a project before joining it"))?; share.active_replica_ids.remove(&replica_id); if let Some(connection) = self.connections.get_mut(&connection_id) { @@ -497,7 +523,7 @@ impl Store { #[cfg(test)] self.check_invariants(); - Some(LeftProject { + Ok(LeftProject { connection_ids, authorized_user_ids, }) @@ -510,31 +536,40 @@ impl Store { worktree_id: u64, removed_entries: &[u64], updated_entries: &[proto::Entry], - ) -> Option> { + ) -> tide::Result> { let project = self.write_project(project_id, connection_id)?; - let share = project.worktrees.get_mut(&worktree_id)?.share.as_mut()?; + let share = project + .worktrees + .get_mut(&worktree_id) + .ok_or_else(|| anyhow!("no such worktree"))? + .share + .as_mut() + .ok_or_else(|| anyhow!("worktree is not shared"))?; for entry_id in removed_entries { share.entries.remove(&entry_id); } for entry in updated_entries { share.entries.insert(entry.id, entry.clone()); } - Some(project.connection_ids()) + Ok(project.connection_ids()) } pub fn project_connection_ids( &self, project_id: u64, acting_connection_id: ConnectionId, - ) -> Option> { - Some( - self.read_project(project_id, acting_connection_id)? - .connection_ids(), - ) + ) -> tide::Result> { + Ok(self + .read_project(project_id, acting_connection_id)? 
+ .connection_ids()) } - pub fn channel_connection_ids(&self, channel_id: ChannelId) -> Option> { - Some(self.channels.get(&channel_id)?.connection_ids()) + pub fn channel_connection_ids(&self, channel_id: ChannelId) -> tide::Result> { + Ok(self + .channels + .get(&channel_id) + .ok_or_else(|| anyhow!("no such channel"))? + .connection_ids()) } #[cfg(test)] @@ -542,14 +577,26 @@ impl Store { self.projects.get(&project_id) } - pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> { - let project = self.projects.get(&project_id)?; + pub fn read_project( + &self, + project_id: u64, + connection_id: ConnectionId, + ) -> tide::Result<&Project> { + let project = self + .projects + .get(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; if project.host_connection_id == connection_id - || project.share.as_ref()?.guests.contains_key(&connection_id) + || project + .share + .as_ref() + .ok_or_else(|| anyhow!("project is not shared"))? + .guests + .contains_key(&connection_id) { - Some(project) + Ok(project) } else { - None + Err(anyhow!("no such project"))? } } @@ -557,14 +604,22 @@ impl Store { &mut self, project_id: u64, connection_id: ConnectionId, - ) -> Option<&mut Project> { - let project = self.projects.get_mut(&project_id)?; + ) -> tide::Result<&mut Project> { + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; if project.host_connection_id == connection_id - || project.share.as_ref()?.guests.contains_key(&connection_id) + || project + .share + .as_ref() + .ok_or_else(|| anyhow!("project is not shared"))? + .guests + .contains_key(&connection_id) { - Some(project) + Ok(project) } else { - None + Err(anyhow!("no such project"))? 
} } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 5a36bf0401e4c4133b38670f07d21f930dae1994..a14a16cbc4f55f991b7ce9a1271fffe5cb5de6ec 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,5 +1,5 @@ use super::{Point, ToOffset}; -use crate::{rope::TextDimension, BufferSnapshot}; +use crate::{rope::TextDimension, BufferSnapshot, PointUtf16, ToPointUtf16}; use anyhow::Result; use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::Bias; @@ -78,6 +78,7 @@ pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Result; fn to_offset(&self, content: &BufferSnapshot) -> Range; fn to_point(&self, content: &BufferSnapshot) -> Range; + fn to_point_utf16(&self, content: &BufferSnapshot) -> Range; } impl AnchorRangeExt for Range { @@ -95,4 +96,8 @@ impl AnchorRangeExt for Range { fn to_point(&self, content: &BufferSnapshot) -> Range { self.start.summary::(&content)..self.end.summary::(&content) } + + fn to_point_utf16(&self, content: &BufferSnapshot) -> Range { + self.start.to_point_utf16(content)..self.end.to_point_utf16(content) + } } diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index d9c900d8bc40541128a619d8cd24219122e6b04b..cd474cc4da1b33991659f20d690702395c33c711 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -179,6 +179,19 @@ impl Rope { }) } + pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 { + if point >= self.summary().lines { + return self.summary().lines_utf16; + } + let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>(); + cursor.seek(&point, Bias::Left, &()); + let overshoot = point - cursor.start().0; + cursor.start().1 + + cursor.item().map_or(PointUtf16::zero(), |chunk| { + chunk.point_to_point_utf16(overshoot) + }) + } + pub fn point_to_offset(&self, point: Point) -> usize { if point >= self.summary().lines { return self.summary().bytes; @@ -580,6 +593,27 @@ impl Chunk { offset } + fn point_to_point_utf16(&self, target: 
Point) -> PointUtf16 { + let mut point = Point::zero(); + let mut point_utf16 = PointUtf16::new(0, 0); + for ch in self.0.chars() { + if point >= target { + break; + } + + if ch == '\n' { + point_utf16.row += 1; + point_utf16.column = 0; + point.row += 1; + point.column = 0; + } else { + point_utf16.column += ch.len_utf16() as u32; + point.column += ch.len_utf8() as u32; + } + } + point_utf16 + } + fn point_utf16_to_offset(&self, target: PointUtf16) -> usize { let mut offset = 0; let mut point = PointUtf16::new(0, 0); diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index de735051aec32142f44e18f2132deb96470ec78c..4f5e6effb6540b9d399e9bc8b9f69ae88f595ffd 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -432,28 +432,28 @@ fn test_undo_redo() { buffer.edit(vec![3..5], "cd"); assert_eq!(buffer.text(), "1abcdef234"); - let transactions = buffer.history.undo_stack.clone(); - assert_eq!(transactions.len(), 3); + let entries = buffer.history.undo_stack.clone(); + assert_eq!(entries.len(), 3); - buffer.undo_or_redo(transactions[0].clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(transactions[0].clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(transactions[1].clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(transactions[2].clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(transactions[1].clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[2].clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); assert_eq!(buffer.text(), 
"1abcdef234"); - buffer.undo_or_redo(transactions[2].clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(transactions[0].clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(transactions[1].clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); assert_eq!(buffer.text(), "1234"); } @@ -502,7 +502,7 @@ fn test_history() { } #[test] -fn test_avoid_grouping_next_transaction() { +fn test_finalize_last_transaction() { let now = Instant::now(); let mut buffer = Buffer::new(0, 0, History::new("123456".into())); @@ -511,7 +511,7 @@ fn test_avoid_grouping_next_transaction() { buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); - buffer.avoid_grouping_next_transaction(); + buffer.finalize_last_transaction(); buffer.start_transaction_at(now); buffer.edit(vec![4..5], "e"); buffer.end_transaction_at(now).unwrap(); @@ -536,6 +536,44 @@ fn test_avoid_grouping_next_transaction() { assert_eq!(buffer.text(), "ab2cde6"); } +#[test] +fn test_edited_ranges_for_transaction() { + let now = Instant::now(); + let mut buffer = Buffer::new(0, 0, History::new("1234567".into())); + + buffer.start_transaction_at(now); + buffer.edit(vec![2..4], "cd"); + buffer.edit(vec![6..6], "efg"); + buffer.end_transaction_at(now); + assert_eq!(buffer.text(), "12cd56efg7"); + + let tx = buffer.finalize_last_transaction().unwrap().clone(); + assert_eq!( + buffer + .edited_ranges_for_transaction::(&tx) + .collect::>(), + [2..4, 6..9] + ); + + buffer.edit(vec![5..5], "hijk"); + assert_eq!(buffer.text(), "12cd5hijk6efg7"); + assert_eq!( + buffer + .edited_ranges_for_transaction::(&tx) + .collect::>(), + [2..4, 10..13] + ); + + buffer.edit(vec![4..4], "l"); + assert_eq!(buffer.text(), "12cdl5hijk6efg7"); + assert_eq!( + buffer + .edited_ranges_for_transaction::(&tx) + .collect::>(), + 
[2..4, 11..14] + ); +} + #[test] fn test_concurrent_edits() { let text = "abcdef"; @@ -551,12 +589,12 @@ fn test_concurrent_edits() { let buf3_op = buffer3.edit(vec![5..6], "56"); assert_eq!(buffer3.text(), "abcde56"); - buffer1.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); - buffer1.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); - buffer2.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); - buffer2.apply_op(Operation::Edit(buf3_op.clone())).unwrap(); - buffer3.apply_op(Operation::Edit(buf1_op.clone())).unwrap(); - buffer3.apply_op(Operation::Edit(buf2_op.clone())).unwrap(); + buffer1.apply_op(buf2_op.clone()).unwrap(); + buffer1.apply_op(buf3_op.clone()).unwrap(); + buffer2.apply_op(buf1_op.clone()).unwrap(); + buffer2.apply_op(buf3_op.clone()).unwrap(); + buffer3.apply_op(buf1_op.clone()).unwrap(); + buffer3.apply_op(buf2_op.clone()).unwrap(); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 088cd51cbe0b54e70f32ccc9f3bad94c6c3fefca..9b7f8dd230e0210b372c0b2ad9f0faf1aef1f004 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -40,7 +40,7 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; -pub type TransactionId = usize; +pub type TransactionId = clock::Local; pub struct Buffer { snapshot: BufferSnapshot, @@ -67,28 +67,37 @@ pub struct BufferSnapshot { } #[derive(Clone, Debug)] -pub struct Transaction { - id: TransactionId, - start: clock::Global, - end: clock::Global, - edits: Vec, - ranges: Vec>, +pub struct HistoryEntry { + transaction: Transaction, first_edit_at: Instant, last_edit_at: Instant, suppress_grouping: bool, } -impl Transaction { +#[derive(Clone, Debug)] +pub struct Transaction { + pub id: TransactionId, + pub edit_ids: Vec, + pub start: clock::Global, + pub end: clock::Global, + pub ranges: Vec>, +} + +impl HistoryEntry { + pub fn transaction_id(&self) -> TransactionId { + 
self.transaction.id + } + fn push_edit(&mut self, edit: &EditOperation) { - self.edits.push(edit.timestamp.local()); - self.end.observe(edit.timestamp.local()); + self.transaction.edit_ids.push(edit.timestamp.local()); + self.transaction.end.observe(edit.timestamp.local()); let mut other_ranges = edit.ranges.iter().peekable(); let mut new_ranges = Vec::new(); let insertion_len = edit.new_text.as_ref().map_or(0, |t| t.len()); let mut delta = 0; - for mut self_range in self.ranges.iter().cloned() { + for mut self_range in self.transaction.ranges.iter().cloned() { self_range.start += delta; self_range.end += delta; @@ -122,7 +131,7 @@ impl Transaction { delta += insertion_len; } - self.ranges = new_ranges; + self.transaction.ranges = new_ranges; } } @@ -130,42 +139,46 @@ impl Transaction { pub struct History { // TODO: Turn this into a String or Rope, maybe. pub base_text: Arc, - ops: HashMap, - undo_stack: Vec, - redo_stack: Vec, + operations: HashMap, + undo_stack: Vec, + redo_stack: Vec, transaction_depth: usize, group_interval: Duration, - next_transaction_id: TransactionId, } impl History { pub fn new(base_text: Arc) -> Self { Self { base_text, - ops: Default::default(), + operations: Default::default(), undo_stack: Vec::new(), redo_stack: Vec::new(), transaction_depth: 0, group_interval: Duration::from_millis(300), - next_transaction_id: 0, } } - fn push(&mut self, op: EditOperation) { - self.ops.insert(op.timestamp.local(), op); + fn push(&mut self, op: Operation) { + self.operations.insert(op.local_timestamp(), op); } - fn start_transaction(&mut self, start: clock::Global, now: Instant) -> Option { + fn start_transaction( + &mut self, + start: clock::Global, + now: Instant, + local_clock: &mut clock::Local, + ) -> Option { self.transaction_depth += 1; if self.transaction_depth == 1 { - let id = self.next_transaction_id; - self.next_transaction_id += 1; - self.undo_stack.push(Transaction { - id, - start: start.clone(), - end: start, - edits: Vec::new(), - 
ranges: Vec::new(), + let id = local_clock.tick(); + self.undo_stack.push(HistoryEntry { + transaction: Transaction { + id, + start: start.clone(), + end: start, + edit_ids: Default::default(), + ranges: Default::default(), + }, first_edit_at: now, last_edit_at: now, suppress_grouping: false, @@ -176,17 +189,24 @@ impl History { } } - fn end_transaction(&mut self, now: Instant) -> Option<&Transaction> { + fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> { assert_ne!(self.transaction_depth, 0); self.transaction_depth -= 1; if self.transaction_depth == 0 { - if self.undo_stack.last().unwrap().ranges.is_empty() { + if self + .undo_stack + .last() + .unwrap() + .transaction + .ranges + .is_empty() + { self.undo_stack.pop(); None } else { - let transaction = self.undo_stack.last_mut().unwrap(); - transaction.last_edit_at = now; - Some(transaction) + let entry = self.undo_stack.last_mut().unwrap(); + entry.last_edit_at = now; + Some(entry) } } else { None @@ -195,16 +215,15 @@ impl History { fn group(&mut self) -> Option { let mut new_len = self.undo_stack.len(); - let mut transactions = self.undo_stack.iter_mut(); - - if let Some(mut transaction) = transactions.next_back() { - while let Some(prev_transaction) = transactions.next_back() { - if !prev_transaction.suppress_grouping - && transaction.first_edit_at - prev_transaction.last_edit_at - <= self.group_interval - && transaction.start == prev_transaction.end + let mut entries = self.undo_stack.iter_mut(); + + if let Some(mut entry) = entries.next_back() { + while let Some(prev_entry) = entries.next_back() { + if !prev_entry.suppress_grouping + && entry.first_edit_at - prev_entry.last_edit_at <= self.group_interval + && entry.transaction.start == prev_entry.transaction.end { - transaction = prev_transaction; + entry = prev_entry; new_len -= 1; } else { break; @@ -212,101 +231,114 @@ impl History { } } - let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); - if 
let Some(last_transaction) = transactions_to_keep.last_mut() { - for transaction in &*transactions_to_merge { - for edit_id in &transaction.edits { - last_transaction.push_edit(&self.ops[edit_id]); + let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len); + if let Some(last_entry) = entries_to_keep.last_mut() { + for entry in &*entries_to_merge { + for edit_id in &entry.transaction.edit_ids { + last_entry.push_edit(self.operations[edit_id].as_edit().unwrap()); } } - if let Some(transaction) = transactions_to_merge.last_mut() { - last_transaction.last_edit_at = transaction.last_edit_at; - last_transaction.end = transaction.end.clone(); + if let Some(entry) = entries_to_merge.last_mut() { + last_entry.last_edit_at = entry.last_edit_at; + last_entry.transaction.end = entry.transaction.end.clone(); } } self.undo_stack.truncate(new_len); - self.undo_stack.last().map(|t| t.id) + self.undo_stack.last().map(|e| e.transaction.id) } - fn avoid_grouping_next_transaction(&mut self) { - if let Some(transaction) = self.undo_stack.last_mut() { - transaction.suppress_grouping = true; - } + fn finalize_last_transaction(&mut self) -> Option<&Transaction> { + self.undo_stack.last_mut().map(|entry| { + entry.suppress_grouping = true; + &entry.transaction + }) } - fn push_transaction(&mut self, edit_ids: impl IntoIterator, now: Instant) { + fn push_transaction(&mut self, transaction: Transaction, now: Instant) { assert_eq!(self.transaction_depth, 0); - let mut edit_ids = edit_ids.into_iter().peekable(); - - if let Some(first_edit_id) = edit_ids.peek() { - let version = self.ops[first_edit_id].version.clone(); - self.start_transaction(version, now); - for edit_id in edit_ids { - self.push_undo(edit_id); - } - self.end_transaction(now); - } + self.undo_stack.push(HistoryEntry { + transaction, + first_edit_at: now, + last_edit_at: now, + suppress_grouping: false, + }); } - fn push_undo(&mut self, edit_id: clock::Local) { + fn push_undo(&mut self, op_id: 
clock::Local) { assert_ne!(self.transaction_depth, 0); - let last_transaction = self.undo_stack.last_mut().unwrap(); - last_transaction.push_edit(&self.ops[&edit_id]); + if let Some(Operation::Edit(edit)) = self.operations.get(&op_id) { + let last_transaction = self.undo_stack.last_mut().unwrap(); + last_transaction.push_edit(&edit); + } } - fn pop_undo(&mut self) -> Option<&Transaction> { + fn pop_undo(&mut self) -> Option<&HistoryEntry> { assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.undo_stack.pop() { - self.redo_stack.push(transaction); + if let Some(entry) = self.undo_stack.pop() { + self.redo_stack.push(entry); self.redo_stack.last() } else { None } } - fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + fn remove_from_undo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] { assert_eq!(self.transaction_depth, 0); - if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) { - let transaction = self.undo_stack.remove(transaction_ix); - self.redo_stack.push(transaction); - self.redo_stack.last() - } else { - None + + let redo_stack_start_len = self.redo_stack.len(); + if let Some(entry_ix) = self + .undo_stack + .iter() + .rposition(|entry| entry.transaction.id == transaction_id) + { + self.redo_stack + .extend(self.undo_stack.drain(entry_ix..).rev()); } + &self.redo_stack[redo_stack_start_len..] 
} fn forget(&mut self, transaction_id: TransactionId) { assert_eq!(self.transaction_depth, 0); - if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) { - self.undo_stack.remove(transaction_ix); - } else if let Some(transaction_ix) = - self.redo_stack.iter().rposition(|t| t.id == transaction_id) + if let Some(entry_ix) = self + .undo_stack + .iter() + .rposition(|entry| entry.transaction.id == transaction_id) + { + self.undo_stack.remove(entry_ix); + } else if let Some(entry_ix) = self + .redo_stack + .iter() + .rposition(|entry| entry.transaction.id == transaction_id) { - self.undo_stack.remove(transaction_ix); + self.redo_stack.remove(entry_ix); } } - fn pop_redo(&mut self) -> Option<&Transaction> { + fn pop_redo(&mut self) -> Option<&HistoryEntry> { assert_eq!(self.transaction_depth, 0); - if let Some(transaction) = self.redo_stack.pop() { - self.undo_stack.push(transaction); + if let Some(entry) = self.redo_stack.pop() { + self.undo_stack.push(entry); self.undo_stack.last() } else { None } } - fn remove_from_redo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] { assert_eq!(self.transaction_depth, 0); - if let Some(transaction_ix) = self.redo_stack.iter().rposition(|t| t.id == transaction_id) { - let transaction = self.redo_stack.remove(transaction_ix); - self.undo_stack.push(transaction); - self.undo_stack.last() - } else { - None + + let undo_stack_start_len = self.undo_stack.len(); + if let Some(entry_ix) = self + .redo_stack + .iter() + .rposition(|entry| entry.transaction.id == transaction_id) + { + self.undo_stack + .extend(self.redo_stack.drain(entry_ix..).rev()); + } + &self.undo_stack[undo_stack_start_len..] 
} } @@ -545,57 +577,6 @@ impl Buffer { } } - pub fn from_parts( - replica_id: u16, - remote_id: u64, - visible_text: &str, - deleted_text: &str, - undo_map: impl Iterator)>, - fragments: impl ExactSizeIterator, - lamport_timestamp: u32, - version: clock::Global, - ) -> Self { - let visible_text = visible_text.into(); - let deleted_text = deleted_text.into(); - let fragments = SumTree::from_iter(fragments, &None); - let mut insertions = fragments - .iter() - .map(|fragment| InsertionFragment { - timestamp: fragment.insertion_timestamp.local(), - split_offset: fragment.insertion_offset, - fragment_id: fragment.id.clone(), - }) - .collect::>(); - insertions.sort_unstable_by_key(|i| (i.timestamp, i.split_offset)); - Self { - remote_id, - replica_id, - - history: History::new("".into()), - deferred_ops: OperationQueue::new(), - deferred_replicas: Default::default(), - local_clock: clock::Local { - replica_id, - value: version.get(replica_id) + 1, - }, - lamport_clock: clock::Lamport { - replica_id, - value: lamport_timestamp, - }, - subscriptions: Default::default(), - edit_id_resolvers: Default::default(), - snapshot: BufferSnapshot { - replica_id, - visible_text, - deleted_text, - undo_map: UndoMap(undo_map.collect()), - fragments, - insertions: SumTree::from_iter(insertions, &()), - version, - }, - } - } - pub fn version(&self) -> clock::Global { self.version.clone() } @@ -620,7 +601,7 @@ impl Buffer { self.history.group_interval } - pub fn edit(&mut self, ranges: R, new_text: T) -> EditOperation + pub fn edit(&mut self, ranges: R, new_text: T) -> Operation where R: IntoIterator, I: ExactSizeIterator>, @@ -641,13 +622,14 @@ impl Buffer { local: self.local_clock.tick().value, lamport: self.lamport_clock.tick().value, }; - let edit = self.apply_local_edit(ranges.into_iter(), new_text, timestamp); + let operation = + Operation::Edit(self.apply_local_edit(ranges.into_iter(), new_text, timestamp)); - self.history.push(edit.clone()); - 
self.history.push_undo(edit.timestamp.local()); - self.snapshot.version.observe(edit.timestamp.local()); + self.history.push(operation.clone()); + self.history.push_undo(operation.local_timestamp()); + self.snapshot.version.observe(operation.local_timestamp()); self.end_transaction(); - edit + operation } fn apply_local_edit( @@ -815,6 +797,7 @@ impl Buffer { pub fn apply_ops>(&mut self, ops: I) -> Result<()> { let mut deferred_ops = Vec::new(); for op in ops { + self.history.push(op.clone()); if self.can_apply_op(&op) { self.apply_op(op)?; } else { @@ -839,7 +822,6 @@ impl Buffer { ); self.snapshot.version.observe(edit.timestamp.local()); self.resolve_edit(edit.timestamp.local()); - self.history.push(edit); } } Operation::Undo { @@ -1142,10 +1124,6 @@ impl Buffer { Ok(()) } - pub fn deferred_ops(&self) -> impl Iterator { - self.deferred_ops.iter() - } - fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); @@ -1172,16 +1150,21 @@ impl Buffer { } } - pub fn peek_undo_stack(&self) -> Option<&Transaction> { + pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> { self.history.undo_stack.last() } + pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> { + self.history.redo_stack.last() + } + pub fn start_transaction(&mut self) -> Option { self.start_transaction_at(Instant::now()) } pub fn start_transaction_at(&mut self, now: Instant) -> Option { - self.history.start_transaction(self.version.clone(), now) + self.history + .start_transaction(self.version.clone(), now, &mut self.local_clock) } pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> { @@ -1189,8 +1172,8 @@ impl Buffer { } pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> { - if let Some(transaction) = self.history.end_transaction(now) { - let since = transaction.start.clone(); + if let Some(entry) = self.history.end_transaction(now) { + let since = 
entry.transaction.start.clone(); let id = self.history.group().unwrap(); Some((id, since)) } else { @@ -1198,16 +1181,16 @@ impl Buffer { } } - pub fn avoid_grouping_next_transaction(&mut self) { - self.history.avoid_grouping_next_transaction() + pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> { + self.history.finalize_last_transaction() } pub fn base_text(&self) -> &Arc { &self.history.base_text } - pub fn history(&self) -> impl Iterator { - self.history.ops.values() + pub fn history(&self) -> impl Iterator { + self.history.operations.values() } pub fn undo_history(&self) -> impl Iterator { @@ -1218,7 +1201,8 @@ impl Buffer { } pub fn undo(&mut self) -> Option<(TransactionId, Operation)> { - if let Some(transaction) = self.history.pop_undo().cloned() { + if let Some(entry) = self.history.pop_undo() { + let transaction = entry.transaction.clone(); let transaction_id = transaction.id; let op = self.undo_or_redo(transaction).unwrap(); Some((transaction_id, op)) @@ -1227,13 +1211,18 @@ impl Buffer { } } - pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option { - if let Some(transaction) = self.history.remove_from_undo(transaction_id).cloned() { - let op = self.undo_or_redo(transaction).unwrap(); - Some(op) - } else { - None - } + pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec { + let transactions = self + .history + .remove_from_undo(transaction_id) + .iter() + .map(|entry| entry.transaction.clone()) + .collect::>(); + + transactions + .into_iter() + .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .collect() } pub fn forget_transaction(&mut self, transaction_id: TransactionId) { @@ -1241,7 +1230,8 @@ impl Buffer { } pub fn redo(&mut self) -> Option<(TransactionId, Operation)> { - if let Some(transaction) = self.history.pop_redo().cloned() { + if let Some(entry) = self.history.pop_redo() { + let transaction = entry.transaction.clone(); let transaction_id = transaction.id; let op = 
self.undo_or_redo(transaction).unwrap(); Some((transaction_id, op)) @@ -1250,18 +1240,23 @@ impl Buffer { } } - pub fn redo_transaction(&mut self, transaction_id: TransactionId) -> Option { - if let Some(transaction) = self.history.remove_from_redo(transaction_id).cloned() { - let op = self.undo_or_redo(transaction).unwrap(); - Some(op) - } else { - None - } + pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec { + let transactions = self + .history + .remove_from_redo(transaction_id) + .iter() + .map(|entry| entry.transaction.clone()) + .collect::>(); + + transactions + .into_iter() + .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .collect() } fn undo_or_redo(&mut self, transaction: Transaction) -> Result { let mut counts = HashMap::default(); - for edit_id in transaction.edits { + for edit_id in transaction.edit_ids { counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); } @@ -1272,20 +1267,18 @@ impl Buffer { version: transaction.start.clone(), }; self.apply_undo(&undo)?; - self.snapshot.version.observe(undo.id); - - Ok(Operation::Undo { + let operation = Operation::Undo { undo, lamport_timestamp: self.lamport_clock.tick(), - }) + }; + self.snapshot.version.observe(operation.local_timestamp()); + self.history.push(operation.clone()); + Ok(operation) } - pub fn push_transaction( - &mut self, - edit_ids: impl IntoIterator, - now: Instant, - ) { - self.history.push_transaction(edit_ids, now); + pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { + self.history.push_transaction(transaction, now); + self.history.finalize_last_transaction(); } pub fn subscribe(&mut self) -> Subscription { @@ -1294,13 +1287,13 @@ impl Buffer { pub fn wait_for_edits( &mut self, - edit_ids: &[clock::Local], + edit_ids: impl IntoIterator, ) -> impl 'static + Future { let mut futures = Vec::new(); for edit_id in edit_ids { - if !self.version.observed(*edit_id) { + if !self.version.observed(edit_id) { let (tx, rx) = 
oneshot::channel(); - self.edit_id_resolvers.entry(*edit_id).or_default().push(tx); + self.edit_id_resolvers.entry(edit_id).or_default().push(tx); futures.push(rx); } } @@ -1404,7 +1397,7 @@ impl Buffer { new_text ); let op = self.edit(old_ranges.iter().cloned(), new_text.as_str()); - (old_ranges, new_text, Operation::Edit(op)) + (old_ranges, new_text, op) } pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { @@ -1412,7 +1405,8 @@ impl Buffer { let mut ops = Vec::new(); for _ in 0..rng.gen_range(1..=5) { - if let Some(transaction) = self.history.undo_stack.choose(rng).cloned() { + if let Some(entry) = self.history.undo_stack.choose(rng) { + let transaction = entry.transaction.clone(); log::info!( "undoing buffer {} transaction {:?}", self.replica_id, @@ -1512,6 +1506,10 @@ impl BufferSnapshot { self.visible_text.offset_to_point_utf16(offset) } + pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 { + self.visible_text.point_to_point_utf16(point) + } + pub fn version(&self) -> &clock::Global { &self.version } @@ -1748,14 +1746,6 @@ impl BufferSnapshot { self.visible_text.clip_point_utf16(point, bias) } - // pub fn point_for_offset(&self, offset: usize) -> Result { - // if offset <= self.len() { - // Ok(self.text_summary_for_range(0..offset)) - // } else { - // Err(anyhow!("offset out of bounds")) - // } - // } - pub fn edits_since<'a, D>( &'a self, since: &'a clock::Global, @@ -1766,6 +1756,42 @@ impl BufferSnapshot { self.edits_since_in_range(since, Anchor::min()..Anchor::max()) } + pub fn edited_ranges_for_transaction<'a, D>( + &'a self, + transaction: &'a Transaction, + ) -> impl 'a + Iterator> + where + D: TextDimension, + { + let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); + let mut rope_cursor = self.visible_text.cursor(0); + let cx = Some(transaction.end.clone()); + let mut position = D::default(); + transaction.ranges.iter().map(move |range| { + 
cursor.seek_forward(&VersionedFullOffset::Offset(range.start), Bias::Right, &cx); + let mut start_offset = cursor.start().1; + if cursor + .item() + .map_or(false, |fragment| fragment.is_visible(&self.undo_map)) + { + start_offset += range.start - cursor.start().0.full_offset() + } + position.add_assign(&rope_cursor.summary(start_offset)); + let start = position.clone(); + + cursor.seek_forward(&VersionedFullOffset::Offset(range.end), Bias::Left, &cx); + let mut end_offset = cursor.start().1; + if cursor + .item() + .map_or(false, |fragment| fragment.is_visible(&self.undo_map)) + { + end_offset += range.end - cursor.start().0.full_offset(); + } + position.add_assign(&rope_cursor.summary(end_offset)); + start..position.clone() + }) + } + pub fn edits_since_in_range<'a, D>( &'a self, since: &'a clock::Global, @@ -2178,6 +2204,20 @@ impl Operation { operation_queue::Operation::lamport_timestamp(self).replica_id } + pub fn local_timestamp(&self) -> clock::Local { + match self { + Operation::Edit(edit) => edit.timestamp.local(), + Operation::Undo { undo, .. } => undo.id, + } + } + + pub fn as_edit(&self) -> Option<&EditOperation> { + match self { + Operation::Edit(edit) => Some(edit), + _ => None, + } + } + pub fn is_edit(&self) -> bool { match self { Operation::Edit { .. 
} => true, @@ -2260,6 +2300,34 @@ impl ToPoint for Point { } } +pub trait ToPointUtf16 { + fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16; +} + +impl ToPointUtf16 for Anchor { + fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 { + snapshot.summary_for_anchor(self) + } +} + +impl ToPointUtf16 for usize { + fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 { + snapshot.offset_to_point_utf16(*self) + } +} + +impl ToPointUtf16 for PointUtf16 { + fn to_point_utf16<'a>(&self, _: &BufferSnapshot) -> PointUtf16 { + *self + } +} + +impl ToPointUtf16 for Point { + fn to_point_utf16<'a>(&self, snapshot: &BufferSnapshot) -> PointUtf16 { + snapshot.point_to_point_utf16(*self) + } +} + pub trait Clip { fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self; } diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 339cabbbf3241516685f34899ca7f2cd857d6220..db078cd862e5f055550971ccbad0aa169dc244e9 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -293,6 +293,7 @@ pub struct EditorStyle { pub hint_diagnostic: DiagnosticStyle, pub invalid_hint_diagnostic: DiagnosticStyle, pub autocomplete: AutocompleteStyle, + pub code_actions_indicator: Color, } #[derive(Clone, Deserialize, Default)] @@ -420,6 +421,7 @@ impl InputEditorStyle { hint_diagnostic: default_diagnostic_style.clone(), invalid_hint_diagnostic: default_diagnostic_style.clone(), autocomplete: Default::default(), + code_actions_indicator: Default::default(), } } } diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 05b7b7e19b433b2db654607a85e53de97b29badb..8c7c001e33b25877b70e7121aa87fe5512d792e1 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -221,7 +221,7 @@ impl Pane { let task = workspace.load_path(project_path, cx); cx.spawn(|workspace, mut cx| async move { let item = task.await; - if let Some(pane) = cx.read(|cx| pane.upgrade(cx)) { + if let Some(pane) 
= pane.upgrade(&cx) { if let Some(item) = item.log_err() { workspace.update(&mut cx, |workspace, cx| { pane.update(cx, |p, _| p.nav_history.borrow_mut().set_mode(mode)); @@ -279,7 +279,7 @@ impl Pane { item_view.added_to_pane(cx); let item_idx = cmp::min(self.active_item_index + 1, self.item_views.len()); self.item_views - .insert(item_idx, (item_view.item_handle(cx).id(), item_view)); + .insert(item_idx, (item_view.item_id(cx), item_view)); self.activate_item(item_idx, cx); cx.notify(); } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 86d5271224d80e114eb4885f5f886d597e6c5133..2203e8cbf7d879b2ab3a4ea530a7687586ee6aec 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -150,11 +150,9 @@ pub trait Item: Entity + Sized { } pub trait ItemView: View { - type ItemHandle: ItemHandle; - fn deactivated(&mut self, _: &mut ViewContext) {} fn navigate(&mut self, _: Box, _: &mut ViewContext) {} - fn item_handle(&self, cx: &AppContext) -> Self::ItemHandle; + fn item_id(&self, cx: &AppContext) -> usize; fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox; fn project_path(&self, cx: &AppContext) -> Option; fn clone_on_split(&self, _: &mut ViewContext) -> Option @@ -170,7 +168,11 @@ pub trait ItemView: View { false } fn can_save(&self, cx: &AppContext) -> bool; - fn save(&mut self, cx: &mut ViewContext) -> Task>; + fn save( + &mut self, + project: ModelHandle, + cx: &mut ViewContext, + ) -> Task>; fn can_save_as(&self, cx: &AppContext) -> bool; fn save_as( &mut self, @@ -222,7 +224,7 @@ pub trait WeakItemHandle { } pub trait ItemViewHandle: 'static { - fn item_handle(&self, cx: &AppContext) -> Box; + fn item_id(&self, cx: &AppContext) -> usize; fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox; fn project_path(&self, cx: &AppContext) -> Option; fn boxed_clone(&self) -> Box; @@ -236,7 +238,7 @@ pub trait ItemViewHandle: 'static { fn has_conflict(&self, cx: 
&AppContext) -> bool; fn can_save(&self, cx: &AppContext) -> bool; fn can_save_as(&self, cx: &AppContext) -> bool; - fn save(&self, cx: &mut MutableAppContext) -> Task>; + fn save(&self, project: ModelHandle, cx: &mut MutableAppContext) -> Task>; fn save_as( &self, project: ModelHandle, @@ -324,7 +326,7 @@ impl WeakItemHandle for WeakModelHandle { } fn upgrade(&self, cx: &AppContext) -> Option> { - WeakModelHandle::::upgrade(*self, cx).map(|i| Box::new(i) as Box) + WeakModelHandle::::upgrade(self, cx).map(|i| Box::new(i) as Box) } } @@ -354,8 +356,8 @@ impl dyn ItemViewHandle { } impl ItemViewHandle for ViewHandle { - fn item_handle(&self, cx: &AppContext) -> Box { - Box::new(self.read(cx).item_handle(cx)) + fn item_id(&self, cx: &AppContext) -> usize { + self.read(cx).item_id(cx) } fn tab_content(&self, style: &theme::Tab, cx: &AppContext) -> ElementBox { @@ -404,8 +406,8 @@ impl ItemViewHandle for ViewHandle { self.update(cx, |this, cx| this.navigate(data, cx)); } - fn save(&self, cx: &mut MutableAppContext) -> Task> { - self.update(cx, |item, cx| item.save(cx)) + fn save(&self, project: ModelHandle, cx: &mut MutableAppContext) -> Task> { + self.update(cx, |item, cx| item.save(project, cx)) } fn save_as( @@ -589,7 +591,7 @@ impl Workspace { while stream.recv().await.is_some() { cx.update(|cx| { - if let Some(this) = this.upgrade(&cx) { + if let Some(this) = this.upgrade(cx) { this.update(cx, |_, cx| cx.notify()); } }) @@ -772,7 +774,7 @@ impl Workspace { let item = load_task.await?; this.update(&mut cx, |this, cx| { let pane = pane - .upgrade(&cx) + .upgrade(cx) .ok_or_else(|| anyhow!("could not upgrade pane reference"))?; Ok(this.open_item_in_pane(item, &pane, cx)) }) @@ -822,6 +824,7 @@ impl Workspace { } pub fn save_active_item(&mut self, cx: &mut ViewContext) -> Task> { + let project = self.project.clone(); if let Some(item) = self.active_item(cx) { if item.can_save(cx) { if item.has_conflict(cx.as_ref()) { @@ -835,12 +838,12 @@ impl Workspace { cx.spawn(|_, 
mut cx| async move { let answer = answer.recv().await; if answer == Some(0) { - cx.update(|cx| item.save(cx)).await?; + cx.update(|cx| item.save(project, cx)).await?; } Ok(()) }) } else { - item.save(cx) + item.save(project, cx) } } else if item.can_save_as(cx) { let worktree = self.worktrees(cx).next(); @@ -849,9 +852,8 @@ impl Workspace { .map_or(Path::new(""), |w| w.abs_path()) .to_path_buf(); let mut abs_path = cx.prompt_for_new_path(&start_abs_path); - cx.spawn(|this, mut cx| async move { + cx.spawn(|_, mut cx| async move { if let Some(abs_path) = abs_path.recv().await.flatten() { - let project = this.read_with(&cx, |this, _| this.project().clone()); cx.update(|cx| item.save_as(project, abs_path, cx)).await?; } Ok(()) diff --git a/crates/zed/assets/icons/zap.svg b/crates/zed/assets/icons/zap.svg new file mode 100644 index 0000000000000000000000000000000000000000..8d517dcb53db7159e8a7133b19ce5ee0f8f293b5 --- /dev/null +++ b/crates/zed/assets/icons/zap.svg @@ -0,0 +1,3 @@ + + + diff --git a/crates/zed/assets/themes/_base.toml b/crates/zed/assets/themes/_base.toml index 2f3541dc1b3b0943034dab8dcca00100a07ad989..389f4571070cc3dd2274fd1cfc9ed5f30f75699d 100644 --- a/crates/zed/assets/themes/_base.toml +++ b/crates/zed/assets/themes/_base.toml @@ -253,6 +253,7 @@ line_number_active = "$text.0.color" selection = "$selection.host" guest_selections = "$selection.guests" error_color = "$status.bad" +code_actions_indicator = "$text.3.color" [editor.diagnostic_path_header] background = "$state.active_line" diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index d51376afb01a2824f634bb95fcdf4bf6bb4c03ba..791c236ec686082daa6fdc635b7221652c921084 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -126,7 +126,7 @@ mod tests { use super::*; use editor::{DisplayPoint, Editor}; use gpui::{MutableAppContext, TestAppContext, ViewHandle}; - use project::ProjectPath; + use project::{Fs, ProjectPath}; use serde_json::json; use std::{ collections::HashSet, @@ 
-817,7 +817,10 @@ mod tests { .active_pane() .update(cx, |pane, cx| pane.close_item(editor2.id(), cx)); drop(editor2); - app_state.fs.as_fake().remove(Path::new("/root/a/file2")) + app_state + .fs + .as_fake() + .remove_file(Path::new("/root/a/file2"), Default::default()) }) .await .unwrap(); diff --git a/script/drop-test-dbs b/script/drop-test-dbs new file mode 100755 index 0000000000000000000000000000000000000000..72d6ff5f79a0d7375f38dfd84f400efba9253805 --- /dev/null +++ b/script/drop-test-dbs @@ -0,0 +1,16 @@ +#!/bin/bash + +databases=$(psql --tuples-only --command " + SELECT + datname + FROM + pg_database + WHERE + datistemplate = false + AND datname like 'zed-test-%' +") + +for database in $databases; do + echo $database + dropdb $database +done \ No newline at end of file