From 811696670a98a0e3b61e34556c55a458621bb541 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sat, 4 Dec 2021 06:57:56 -0700 Subject: [PATCH 001/196] Start on a new FragmentList Here I'm exploring a new approach to the project-wide diagnostics view that can exactly mirror the contents of cargo check. The `FragmentList` composes an arbitrary list of fragments from other buffers and presents them as if they were a single buffer. --- Cargo.lock | 1 + crates/editor/Cargo.toml | 2 + crates/editor/src/display_map.rs | 3 +- crates/editor/src/display_map/fold_map.rs | 13 +- crates/editor/src/editor.rs | 10 +- crates/editor/src/element.rs | 8 +- crates/editor/src/test.rs | 13 - crates/language/Cargo.toml | 3 + crates/language/src/fragment_list.rs | 291 ++++++++++++++++++++++ crates/language/src/language.rs | 1 + crates/text/src/text.rs | 2 +- crates/util/src/test.rs | 13 + 12 files changed, 329 insertions(+), 31 deletions(-) create mode 100644 crates/language/src/fragment_list.rs diff --git a/Cargo.lock b/Cargo.lock index f6227543798b09ad0afa62b93631e863fb7cdb2e..686157fb854e8ff1c94f19eba78730c807373c2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2587,6 +2587,7 @@ dependencies = [ "serde", "similar", "smol", + "sum_tree", "text", "theme", "tree-sitter", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index ed0d1b74133945a798bacb1f3b32c64361fec240..46250a55dfab05dca36fe00a20bd925d728f5eb9 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -11,6 +11,7 @@ test-support = [ "text/test-support", "language/test-support", "gpui/test-support", + "util/test-support", ] [dependencies] @@ -37,6 +38,7 @@ smol = "1.2" text = { path = "../text", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } +util = { path = "../util", features = ["test-support"] } ctor = "0.1" env_logger = "0.8" rand = "0.8" diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index fa0f17331822b192f5d7ac6c7868175ae573e912..9167356795d354da5ed332e00c626f1ad807837a 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -459,6 +459,7 @@ mod tests { use rand::{prelude::StdRng, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; + use util::test::sample_text; use Bias::*; #[gpui::test(iterations = 100)] @@ -720,7 +721,7 @@ mod tests { #[gpui::test] fn test_text_chunks(cx: &mut gpui::MutableAppContext) { - let text = sample_text(6, 6); + let text = sample_text(6, 6, 'a'); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let tab_size = 4; let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap(); diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 0e920c5e6aebd3556528cb0d985a05657ba52582..31edebe99c1d7fff23bfe8ee06a55864f5fb59bb 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1064,16 +1064,17 @@ impl FoldEdit { #[cfg(test)] mod tests { use super::*; - use crate::{test::sample_text, ToPoint}; + use crate::ToPoint; use language::Buffer; use rand::prelude::*; use std::{env, mem}; use text::RandomCharIter; + use util::test::sample_text; use Bias::{Left, Right}; #[gpui::test] fn test_basic_folds(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); let buffer_snapshot = 
buffer.read(cx).snapshot(); let mut map = FoldMap::new(buffer_snapshot.clone()).0; @@ -1187,7 +1188,7 @@ mod tests { #[gpui::test] fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); let buffer_snapshot = buffer.read(cx).snapshot(); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); @@ -1203,7 +1204,7 @@ mod tests { #[gpui::test] fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); let buffer_snapshot = buffer.read(cx).snapshot(); let mut map = FoldMap::new(buffer_snapshot.clone()).0; @@ -1226,7 +1227,7 @@ mod tests { #[gpui::test] fn test_folds_in_range(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); let buffer_snapshot = buffer.read(cx).snapshot(); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let buffer = buffer.read(cx); @@ -1471,7 +1472,7 @@ mod tests { #[gpui::test] fn test_buffer_rows(cx: &mut gpui::MutableAppContext) { - let text = sample_text(6, 6) + "\n"; + let text = sample_text(6, 6, 'a') + "\n"; let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer_snapshot = buffer.read(cx).snapshot(); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 539736aca254e803fe9ac2ab29c51c63e0f014cb..4299807f97a233720df9c339724cf2c66c1e41ea 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3676,9 +3676,9 @@ pub fn diagnostic_style( #[cfg(test)] mod tests { use super::*; - use crate::test::sample_text; use text::Point; use unindent::Unindent; + use util::test::sample_text; #[gpui::test] fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) { @@ -3912,7 +3912,7 @@ mod tests { #[gpui::test] fn test_move_cursor(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4708,7 +4708,7 @@ mod tests { #[gpui::test] fn test_move_line_up_down(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(10, 5), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(10, 5, 'a'), cx)); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.fold_ranges( @@ -4954,7 +4954,7 @@ mod tests { #[gpui::test] fn test_select_line(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 5), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 5, 'a'), cx)); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges( @@ -5000,7 +5000,7 @@ mod tests { #[gpui::test] fn test_split_selection_into_lines(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = 
cx.add_model(|cx| Buffer::new(0, sample_text(9, 5), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(9, 5, 'a'), cx)); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.fold_ranges( diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index db16b3f01d5c04add650a3116a9812f809da1b72..45764e65fb662cbdcba6d188c81b73d6b4d6b523 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1164,17 +1164,15 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 { #[cfg(test)] mod tests { use super::*; - use crate::{ - test::sample_text, - {Editor, EditorSettings}, - }; + use crate::{Editor, EditorSettings}; use language::Buffer; + use util::test::sample_text; #[gpui::test] fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6), cx)); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let (window_id, editor) = cx.add_window(Default::default(), |cx| { Editor::for_buffer( buffer, diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 26f29364fd8a0240e9fae12b3f4de3fada4d665e..3fb538dfbd55e518aeb3358ed047a879bfd8df1f 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -9,19 +9,6 @@ fn init_logger() { env_logger::init(); } -pub fn sample_text(rows: usize, cols: usize) -> String { - let mut text = String::new(); - for row in 0..rows { - let c: char = ('a' as u32 + row as u32) as u8 as char; - let mut line = c.to_string().repeat(cols); - if row < rows - 1 { - line.push('\n'); - } - text += &line; - } - text -} - pub struct Observer(PhantomData); impl Entity for Observer { diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index ad0f84b4dcd620309202ce14f0ab60b6cfb020b6..16c1f6edee55c68aee5ce9298f9ead25a22ede30 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -12,6 +12,7 @@ test-support = [ "text/test-support", "lsp/test-support", "tree-sitter-rust", + "util/test-support", ] [dependencies] @@ -20,6 +21,7 @@ clock = { path = "../clock" } gpui = { path = "../gpui" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } +sum_tree = { path = "../sum_tree" } theme = { path = "../theme" } util = { path = "../util" } anyhow = "1.0.38" @@ -39,6 +41,7 @@ tree-sitter-rust = { version = "0.19.0", optional = true } text = { path = "../text", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } +util = { path = "../util", features = ["test-support"] } rand = "0.8.3" tree-sitter-rust = "0.19.0" unindent = "0.1.7" diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs new file mode 100644 index 0000000000000000000000000000000000000000..deef5570a95f3e1a6ec91f6b174b09aa6bf58b4a --- /dev/null +++ b/crates/language/src/fragment_list.rs @@ -0,0 +1,291 @@ +use std::{ + cmp, + ops::{Deref, Range}, +}; +use sum_tree::{Bias, Cursor, SumTree}; +use text::TextSummary; +use theme::SyntaxTheme; +use util::post_inc; + +use crate::{buffer, Buffer, Chunk}; +use gpui::{Entity, ModelContext, ModelHandle}; + +const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; + +pub trait ToOffset { + fn to_offset<'a>(&self, content: &Snapshot) -> usize; +} + +pub type FragmentId = usize; + +#[derive(Default)] +pub struct FragmentList { + snapshot: Snapshot, + 
next_fragment_id: FragmentId, +} + +#[derive(Clone, Default)] +pub struct Snapshot { + entries: SumTree, +} + +pub struct FragmentProperties<'a, T> { + buffer: &'a ModelHandle, + range: Range, + header_height: u8, +} + +#[derive(Clone)] +struct Entry { + buffer: buffer::Snapshot, + buffer_id: usize, + buffer_range: Range, + text_summary: TextSummary, + header_height: u8, +} + +#[derive(Clone, Debug, Default)] +struct EntrySummary { + min_buffer_id: usize, + max_buffer_id: usize, + text: TextSummary, +} + +pub struct Chunks<'a> { + range: Range, + cursor: Cursor<'a, Entry, usize>, + header_height: u8, + entry_chunks: Option>, + theme: Option<&'a SyntaxTheme>, +} + +impl FragmentList { + pub fn new() -> Self { + Self::default() + } + + pub fn push<'a, O: text::ToOffset>( + &mut self, + props: FragmentProperties<'a, O>, + cx: &mut ModelContext, + ) -> FragmentId { + let id = post_inc(&mut self.next_fragment_id); + + let buffer = props.buffer.read(cx); + let buffer_range = props.range.start.to_offset(buffer)..props.range.end.to_offset(buffer); + let mut text_summary = + buffer.text_summary_for_range::(buffer_range.clone()); + if props.header_height > 0 { + text_summary.first_line_chars = 0; + text_summary.lines.row += props.header_height as u32; + text_summary.lines_utf16.row += props.header_height as u32; + text_summary.bytes += props.header_height as usize; + } + + self.snapshot.entries.push( + Entry { + buffer: props.buffer.read(cx).snapshot(), + buffer_id: props.buffer.id(), + buffer_range, + text_summary, + header_height: props.header_height, + }, + &(), + ); + + id + } +} + +impl Deref for FragmentList { + type Target = Snapshot; + + fn deref(&self) -> &Self::Target { + &self.snapshot + } +} + +impl Entity for FragmentList { + type Event = (); +} + +impl Snapshot { + pub fn text(&self) -> String { + self.chunks(0..self.len(), None) + .map(|chunk| chunk.text) + .collect() + } + + pub fn len(&self) -> usize { + self.entries.summary().text.bytes + } + + pub fn chunks<'a, T: ToOffset>( + &'a self, + range: Range, + theme: Option<&'a SyntaxTheme>, + ) -> Chunks<'a> { + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut cursor = self.entries.cursor::(); + cursor.seek(&range.start, Bias::Right, &()); + + let entry_chunks = cursor.item().map(|entry| { + let buffer_start = entry.buffer_range.start + (range.start - cursor.start()); + let buffer_end = cmp::min( + entry.buffer_range.end, + entry.buffer_range.start + (range.end - cursor.start()), + ); + entry.buffer.chunks(buffer_start..buffer_end, theme) + }); + let header_height = cursor.item().map_or(0, |entry| entry.header_height); + + Chunks { + range, + cursor, + header_height, + entry_chunks, + theme, + } + } +} + +impl sum_tree::Item for Entry { + type Summary = EntrySummary; + + fn summary(&self) -> Self::Summary { + EntrySummary { + min_buffer_id: self.buffer_id, + max_buffer_id: self.buffer_id, + text: self.text_summary.clone(), + } + } +} + +impl sum_tree::Summary for EntrySummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, _: &()) { + self.min_buffer_id = cmp::min(self.min_buffer_id, summary.min_buffer_id); + self.max_buffer_id = cmp::max(self.max_buffer_id, summary.max_buffer_id); + self.text.add_summary(&summary.text, &()); + } +} + +impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { + *self += summary.text.bytes + } +} + +impl<'a> Iterator for Chunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> 
Option { + if self.header_height > 0 { + let chunk = Chunk { + text: unsafe { + std::str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) + }, + ..Default::default() + }; + self.header_height = 0; + return Some(chunk); + } + + if let Some(entry_chunks) = self.entry_chunks.as_mut() { + if let Some(chunk) = entry_chunks.next() { + return Some(chunk); + } else { + self.entry_chunks.take(); + } + } + + self.cursor.next(&()); + let entry = self.cursor.item()?; + + let buffer_end = cmp::min( + entry.buffer_range.end, + entry.buffer_range.start + (self.range.end - self.cursor.start()), + ); + + self.header_height = entry.header_height; + self.entry_chunks = Some( + entry + .buffer + .chunks(entry.buffer_range.start..buffer_end, self.theme), + ); + + Some(Chunk { + text: "\n", + ..Default::default() + }) + } +} + +impl ToOffset for usize { + fn to_offset<'a>(&self, _: &Snapshot) -> usize { + *self + } +} + +#[cfg(test)] +mod tests { + use super::{FragmentList, FragmentProperties}; + use crate::Buffer; + use gpui::MutableAppContext; + use text::Point; + use util::test::sample_text; + + #[gpui::test] + fn test_fragment_buffer(cx: &mut MutableAppContext) { + let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); + let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); + + let list = cx.add_model(|cx| { + let mut list = FragmentList::new(); + + list.push( + FragmentProperties { + buffer: &buffer_1, + range: Point::new(1, 2)..Point::new(2, 5), + header_height: 2, + }, + cx, + ); + list.push( + FragmentProperties { + buffer: &buffer_1, + range: Point::new(3, 3)..Point::new(4, 4), + header_height: 1, + }, + cx, + ); + list.push( + FragmentProperties { + buffer: &buffer_2, + range: Point::new(3, 1)..Point::new(3, 3), + header_height: 3, + }, + cx, + ); + list + }); + + assert_eq!( + list.read(cx).text(), + concat!( + "\n", // Preserve newlines + "\n", // + "bbbb\n", // + "ccccc\n", // + "\n", // + "ddd\n", // + "eeee\n", // + "\n", // + "\n", // + "\n", // + "jj" // + ) + ) + } +} diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 77d01c7ecf85b4b231d4cc03856a13e0c6f48dc8..704e9b967c9433bf2a943a8e9cbfce366b229239 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,4 +1,5 @@ mod buffer; +mod fragment_list; mod highlight_map; pub mod proto; #[cfg(test)] diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a434e97e2ee66acf05171c500b6c4be5f98fdb33..0fef4aac82bb300e66fda5000af62ab215865bfc 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -49,7 +49,7 @@ pub struct Buffer { subscriptions: Vec>>>>, } -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Snapshot { visible_text: Rope, deleted_text: Rope, diff --git a/crates/util/src/test.rs b/crates/util/src/test.rs index 57a4b21105fc4ba54e4aadba4facd5381b53fb0e..71b847df692af9e65bf24223c1bdeae35a923d25 100644 --- a/crates/util/src/test.rs +++ b/crates/util/src/test.rs @@ -35,3 +35,16 @@ fn write_tree(path: &Path, tree: serde_json::Value) { panic!("You must pass a JSON object to this helper") } } + +pub fn sample_text(rows: usize, cols: usize, start_char: char) -> String { + let mut text = String::new(); + for row in 0..rows { + let c: char = (start_char as u32 + row as u32) as u8 as char; + let mut line = c.to_string().repeat(cols); + if row < rows - 1 { + line.push('\n'); + } + text += &line; + } + text +} From 0be897d5acd7c4cf5afb60a44caa3c473093cc25 Mon Sep 17 00:00:00 2001 From: Nathan Sobo 
Date: Sat, 4 Dec 2021 07:19:30 -0700 Subject: [PATCH 002/196] WIP: Edit one of the excerpted buffers and add an assertion We'll need to detect edits on the child buffers and understand their impact on the tree. --- crates/language/src/fragment_list.rs | 31 +++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs index deef5570a95f3e1a6ec91f6b174b09aa6bf58b4a..725e628a1d69d9ac8d9f18dd7f305668794e6ab0 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/fragment_list.rs @@ -286,6 +286,35 @@ mod tests { "\n", // "jj" // ) - ) + ); + + buffer_1.update(cx, |buffer, cx| { + buffer.edit( + [ + Point::new(0, 0)..Point::new(0, 0), + Point::new(2, 1)..Point::new(2, 2), + ], + "\n", + cx, + ); + }); + + assert_eq!( + list.read(cx).text(), + concat!( + "\n", // Preserve newlines + "\n", // + "bbbb\n", // + "c\n", // + "ccc\n", // + "\n", // + "ddd\n", // + "eeee\n", // + "\n", // + "\n", // + "\n", // + "jj" // + ) + ); } } From 45d6f5ab048be9ec841624fde8190d2646627ae4 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 6 Dec 2021 12:10:25 +0100 Subject: [PATCH 003/196] Start on maintaining edits in `FragmentList` --- Cargo.lock | 2 + crates/language/Cargo.toml | 10 +- crates/language/src/fragment_list.rs | 220 +++++++++++++++++++++++---- crates/text/src/patch.rs | 4 + 4 files changed, 200 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1188259ac23e9b18f57fd22c68da69da81b012d..2b49f5d4ceefa9ba3568262c066b928e123e85d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2575,6 +2575,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clock", + "collections", "futures", "gpui", "lazy_static", @@ -2586,6 +2587,7 @@ dependencies = [ "rpc", "serde", "similar", + "smallvec", "smol", "sum_tree", "text", diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index a888d3d8ebbc20d7fa5d39e19d1b879dfdbe54ce..585f885d1a8be1c3484e92bc68d436293445ffc6 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -9,19 +9,21 @@ path = "src/language.rs" [features] test-support = [ "rand", - "text/test-support", + "collections/test-support", "lsp/test-support", + "text/test-support", "tree-sitter-rust", "util/test-support", ] [dependencies] -text = { path = "../text" } clock = { path = "../clock" } +collections = { path = "../collections" } gpui = { path = "../gpui" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } sum_tree = { path = "../sum_tree" } +text = { path = "../text" } theme = { path = "../theme" } util = { path = "../util" } anyhow = "1.0.38" @@ -33,14 +35,16 @@ postage = { version = "0.4.1", features = ["futures-traits"] } rand = { version = "0.8.3", optional = true } serde = { version = "1", features = ["derive"] } similar = "1.3" +smallvec = { version = "1.6", features = ["union"] } smol = "1.2" tree-sitter = "0.20.0" tree-sitter-rust = { version = "0.20.0", optional = true } [dev-dependencies] -text = { path = "../text", features = ["test-support"] } +collections = { path = "../collections", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } +text = { path = "../text", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } rand = "0.8.3" tree-sitter-rust = "0.20.0" diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs index 
725e628a1d69d9ac8d9f18dd7f305668794e6ab0..c5d6377ad89cc775ee54b6421e3d65f3fcb5cfc0 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/fragment_list.rs @@ -1,14 +1,12 @@ -use std::{ - cmp, - ops::{Deref, Range}, -}; +use crate::{buffer, Buffer, Chunk}; +use collections::HashMap; +use gpui::{AppContext, Entity, ModelContext, ModelHandle}; +use parking_lot::Mutex; +use smallvec::{smallvec, SmallVec}; +use std::{cmp, iter, mem, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::TextSummary; use theme::SyntaxTheme; -use util::post_inc; - -use crate::{buffer, Buffer, Chunk}; -use gpui::{Entity, ModelContext, ModelHandle}; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -16,12 +14,12 @@ pub trait ToOffset { fn to_offset<'a>(&self, content: &Snapshot) -> usize; } -pub type FragmentId = usize; +pub type FragmentId = Location; #[derive(Default)] pub struct FragmentList { - snapshot: Snapshot, - next_fragment_id: FragmentId, + snapshot: Mutex, + buffers: HashMap, text::Subscription, Vec)>, } #[derive(Clone, Default)] @@ -37,8 +35,8 @@ pub struct FragmentProperties<'a, T> { #[derive(Clone)] struct Entry { + id: FragmentId, buffer: buffer::Snapshot, - buffer_id: usize, buffer_range: Range, text_summary: TextSummary, header_height: u8, @@ -46,11 +44,13 @@ struct Entry { #[derive(Clone, Debug, Default)] struct EntrySummary { - min_buffer_id: usize, - max_buffer_id: usize, + fragment_id: FragmentId, text: TextSummary, } +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct Location(SmallVec<[usize; 4]>); + pub struct Chunks<'a> { range: Range, cursor: Cursor<'a, Entry, usize>, @@ -64,12 +64,20 @@ impl FragmentList { Self::default() } - pub fn push<'a, O: text::ToOffset>( + pub fn snapshot(&self, cx: &AppContext) -> Snapshot { + self.sync(cx); + self.snapshot.lock().clone() + } + + pub fn push( &mut self, - props: FragmentProperties<'a, O>, + props: FragmentProperties, cx: &mut ModelContext, - ) -> FragmentId { - let id = post_inc(&mut self.next_fragment_id); + ) -> FragmentId + where + O: text::ToOffset, + { + self.sync(cx); let buffer = props.buffer.read(cx); let buffer_range = props.range.start.to_offset(buffer)..props.range.end.to_offset(buffer); @@ -82,10 +90,13 @@ impl FragmentList { text_summary.bytes += props.header_height as usize; } - self.snapshot.entries.push( + let mut snapshot = self.snapshot.lock(); + let prev_id = snapshot.entries.last().map(|e| &e.id); + let id = FragmentId::between(prev_id.unwrap_or(&FragmentId::min()), &FragmentId::max()); + snapshot.entries.push( Entry { + id: id.clone(), buffer: props.buffer.read(cx).snapshot(), - buffer_id: props.buffer.id(), buffer_range, text_summary, header_height: props.header_height, @@ -93,15 +104,89 @@ impl FragmentList { &(), ); + self.buffers + .entry(props.buffer.id()) + .or_insert_with(|| { + let subscription = props.buffer.update(cx, |buffer, _| buffer.subscribe()); + (props.buffer.clone(), subscription, Default::default()) + }) + .2 + .push(id.clone()); + id } -} -impl Deref for FragmentList { - type Target = Snapshot; + fn sync(&self, cx: &AppContext) { + let mut snapshot = self.snapshot.lock(); + let mut patches = Vec::new(); + let mut fragments_to_edit = Vec::new(); + for (buffer, subscription, fragment_ids) in self.buffers.values() { + let patch = subscription.consume(); + if !patch.is_empty() { + let patch_ix = patches.len(); + patches.push(patch); + fragments_to_edit.extend( + fragment_ids + .iter() + .map(|fragment_id| (buffer, fragment_id, patch_ix)), + ) + } + } + 
fragments_to_edit.sort_unstable_by_key(|(_, fragment_id, _)| *fragment_id); + + let old_fragments = mem::take(&mut snapshot.entries); + let mut cursor = old_fragments.cursor::(); + for (buffer, fragment_id, patch_ix) in fragments_to_edit { + snapshot + .entries + .push_tree(cursor.slice(fragment_id, Bias::Left, &()), &()); + + let fragment = cursor.item().unwrap(); + let mut new_range = fragment.buffer_range.clone(); + for edit in patches[patch_ix].edits() { + let edit_start = edit.new.start; + let edit_end = edit.new.start + edit.old_len(); + if edit_end < new_range.start { + let delta = edit.new_len() as isize - edit.old_len() as isize; + new_range.start = (new_range.start as isize + delta) as usize; + new_range.end = (new_range.end as isize + delta) as usize; + } else if edit_start >= new_range.end { + break; + } else { + let mut new_range_len = new_range.len(); + new_range_len -= + cmp::min(new_range.end, edit_end) - cmp::max(new_range.start, edit_start); + if edit_start > new_range.start { + new_range_len += edit.new_len(); + } + + new_range.start = cmp::min(new_range.start, edit.new.end); + new_range.end = new_range.start + new_range_len; + } + } - fn deref(&self) -> &Self::Target { - &self.snapshot + let buffer = buffer.read(cx); + let mut text_summary: TextSummary = buffer.text_summary_for_range(new_range.clone()); + if fragment.header_height > 0 { + text_summary.first_line_chars = 0; + text_summary.lines.row += fragment.header_height as u32; + text_summary.lines_utf16.row += fragment.header_height as u32; + text_summary.bytes += fragment.header_height as usize; + } + snapshot.entries.push( + Entry { + id: fragment.id.clone(), + buffer: buffer.snapshot(), + buffer_range: new_range, + text_summary, + header_height: fragment.header_height, + }, + &(), + ); + + cursor.next(&()); + } + snapshot.entries.push_tree(cursor.suffix(&()), &()); } } @@ -154,8 +239,7 @@ impl sum_tree::Item for Entry { fn summary(&self) -> Self::Summary { EntrySummary { - min_buffer_id: self.buffer_id, - max_buffer_id: self.buffer_id, + fragment_id: self.id.clone(), text: self.text_summary.clone(), } } @@ -165,15 +249,22 @@ impl sum_tree::Summary for EntrySummary { type Context = (); fn add_summary(&mut self, summary: &Self, _: &()) { - self.min_buffer_id = cmp::min(self.min_buffer_id, summary.min_buffer_id); - self.max_buffer_id = cmp::max(self.max_buffer_id, summary.max_buffer_id); + debug_assert!(summary.fragment_id > self.fragment_id); + self.fragment_id = summary.fragment_id.clone(); self.text.add_summary(&summary.text, &()); } } impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { - *self += summary.text.bytes + *self += summary.text.bytes; + } +} + +impl<'a> sum_tree::Dimension<'a, EntrySummary> for FragmentId { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { + debug_assert!(summary.fragment_id > *self); + *self = summary.fragment_id.clone(); } } @@ -228,11 +319,42 @@ impl ToOffset for usize { } } +impl Default for Location { + fn default() -> Self { + Self::min() + } +} + +impl Location { + pub fn min() -> Self { + Self(smallvec![usize::MIN]) + } + + pub fn max() -> Self { + Self(smallvec![usize::MAX]) + } + + pub fn between(lhs: &Self, rhs: &Self) -> Self { + let lhs = lhs.0.iter().copied().chain(iter::repeat(usize::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(usize::MAX)); + let mut location = SmallVec::new(); + for (lhs, rhs) in lhs.zip(rhs) { + let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + 
location.push(mid); + if mid > lhs { + break; + } + } + Self(location) + } +} + #[cfg(test)] mod tests { - use super::{FragmentList, FragmentProperties}; + use super::*; use crate::Buffer; use gpui::MutableAppContext; + use rand::prelude::*; use text::Point; use util::test::sample_text; @@ -272,7 +394,7 @@ mod tests { }); assert_eq!( - list.read(cx).text(), + list.read(cx).snapshot(cx).text(), concat!( "\n", // Preserve newlines "\n", // @@ -300,7 +422,7 @@ mod tests { }); assert_eq!( - list.read(cx).text(), + list.read(cx).snapshot(cx).text(), concat!( "\n", // Preserve newlines "\n", // @@ -317,4 +439,36 @@ mod tests { ) ); } + + #[gpui::test(iterations = 10000)] + fn test_location(mut rng: StdRng) { + let mut lhs = Default::default(); + let mut rhs = Default::default(); + while lhs == rhs { + lhs = Location( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + rhs = Location( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + } + + if lhs > rhs { + mem::swap(&mut lhs, &mut rhs); + } + + let middle = Location::between(&lhs, &rhs); + assert!(middle > lhs); + assert!(middle < rhs); + for ix in 0..middle.0.len() - 1 { + assert!( + middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) + || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) + ); + } + } } diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index 85d354d3b01ddc4a33bd4da298a0dfa7a0809399..91a07610177c7f9bd06c13f67d5f7657ecbbca86 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -33,6 +33,10 @@ where Self(edits) } + pub fn edits(&self) -> &[Edit] { + &self.0 + } + pub fn into_inner(self) -> Vec> { self.0 } From 8354d1520dbabf358972793f3111d065f9feeabe Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 6 Dec 2021 14:03:38 +0100 Subject: [PATCH 004/196] :art: --- crates/language/src/fragment_list.rs | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs index c5d6377ad89cc775ee54b6421e3d65f3fcb5cfc0..a1c27757de2d2f798b5d20a52cb9d37690e1625e 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/fragment_list.rs @@ -19,7 +19,13 @@ pub type FragmentId = Location; #[derive(Default)] pub struct FragmentList { snapshot: Mutex, - buffers: HashMap, text::Subscription, Vec)>, + buffers: HashMap, +} + +struct BufferState { + buffer: ModelHandle, + subscription: text::Subscription, + fragments: Vec, } #[derive(Clone, Default)] @@ -108,9 +114,13 @@ impl FragmentList { .entry(props.buffer.id()) .or_insert_with(|| { let subscription = props.buffer.update(cx, |buffer, _| buffer.subscribe()); - (props.buffer.clone(), subscription, Default::default()) + BufferState { + buffer: props.buffer.clone(), + subscription, + fragments: Default::default(), + } }) - .2 + .fragments .push(id.clone()); id @@ -120,15 +130,16 @@ impl FragmentList { let mut snapshot = self.snapshot.lock(); let mut patches = Vec::new(); let mut fragments_to_edit = Vec::new(); - for (buffer, subscription, fragment_ids) in self.buffers.values() { - let patch = subscription.consume(); + for buffer_state in self.buffers.values() { + let patch = buffer_state.subscription.consume(); if !patch.is_empty() { let patch_ix = patches.len(); patches.push(patch); fragments_to_edit.extend( - fragment_ids + buffer_state + .fragments .iter() - .map(|fragment_id| (buffer, fragment_id, patch_ix)), + .map(|fragment_id| (&buffer_state.buffer, fragment_id, patch_ix)), ) } } 
@@ -440,7 +451,7 @@ mod tests { ); } - #[gpui::test(iterations = 10000)] + #[gpui::test(iterations = 100)] fn test_location(mut rng: StdRng) { let mut lhs = Default::default(); let mut rhs = Default::default(); From e37908cf3b8801afb8f79d15e231989b5f60f602 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 6 Dec 2021 16:08:17 +0100 Subject: [PATCH 005/196] Start on a simple randomized test for `FragmentList` --- Cargo.lock | 2 + crates/language/Cargo.toml | 2 + crates/language/src/fragment_list.rs | 86 ++++++++++++++++++++++++++-- crates/language/src/tests.rs | 7 +++ crates/text/src/random_char_iter.rs | 8 +++ 5 files changed, 99 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2b49f5d4ceefa9ba3568262c066b928e123e85d8..d4508ba73c7efe19f166225c601355af7bc852d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2576,6 +2576,8 @@ dependencies = [ "anyhow", "clock", "collections", + "ctor", + "env_logger", "futures", "gpui", "lazy_static", diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 585f885d1a8be1c3484e92bc68d436293445ffc6..d6121cc7bd15644b17d7131f2fb295354a70b84b 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -46,6 +46,8 @@ gpui = { path = "../gpui", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } text = { path = "../text", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } +ctor = "0.1" +env_logger = "0.8" rand = "0.8.3" tree-sitter-rust = "0.20.0" unindent = "0.1.7" diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs index a1c27757de2d2f798b5d20a52cb9d37690e1625e..e8312aeda0272f558a3f668121c745da9443cae4 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/fragment_list.rs @@ -157,21 +157,22 @@ impl FragmentList { for edit in patches[patch_ix].edits() { let edit_start = edit.new.start; let edit_end = edit.new.start + edit.old_len(); - if edit_end < new_range.start { + if edit_start > new_range.end { + break; + } else if edit_end < new_range.start { let delta = edit.new_len() as isize - edit.old_len() as isize; new_range.start = (new_range.start as isize + delta) as usize; new_range.end = (new_range.end as isize + delta) as usize; - } else if edit_start >= new_range.end { - break; } else { let mut new_range_len = new_range.len(); new_range_len -= cmp::min(new_range.end, edit_end) - cmp::max(new_range.start, edit_start); - if edit_start > new_range.start { + if edit_start < new_range.start { + new_range.start = edit.new.end; + } else { new_range_len += edit.new_len(); } - new_range.start = cmp::min(new_range.start, edit.new.end); new_range.end = new_range.start + new_range_len; } } @@ -362,11 +363,13 @@ impl Location { #[cfg(test)] mod tests { + use std::env; + use super::*; use crate::Buffer; use gpui::MutableAppContext; use rand::prelude::*; - use text::Point; + use text::{Point, RandomCharIter}; use util::test::sample_text; #[gpui::test] @@ -451,6 +454,77 @@ mod tests { ); } + #[gpui::test(iterations = 100)] + fn test_random(cx: &mut MutableAppContext, mut rng: StdRng) { + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + + let mut buffers: Vec> = Vec::new(); + let list = cx.add_model(|_| FragmentList::new()); + let mut fragment_ids = Vec::new(); + let mut expected_fragments = Vec::new(); + + for _ in 0..operations { + match rng.gen_range(0..100) { + 0..=19 if !buffers.is_empty() => { + let buffer = 
buffers.choose(&mut rng).unwrap(); + buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); + } + _ => { + let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { + let base_text = RandomCharIter::new(&mut rng).take(10).collect::(); + buffers.push(cx.add_model(|cx| Buffer::new(0, base_text, cx))); + buffers.last().unwrap() + } else { + buffers.choose(&mut rng).unwrap() + }; + + let buffer = buffer_handle.read(cx); + let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); + let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let header_height = rng.gen_range(0..=5); + let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); + log::info!( + "Pushing fragment for buffer {}: {:?}[{:?}] = {:?}", + buffer_handle.id(), + buffer.text(), + start_ix..end_ix, + &buffer.text()[start_ix..end_ix] + ); + + let fragment_id = list.update(cx, |list, cx| { + list.push( + FragmentProperties { + buffer: &buffer_handle, + range: start_ix..end_ix, + header_height, + }, + cx, + ) + }); + fragment_ids.push(fragment_id); + expected_fragments.push((buffer_handle.clone(), anchor_range, header_height)); + } + } + + let snapshot = list.read(cx).snapshot(cx); + let mut expected_text = String::new(); + for (buffer, range, header_height) in &expected_fragments { + let buffer = buffer.read(cx); + if !expected_text.is_empty() { + expected_text.push('\n'); + } + + for _ in 0..*header_height { + expected_text.push('\n'); + } + expected_text.extend(buffer.text_for_range(range.clone())); + } + assert_eq!(snapshot.text(), expected_text); + } + } + #[gpui::test(iterations = 100)] fn test_location(mut rng: StdRng) { let mut lhs = Default::default(); diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index cff74af1e3bee4b9c65416ecdd5986fbb787f4ac..9a52322c22bbdd20d423fb8b24038fb387cd50a4 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -12,6 +12,13 @@ use std::{ }; use unindent::Unindent as _; +#[cfg(test)] +#[ctor::ctor] +fn init_logger() { + // std::env::set_var("RUST_LOG", "info"); + env_logger::init(); +} + #[test] fn test_select_language() { let registry = LanguageRegistry { diff --git a/crates/text/src/random_char_iter.rs b/crates/text/src/random_char_iter.rs index 244665688d6008caa1bbb0c8208aef0df863b8e9..94913150be3fd43c086979d54ce7b2350f512707 100644 --- a/crates/text/src/random_char_iter.rs +++ b/crates/text/src/random_char_iter.rs @@ -12,6 +12,14 @@ impl Iterator for RandomCharIter { type Item = char; fn next(&mut self) -> Option { + if std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()) { + return if self.0.gen_range(0..100) < 5 { + Some('\n') + } else { + Some(self.0.gen_range(b'a'..b'z' + 1).into()) + }; + } + match self.0.gen_range(0..100) { // whitespace 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(), From 42eba7268d5ce4acc1a2f478f2f04ad82df8d182 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 6 Dec 2021 19:48:45 +0100 Subject: [PATCH 006/196] Introduce `Buffer::edits_since_in_range` Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/language/Cargo.toml | 2 +- crates/language/src/fragment_list.rs | 29 ++++---- crates/text/src/tests.rs | 26 +++++++ crates/text/src/text.rs | 100 ++++++++++++++++++++++++--- 4 files changed, 135 insertions(+), 22 deletions(-) diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index d6121cc7bd15644b17d7131f2fb295354a70b84b..a9a781e604b87af2386f674fbdee1bc668c44afa 100644 --- 
a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "language" version = "0.1.0" -edition = "2018" +edition = "2021" [lib] path = "src/language.rs" diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/fragment_list.rs index e8312aeda0272f558a3f668121c745da9443cae4..6f16e9ab51881577437c167b90ddf2efcc3e4bf7 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/fragment_list.rs @@ -5,7 +5,7 @@ use parking_lot::Mutex; use smallvec::{smallvec, SmallVec}; use std::{cmp, iter, mem, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; -use text::TextSummary; +use text::{Anchor, AnchorRangeExt, TextSummary}; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -43,7 +43,7 @@ pub struct FragmentProperties<'a, T> { struct Entry { id: FragmentId, buffer: buffer::Snapshot, - buffer_range: Range, + buffer_range: Range, text_summary: TextSummary, header_height: u8, } @@ -86,7 +86,8 @@ impl FragmentList { self.sync(cx); let buffer = props.buffer.read(cx); - let buffer_range = props.range.start.to_offset(buffer)..props.range.end.to_offset(buffer); + let buffer_range = + buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); let mut text_summary = buffer.text_summary_for_range::(buffer_range.clone()); if props.header_height > 0 { @@ -148,12 +149,13 @@ impl FragmentList { let old_fragments = mem::take(&mut snapshot.entries); let mut cursor = old_fragments.cursor::(); for (buffer, fragment_id, patch_ix) in fragments_to_edit { + let buffer = buffer.read(cx); snapshot .entries .push_tree(cursor.slice(fragment_id, Bias::Left, &()), &()); let fragment = cursor.item().unwrap(); - let mut new_range = fragment.buffer_range.clone(); + let mut new_range = fragment.buffer_range.to_offset(buffer); for edit in patches[patch_ix].edits() { let edit_start = edit.new.start; let edit_end = edit.new.start + edit.old_len(); @@ -177,7 +179,6 @@ impl FragmentList { } } - let buffer = buffer.read(cx); let mut text_summary: TextSummary = buffer.text_summary_for_range(new_range.clone()); if fragment.header_height > 0 { text_summary.first_line_chars = 0; @@ -189,7 +190,8 @@ impl FragmentList { Entry { id: fragment.id.clone(), buffer: buffer.snapshot(), - buffer_range: new_range, + buffer_range: buffer.anchor_before(new_range.start) + ..buffer.anchor_after(new_range.end), text_summary, header_height: fragment.header_height, }, @@ -227,10 +229,11 @@ impl Snapshot { cursor.seek(&range.start, Bias::Right, &()); let entry_chunks = cursor.item().map(|entry| { - let buffer_start = entry.buffer_range.start + (range.start - cursor.start()); + let buffer_range = entry.buffer_range.to_offset(&entry.buffer); + let buffer_start = buffer_range.start + (range.start - cursor.start()); let buffer_end = cmp::min( - entry.buffer_range.end, - entry.buffer_range.start + (range.end - cursor.start()), + buffer_range.end, + buffer_range.start + (range.end - cursor.start()), ); entry.buffer.chunks(buffer_start..buffer_end, theme) }); @@ -305,17 +308,17 @@ impl<'a> Iterator for Chunks<'a> { self.cursor.next(&()); let entry = self.cursor.item()?; - + let buffer_range = entry.buffer_range.to_offset(&entry.buffer); let buffer_end = cmp::min( - entry.buffer_range.end, - entry.buffer_range.start + (self.range.end - self.cursor.start()), + buffer_range.end, + buffer_range.start + (self.range.end - self.cursor.start()), ); self.header_height = entry.header_height; self.entry_chunks = Some( entry .buffer - 
.chunks(entry.buffer_range.start..buffer_end, self.theme), + .chunks(buffer_range.start..buffer_end, self.theme), ); Some(Chunk { diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index a13273b898b9d6febd865add8cfee83d30f81fc1..ff1a3d9ec8b4fbb086d1a9a1c2f83ce13b46e033 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -102,6 +102,32 @@ fn test_random_edits(mut rng: StdRng) { } assert_eq!(text.to_string(), buffer.text()); + for _ in 0..5 { + let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right); + let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix); + let mut old_text = old_buffer.text_for_range(range.clone()).collect::(); + let edits = buffer + .edits_since_in_range::(&old_buffer.version, range.clone()) + .collect::>(); + log::info!( + "applying edits since version {:?} to old text in range {:?}: {:?}: {:?}", + old_buffer.version(), + start_ix..end_ix, + old_text, + edits, + ); + + let new_text = buffer.text_for_range(range).collect::(); + for edit in edits { + old_text.replace_range( + edit.new.start..edit.new.start + edit.old_len(), + &new_text[edit.new], + ); + } + assert_eq!(old_text, new_text); + } + let subscription_edits = subscription.consume(); log::info!( "applying subscription edits since version {:?} to old text: {:?}: {:?}", diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 0fef4aac82bb300e66fda5000af62ab215865bfc..06c92ca91efe07d22fbc30bb3eacf115d5a47304 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -302,6 +302,7 @@ struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> { since: &'a clock::Global, old_end: D, new_end: D, + range: Range, } #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -402,6 +403,12 @@ struct FragmentTextSummary { deleted: usize, } +impl FragmentTextSummary { + pub fn full_offset(&self) -> FullOffset { + FullOffset(self.visible + self.deleted) + } +} + impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { self.visible += summary.text.visible; @@ -1873,6 +1880,17 @@ impl Snapshot { &'a self, since: &'a clock::Global, ) -> impl 'a + Iterator> + where + D: 'a + TextDimension<'a> + Ord, + { + self.edits_since_in_range(since, Anchor::min()..Anchor::max()) + } + + pub fn edits_since_in_range<'a, D>( + &'a self, + since: &'a clock::Global, + range: Range, + ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a> + Ord, { @@ -1885,14 +1903,36 @@ impl Snapshot { ) }; + let mut cursor = self + .fragments + .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); + cursor.seek( + &VersionedFullOffset::Offset(range.start.full_offset), + range.start.bias, + &Some(range.start.version), + ); + let mut visible_start = cursor.start().1.visible; + let mut deleted_start = cursor.start().1.deleted; + if let Some(fragment) = cursor.item() { + let overshoot = range.start.full_offset.0 - cursor.start().0.full_offset().0; + if fragment.visible { + visible_start += overshoot; + } else { + deleted_start += overshoot; + } + } + + let full_offset_start = FullOffset(visible_start + deleted_start); + let full_offset_end = range.end.to_full_offset(self, range.end.bias); Edits { - visible_cursor: self.visible_text.cursor(0), - deleted_cursor: self.deleted_text.cursor(0), + visible_cursor: self.visible_text.cursor(visible_start), + deleted_cursor: 
self.deleted_text.cursor(deleted_start), fragments_cursor, undos: &self.undo_map, since, old_end: Default::default(), new_end: Default::default(), + range: full_offset_start..full_offset_end, } } } @@ -1960,9 +2000,19 @@ impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterato let cursor = self.fragments_cursor.as_mut()?; while let Some(fragment) = cursor.item() { - let summary = self.visible_cursor.summary(cursor.start().visible); - self.old_end.add_assign(&summary); - self.new_end.add_assign(&summary); + if cursor.end(&None).full_offset() < self.range.start { + cursor.next(&None); + continue; + } else if cursor.start().full_offset() >= self.range.end { + break; + } + + if cursor.start().visible > self.visible_cursor.offset() { + let summary = self.visible_cursor.summary(cursor.start().visible); + self.old_end.add_assign(&summary); + self.new_end.add_assign(&summary); + } + if pending_edit .as_ref() .map_or(false, |change| change.new.end < self.new_end) @@ -1971,7 +2021,12 @@ impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterato } if !fragment.was_visible(&self.since, &self.undos) && fragment.visible { - let fragment_summary = self.visible_cursor.summary(cursor.end(&None).visible); + let visible_end = cmp::min( + cursor.end(&None).visible, + cursor.start().visible + (self.range.end - cursor.start().full_offset()), + ); + + let fragment_summary = self.visible_cursor.summary(visible_end); let mut new_end = self.new_end.clone(); new_end.add_assign(&fragment_summary); if let Some(pending_edit) = pending_edit.as_mut() { @@ -1985,8 +2040,15 @@ impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterato self.new_end = new_end; } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible { - self.deleted_cursor.seek_forward(cursor.start().deleted); - let fragment_summary = self.deleted_cursor.summary(cursor.end(&None).deleted); + let deleted_end = cmp::min( + cursor.end(&None).deleted, + cursor.start().deleted + (self.range.end - cursor.start().full_offset()), + ); + + if cursor.start().deleted > self.deleted_cursor.offset() { + self.deleted_cursor.seek_forward(cursor.start().deleted); + } + let fragment_summary = self.deleted_cursor.summary(deleted_end); let mut old_end = self.old_end.clone(); old_end.add_assign(&fragment_summary); if let Some(pending_edit) = pending_edit.as_mut() { @@ -2251,6 +2313,28 @@ impl ToOffset for Anchor { fn to_offset<'a>(&self, content: &Snapshot) -> usize { content.summary_for_anchor(self) } + + fn to_full_offset<'a>(&self, content: &Snapshot, bias: Bias) -> FullOffset { + if content.version == self.version { + self.full_offset + } else { + let mut cursor = content + .fragments + .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); + cursor.seek( + &VersionedFullOffset::Offset(self.full_offset), + bias, + &Some(self.version.clone()), + ); + + let mut full_offset = cursor.start().1.full_offset().0; + if cursor.item().is_some() { + full_offset += self.full_offset - cursor.start().0.full_offset(); + } + + FullOffset(full_offset) + } + } } impl<'a> ToOffset for &'a Anchor { From cff610e1ec27bd3e39fce22e61fcc37fbda04b48 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 11:59:32 -0700 Subject: [PATCH 007/196] Rename FragmentList to ExcerptList Co-Authored-By: Max Brunsfeld --- .../src/{fragment_list.rs => excerpt_list.rs} | 102 +++++++++--------- crates/language/src/language.rs | 2 +- 2 files changed, 50 insertions(+), 54 deletions(-) rename 
crates/language/src/{fragment_list.rs => excerpt_list.rs} (84%) diff --git a/crates/language/src/fragment_list.rs b/crates/language/src/excerpt_list.rs similarity index 84% rename from crates/language/src/fragment_list.rs rename to crates/language/src/excerpt_list.rs index 6f16e9ab51881577437c167b90ddf2efcc3e4bf7..bd47965e108f9fbd3b912a7de2723dd72ef38eda 100644 --- a/crates/language/src/fragment_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -14,10 +14,10 @@ pub trait ToOffset { fn to_offset<'a>(&self, content: &Snapshot) -> usize; } -pub type FragmentId = Location; +pub type ExcerptId = Location; #[derive(Default)] -pub struct FragmentList { +pub struct ExcerptList { snapshot: Mutex, buffers: HashMap, } @@ -25,7 +25,7 @@ pub struct FragmentList { struct BufferState { buffer: ModelHandle, subscription: text::Subscription, - fragments: Vec, + excerpts: Vec, } #[derive(Clone, Default)] @@ -33,7 +33,7 @@ pub struct Snapshot { entries: SumTree, } -pub struct FragmentProperties<'a, T> { +pub struct ExcerptProperties<'a, T> { buffer: &'a ModelHandle, range: Range, header_height: u8, @@ -41,7 +41,7 @@ pub struct FragmentProperties<'a, T> { #[derive(Clone)] struct Entry { - id: FragmentId, + id: ExcerptId, buffer: buffer::Snapshot, buffer_range: Range, text_summary: TextSummary, @@ -50,7 +50,7 @@ struct Entry { #[derive(Clone, Debug, Default)] struct EntrySummary { - fragment_id: FragmentId, + excerpt_id: ExcerptId, text: TextSummary, } @@ -65,7 +65,7 @@ pub struct Chunks<'a> { theme: Option<&'a SyntaxTheme>, } -impl FragmentList { +impl ExcerptList { pub fn new() -> Self { Self::default() } @@ -75,11 +75,7 @@ impl FragmentList { self.snapshot.lock().clone() } - pub fn push( - &mut self, - props: FragmentProperties, - cx: &mut ModelContext, - ) -> FragmentId + pub fn push(&mut self, props: ExcerptProperties, cx: &mut ModelContext) -> ExcerptId where O: text::ToOffset, { @@ -99,7 +95,7 @@ impl FragmentList { let mut snapshot = self.snapshot.lock(); let prev_id = snapshot.entries.last().map(|e| &e.id); - let id = FragmentId::between(prev_id.unwrap_or(&FragmentId::min()), &FragmentId::max()); + let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); snapshot.entries.push( Entry { id: id.clone(), @@ -118,10 +114,10 @@ impl FragmentList { BufferState { buffer: props.buffer.clone(), subscription, - fragments: Default::default(), + excerpts: Default::default(), } }) - .fragments + .excerpts .push(id.clone()); id @@ -130,32 +126,32 @@ impl FragmentList { fn sync(&self, cx: &AppContext) { let mut snapshot = self.snapshot.lock(); let mut patches = Vec::new(); - let mut fragments_to_edit = Vec::new(); + let mut excerpts_to_edit = Vec::new(); for buffer_state in self.buffers.values() { let patch = buffer_state.subscription.consume(); if !patch.is_empty() { let patch_ix = patches.len(); patches.push(patch); - fragments_to_edit.extend( + excerpts_to_edit.extend( buffer_state - .fragments + .excerpts .iter() - .map(|fragment_id| (&buffer_state.buffer, fragment_id, patch_ix)), + .map(|excerpt_id| (&buffer_state.buffer, excerpt_id, patch_ix)), ) } } - fragments_to_edit.sort_unstable_by_key(|(_, fragment_id, _)| *fragment_id); + excerpts_to_edit.sort_unstable_by_key(|(_, excerpt_id, _)| *excerpt_id); - let old_fragments = mem::take(&mut snapshot.entries); - let mut cursor = old_fragments.cursor::(); - for (buffer, fragment_id, patch_ix) in fragments_to_edit { + let old_excerpts = mem::take(&mut snapshot.entries); + let mut cursor = old_excerpts.cursor::(); + for (buffer, 
excerpt_id, patch_ix) in excerpts_to_edit { let buffer = buffer.read(cx); snapshot .entries - .push_tree(cursor.slice(fragment_id, Bias::Left, &()), &()); + .push_tree(cursor.slice(excerpt_id, Bias::Left, &()), &()); - let fragment = cursor.item().unwrap(); - let mut new_range = fragment.buffer_range.to_offset(buffer); + let excerpt = cursor.item().unwrap(); + let mut new_range = excerpt.buffer_range.to_offset(buffer); for edit in patches[patch_ix].edits() { let edit_start = edit.new.start; let edit_end = edit.new.start + edit.old_len(); @@ -180,20 +176,20 @@ impl FragmentList { } let mut text_summary: TextSummary = buffer.text_summary_for_range(new_range.clone()); - if fragment.header_height > 0 { + if excerpt.header_height > 0 { text_summary.first_line_chars = 0; - text_summary.lines.row += fragment.header_height as u32; - text_summary.lines_utf16.row += fragment.header_height as u32; - text_summary.bytes += fragment.header_height as usize; + text_summary.lines.row += excerpt.header_height as u32; + text_summary.lines_utf16.row += excerpt.header_height as u32; + text_summary.bytes += excerpt.header_height as usize; } snapshot.entries.push( Entry { - id: fragment.id.clone(), + id: excerpt.id.clone(), buffer: buffer.snapshot(), buffer_range: buffer.anchor_before(new_range.start) ..buffer.anchor_after(new_range.end), text_summary, - header_height: fragment.header_height, + header_height: excerpt.header_height, }, &(), ); @@ -204,7 +200,7 @@ impl FragmentList { } } -impl Entity for FragmentList { +impl Entity for ExcerptList { type Event = (); } @@ -254,7 +250,7 @@ impl sum_tree::Item for Entry { fn summary(&self) -> Self::Summary { EntrySummary { - fragment_id: self.id.clone(), + excerpt_id: self.id.clone(), text: self.text_summary.clone(), } } @@ -264,8 +260,8 @@ impl sum_tree::Summary for EntrySummary { type Context = (); fn add_summary(&mut self, summary: &Self, _: &()) { - debug_assert!(summary.fragment_id > self.fragment_id); - self.fragment_id = summary.fragment_id.clone(); + debug_assert!(summary.excerpt_id > self.excerpt_id); + self.excerpt_id = summary.excerpt_id.clone(); self.text.add_summary(&summary.text, &()); } } @@ -276,10 +272,10 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for FragmentId { +impl<'a> sum_tree::Dimension<'a, EntrySummary> for ExcerptId { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { - debug_assert!(summary.fragment_id > *self); - *self = summary.fragment_id.clone(); + debug_assert!(summary.excerpt_id > *self); + *self = summary.excerpt_id.clone(); } } @@ -376,15 +372,15 @@ mod tests { use util::test::sample_text; #[gpui::test] - fn test_fragment_buffer(cx: &mut MutableAppContext) { + fn test_excerpt_buffer(cx: &mut MutableAppContext) { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); let list = cx.add_model(|cx| { - let mut list = FragmentList::new(); + let mut list = ExcerptList::new(); list.push( - FragmentProperties { + ExcerptProperties { buffer: &buffer_1, range: Point::new(1, 2)..Point::new(2, 5), header_height: 2, @@ -392,7 +388,7 @@ mod tests { cx, ); list.push( - FragmentProperties { + ExcerptProperties { buffer: &buffer_1, range: Point::new(3, 3)..Point::new(4, 4), header_height: 1, @@ -400,7 +396,7 @@ mod tests { cx, ); list.push( - FragmentProperties { + ExcerptProperties { buffer: &buffer_2, range: Point::new(3, 1)..Point::new(3, 3), header_height: 3, @@ 
-464,9 +460,9 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let list = cx.add_model(|_| FragmentList::new()); - let mut fragment_ids = Vec::new(); - let mut expected_fragments = Vec::new(); + let list = cx.add_model(|_| ExcerptList::new()); + let mut excerpt_ids = Vec::new(); + let mut expected_excerpts = Vec::new(); for _ in 0..operations { match rng.gen_range(0..100) { @@ -489,16 +485,16 @@ mod tests { let header_height = rng.gen_range(0..=5); let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); log::info!( - "Pushing fragment for buffer {}: {:?}[{:?}] = {:?}", + "Pushing excerpt for buffer {}: {:?}[{:?}] = {:?}", buffer_handle.id(), buffer.text(), start_ix..end_ix, &buffer.text()[start_ix..end_ix] ); - let fragment_id = list.update(cx, |list, cx| { + let excerpt_id = list.update(cx, |list, cx| { list.push( - FragmentProperties { + ExcerptProperties { buffer: &buffer_handle, range: start_ix..end_ix, header_height, @@ -506,14 +502,14 @@ mod tests { cx, ) }); - fragment_ids.push(fragment_id); - expected_fragments.push((buffer_handle.clone(), anchor_range, header_height)); + excerpt_ids.push(excerpt_id); + expected_excerpts.push((buffer_handle.clone(), anchor_range, header_height)); } } let snapshot = list.read(cx).snapshot(cx); let mut expected_text = String::new(); - for (buffer, range, header_height) in &expected_fragments { + for (buffer, range, header_height) in &expected_excerpts { let buffer = buffer.read(cx); if !expected_text.is_empty() { expected_text.push('\n'); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 704e9b967c9433bf2a943a8e9cbfce366b229239..de8d5fa8939315613e88178f879d51e89578078b 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,5 +1,5 @@ mod buffer; -mod fragment_list; +mod excerpt_list; mod highlight_map; pub mod proto; #[cfg(test)] From 6965117dd8617174ed1dd12ac0662ab61a739fc2 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 13:00:51 -0700 Subject: [PATCH 008/196] Allow patches to be composed with edit iterators in addition to other Patches This can avoid an extra allocation in some cases. 
Co-Authored-By: Max Brunsfeld --- crates/text/src/patch.rs | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index 91a07610177c7f9bd06c13f67d5f7657ecbbca86..a21f1125ec1470ce9be0d909cc2410cf4e0b8c18 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -9,7 +9,8 @@ pub struct Patch(Vec>); impl Patch where - T: Clone + T: 'static + + Clone + Copy + Ord + Sub @@ -41,9 +42,9 @@ where self.0 } - pub fn compose(&self, other: &Self) -> Self { + pub fn compose(&self, new_edits_iter: impl IntoIterator>) -> Self { let mut old_edits_iter = self.0.iter().cloned().peekable(); - let mut new_edits_iter = other.0.iter().cloned().peekable(); + let mut new_edits_iter = new_edits_iter.into_iter().peekable(); let mut composed = Patch(Vec::new()); let mut old_start = T::default(); @@ -200,6 +201,15 @@ where } } +impl<'a, T: Clone> IntoIterator for &'a Patch { + type Item = Edit; + type IntoIter = std::iter::Cloned>>; + + fn into_iter(self) -> Self::IntoIter { + self.0.iter().cloned() + } +} + #[cfg(test)] mod tests { use super::*; From a02a29944c11abf5ed6b4b7777362b84e1ab2f5a Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 13:01:09 -0700 Subject: [PATCH 009/196] Get the basic ExcerptList unit test passing again Co-Authored-By: Max Brunsfeld --- crates/language/src/excerpt_list.rs | 208 ++++++++++++++-------------- 1 file changed, 103 insertions(+), 105 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index bd47965e108f9fbd3b912a7de2723dd72ef38eda..59619f156e0416279126377a215c20f630759286 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -1,11 +1,12 @@ use crate::{buffer, Buffer, Chunk}; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; +use lsp::TextDocumentSaveReason; use parking_lot::Mutex; use smallvec::{smallvec, SmallVec}; use std::{cmp, iter, mem, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; -use text::{Anchor, AnchorRangeExt, TextSummary}; +use text::{Anchor, AnchorRangeExt, Patch, TextSummary}; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -22,15 +23,16 @@ pub struct ExcerptList { buffers: HashMap, } +#[derive(Debug)] struct BufferState { buffer: ModelHandle, - subscription: text::Subscription, + last_sync: clock::Global, excerpts: Vec, } #[derive(Clone, Default)] pub struct Snapshot { - entries: SumTree, + excerpts: SumTree, } pub struct ExcerptProperties<'a, T> { @@ -40,10 +42,10 @@ pub struct ExcerptProperties<'a, T> { } #[derive(Clone)] -struct Entry { +struct Excerpt { id: ExcerptId, buffer: buffer::Snapshot, - buffer_range: Range, + range: Range, text_summary: TextSummary, header_height: u8, } @@ -55,11 +57,11 @@ struct EntrySummary { } #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct Location(SmallVec<[usize; 4]>); +pub struct Location(SmallVec<[u8; 4]>); pub struct Chunks<'a> { range: Range, - cursor: Cursor<'a, Entry, usize>, + cursor: Cursor<'a, Excerpt, usize>, header_height: u8, entry_chunks: Option>, theme: Option<&'a SyntaxTheme>, @@ -82,40 +84,21 @@ impl ExcerptList { self.sync(cx); let buffer = props.buffer.read(cx); - let buffer_range = - buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); - let mut text_summary = - buffer.text_summary_for_range::(buffer_range.clone()); - if props.header_height > 0 { - text_summary.first_line_chars = 0; - 
text_summary.lines.row += props.header_height as u32; - text_summary.lines_utf16.row += props.header_height as u32; - text_summary.bytes += props.header_height as usize; - } - + let range = buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); let mut snapshot = self.snapshot.lock(); - let prev_id = snapshot.entries.last().map(|e| &e.id); + let prev_id = snapshot.excerpts.last().map(|e| &e.id); let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); - snapshot.entries.push( - Entry { - id: id.clone(), - buffer: props.buffer.read(cx).snapshot(), - buffer_range, - text_summary, - header_height: props.header_height, - }, + + snapshot.excerpts.push( + Excerpt::new(id.clone(), buffer.snapshot(), range, props.header_height), &(), ); - self.buffers .entry(props.buffer.id()) - .or_insert_with(|| { - let subscription = props.buffer.update(cx, |buffer, _| buffer.subscribe()); - BufferState { - buffer: props.buffer.clone(), - subscription, - excerpts: Default::default(), - } + .or_insert_with(|| BufferState { + buffer: props.buffer.clone(), + last_sync: buffer.version(), + excerpts: Default::default(), }) .excerpts .push(id.clone()); @@ -125,78 +108,66 @@ impl ExcerptList { fn sync(&self, cx: &AppContext) { let mut snapshot = self.snapshot.lock(); - let mut patches = Vec::new(); let mut excerpts_to_edit = Vec::new(); for buffer_state in self.buffers.values() { - let patch = buffer_state.subscription.consume(); - if !patch.is_empty() { - let patch_ix = patches.len(); - patches.push(patch); + if buffer_state + .buffer + .read(cx) + .version() + .gt(&buffer_state.last_sync) + { excerpts_to_edit.extend( buffer_state .excerpts .iter() - .map(|excerpt_id| (&buffer_state.buffer, excerpt_id, patch_ix)), - ) + .map(|excerpt_id| (excerpt_id, buffer_state)), + ); } } - excerpts_to_edit.sort_unstable_by_key(|(_, excerpt_id, _)| *excerpt_id); - - let old_excerpts = mem::take(&mut snapshot.entries); - let mut cursor = old_excerpts.cursor::(); - for (buffer, excerpt_id, patch_ix) in excerpts_to_edit { - let buffer = buffer.read(cx); - snapshot - .entries - .push_tree(cursor.slice(excerpt_id, Bias::Left, &()), &()); - - let excerpt = cursor.item().unwrap(); - let mut new_range = excerpt.buffer_range.to_offset(buffer); - for edit in patches[patch_ix].edits() { - let edit_start = edit.new.start; - let edit_end = edit.new.start + edit.old_len(); - if edit_start > new_range.end { - break; - } else if edit_end < new_range.start { - let delta = edit.new_len() as isize - edit.old_len() as isize; - new_range.start = (new_range.start as isize + delta) as usize; - new_range.end = (new_range.end as isize + delta) as usize; - } else { - let mut new_range_len = new_range.len(); - new_range_len -= - cmp::min(new_range.end, edit_end) - cmp::max(new_range.start, edit_start); - if edit_start < new_range.start { - new_range.start = edit.new.end; - } else { - new_range_len += edit.new_len(); - } - - new_range.end = new_range.start + new_range_len; - } - } - - let mut text_summary: TextSummary = buffer.text_summary_for_range(new_range.clone()); - if excerpt.header_height > 0 { - text_summary.first_line_chars = 0; - text_summary.lines.row += excerpt.header_height as u32; - text_summary.lines_utf16.row += excerpt.header_height as u32; - text_summary.bytes += excerpt.header_height as usize; - } - snapshot.entries.push( - Entry { - id: excerpt.id.clone(), - buffer: buffer.snapshot(), - buffer_range: buffer.anchor_before(new_range.start) - ..buffer.anchor_after(new_range.end), - 
text_summary, - header_height: excerpt.header_height, - }, + excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _)| *excerpt_id); + + dbg!(&excerpts_to_edit); + + let mut patch = Patch::::default(); + let mut new_excerpts = SumTree::new(); + let mut cursor = snapshot.excerpts.cursor::<(ExcerptId, usize)>(); + + for (id, buffer_state) in excerpts_to_edit { + new_excerpts.push_tree(cursor.slice(id, Bias::Left, &()), &()); + let old_excerpt = cursor.item().unwrap(); + let buffer = buffer_state.buffer.read(cx); + new_excerpts.push( + Excerpt::new( + id.clone(), + buffer.snapshot(), + old_excerpt.range.clone(), + old_excerpt.header_height, + ), &(), ); + let edits = buffer + .edits_since_in_range::( + old_excerpt.buffer.version(), + old_excerpt.range.clone(), + ) + .map(|mut edit| { + let excerpt_old_start = cursor.start().1; + let excerpt_new_start = new_excerpts.summary().text.bytes; + edit.old.start += excerpt_old_start; + edit.old.end += excerpt_old_start; + edit.new.start += excerpt_new_start; + edit.new.end += excerpt_new_start; + edit + }); + patch = patch.compose(edits); + cursor.next(&()); } - snapshot.entries.push_tree(cursor.suffix(&()), &()); + new_excerpts.push_tree(cursor.suffix(&()), &()); + + drop(cursor); + snapshot.excerpts = new_excerpts; } } @@ -212,7 +183,7 @@ impl Snapshot { } pub fn len(&self) -> usize { - self.entries.summary().text.bytes + self.excerpts.summary().text.bytes } pub fn chunks<'a, T: ToOffset>( @@ -221,11 +192,11 @@ impl Snapshot { theme: Option<&'a SyntaxTheme>, ) -> Chunks<'a> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.entries.cursor::(); + let mut cursor = self.excerpts.cursor::(); cursor.seek(&range.start, Bias::Right, &()); let entry_chunks = cursor.item().map(|entry| { - let buffer_range = entry.buffer_range.to_offset(&entry.buffer); + let buffer_range = entry.range.to_offset(&entry.buffer); let buffer_start = buffer_range.start + (range.start - cursor.start()); let buffer_end = cmp::min( buffer_range.end, @@ -245,7 +216,31 @@ impl Snapshot { } } -impl sum_tree::Item for Entry { +impl Excerpt { + fn new( + id: ExcerptId, + buffer: buffer::Snapshot, + range: Range, + header_height: u8, + ) -> Self { + let mut text_summary = buffer.text_summary_for_range::(range.clone()); + if header_height > 0 { + text_summary.first_line_chars = 0; + text_summary.lines.row += header_height as u32; + text_summary.lines_utf16.row += header_height as u32; + text_summary.bytes += header_height as usize; + } + Excerpt { + id, + buffer, + range, + text_summary, + header_height, + } + } +} + +impl sum_tree::Item for Excerpt { type Summary = EntrySummary; fn summary(&self) -> Self::Summary { @@ -272,7 +267,7 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for ExcerptId { +impl<'a> sum_tree::Dimension<'a, EntrySummary> for Location { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { debug_assert!(summary.excerpt_id > *self); *self = summary.excerpt_id.clone(); @@ -304,7 +299,7 @@ impl<'a> Iterator for Chunks<'a> { self.cursor.next(&()); let entry = self.cursor.item()?; - let buffer_range = entry.buffer_range.to_offset(&entry.buffer); + let buffer_range = entry.range.to_offset(&entry.buffer); let buffer_end = cmp::min( buffer_range.end, buffer_range.start + (self.range.end - self.cursor.start()), @@ -338,16 +333,16 @@ impl Default for Location { impl Location { pub fn min() -> Self { - Self(smallvec![usize::MIN]) + Self(smallvec![u8::MIN]) } pub fn 
max() -> Self { - Self(smallvec![usize::MAX]) + Self(smallvec![u8::MAX]) } pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(usize::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(usize::MAX)); + let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { let mid = lhs + (rhs.saturating_sub(lhs)) / 2; @@ -378,7 +373,10 @@ mod tests { let list = cx.add_model(|cx| { let mut list = ExcerptList::new(); - + // aaaaaa + // bbbbbb + // cccccc + // dddddd list.push( ExcerptProperties { buffer: &buffer_1, From 4578938ea1a1aba1f4e339923ac3ec36b61ca27a Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 13:37:17 -0700 Subject: [PATCH 010/196] Implement ExcerptList::subscribe Co-Authored-By: Max Brunsfeld --- crates/language/src/buffer.rs | 1 + crates/language/src/excerpt_list.rs | 73 +++++++++++++++++++++-------- crates/text/src/patch.rs | 9 ++++ crates/text/src/subscription.rs | 48 +++++++++++++++++++ crates/text/src/text.rs | 42 ++++------------- 5 files changed, 119 insertions(+), 54 deletions(-) create mode 100644 crates/text/src/subscription.rs diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 55346fc9dd410c5617c2c1186ca839cc9c0ca32e..b8d1aad91b05a3ca76e478638e2e2257a6bfc9cb 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -28,6 +28,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; +use text::subscription::Subscription; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 59619f156e0416279126377a215c20f630759286..0cda89c5794f316246782ed21aea2c6898ae9c99 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -1,12 +1,14 @@ use crate::{buffer, Buffer, Chunk}; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; -use lsp::TextDocumentSaveReason; use parking_lot::Mutex; use smallvec::{smallvec, SmallVec}; -use std::{cmp, iter, mem, ops::Range}; +use std::{cmp, iter, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; -use text::{Anchor, AnchorRangeExt, Patch, TextSummary}; +use text::{ + subscription::{Subscription, Topic}, + Anchor, AnchorRangeExt, Edit, Patch, TextSummary, +}; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -21,6 +23,7 @@ pub type ExcerptId = Location; pub struct ExcerptList { snapshot: Mutex, buffers: HashMap, + subscriptions: Topic, } #[derive(Debug)] @@ -77,6 +80,10 @@ impl ExcerptList { self.snapshot.lock().clone() } + pub fn subscribe(&mut self) -> Subscription { + self.subscriptions.subscribe() + } + pub fn push(&mut self, props: ExcerptProperties, cx: &mut ModelContext) -> ExcerptId where O: text::ToOffset, @@ -89,10 +96,13 @@ impl ExcerptList { let prev_id = snapshot.excerpts.last().map(|e| &e.id); let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); - snapshot.excerpts.push( - Excerpt::new(id.clone(), buffer.snapshot(), range, props.header_height), - &(), - ); + let edit_start = snapshot.excerpts.summary().text.bytes; + let excerpt = Excerpt::new(id.clone(), buffer.snapshot(), range, props.header_height); + let edit = Edit { + old: edit_start..edit_start, + new: 
edit_start..edit_start + excerpt.text_summary.bytes, + }; + snapshot.excerpts.push(excerpt, &()); self.buffers .entry(props.buffer.id()) .or_insert_with(|| BufferState { @@ -103,6 +113,8 @@ impl ExcerptList { .excerpts .push(id.clone()); + self.subscriptions.publish_mut([edit]); + id } @@ -126,8 +138,6 @@ impl ExcerptList { } excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _)| *excerpt_id); - dbg!(&excerpts_to_edit); - let mut patch = Patch::::default(); let mut new_excerpts = SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(ExcerptId, usize)>(); @@ -168,6 +178,8 @@ impl ExcerptList { drop(cursor); snapshot.excerpts = new_excerpts; + + self.subscriptions.publish(&patch); } } @@ -357,12 +369,11 @@ impl Location { #[cfg(test)] mod tests { - use std::env; - use super::*; use crate::Buffer; use gpui::MutableAppContext; use rand::prelude::*; + use std::{env, mem}; use text::{Point, RandomCharIter}; use util::test::sample_text; @@ -371,12 +382,10 @@ mod tests { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let list = cx.add_model(|cx| { - let mut list = ExcerptList::new(); - // aaaaaa - // bbbbbb - // cccccc - // dddddd + let list = cx.add_model(|cx| ExcerptList::new()); + + let subscription = list.update(cx, |list, cx| { + let subscription = list.subscribe(); list.push( ExcerptProperties { buffer: &buffer_1, @@ -385,6 +394,14 @@ mod tests { }, cx, ); + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 0..0, + new: 0..12 + }] + ); + list.push( ExcerptProperties { buffer: &buffer_1, @@ -401,7 +418,15 @@ mod tests { }, cx, ); - list + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 12..12, + new: 12..26 + }] + ); + + subscription }); assert_eq!( @@ -425,7 +450,7 @@ mod tests { buffer.edit( [ Point::new(0, 0)..Point::new(0, 0), - Point::new(2, 1)..Point::new(2, 2), + Point::new(2, 1)..Point::new(2, 3), ], "\n", cx, @@ -439,7 +464,7 @@ mod tests { "\n", // "bbbb\n", // "c\n", // - "ccc\n", // + "cc\n", // "\n", // "ddd\n", // "eeee\n", // @@ -449,6 +474,14 @@ mod tests { "jj" // ) ); + + assert_eq!( + subscription.consume().into_inner(), + [Edit { + old: 17..19, + new: 17..18 + }] + ); } #[gpui::test(iterations = 100)] diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index a21f1125ec1470ce9be0d909cc2410cf4e0b8c18..f5592cefd00f41c17b320cd7a5d55511dc4ed514 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -210,6 +210,15 @@ impl<'a, T: Clone> IntoIterator for &'a Patch { } } +impl<'a, T: Clone> IntoIterator for &'a mut Patch { + type Item = Edit; + type IntoIter = std::iter::Cloned>>; + + fn into_iter(self) -> Self::IntoIter { + self.0.iter().cloned() + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/text/src/subscription.rs b/crates/text/src/subscription.rs new file mode 100644 index 0000000000000000000000000000000000000000..b636dfcc923654619f7ea61d87b61d7abb4dbae5 --- /dev/null +++ b/crates/text/src/subscription.rs @@ -0,0 +1,48 @@ +use crate::{Edit, Patch}; +use parking_lot::Mutex; +use std::{ + mem, + sync::{Arc, Weak}, +}; + +#[derive(Default)] +pub struct Topic(Mutex>>>>); + +pub struct Subscription(Arc>>); + +impl Topic { + pub fn subscribe(&mut self) -> Subscription { + let subscription = Subscription(Default::default()); + self.0.get_mut().push(Arc::downgrade(&subscription.0)); + subscription + } + + pub fn publish(&self, edits: impl Clone + IntoIterator>) { + publish(&mut 
*self.0.lock(), edits); + } + + pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator>) { + publish(self.0.get_mut(), edits); + } +} + +impl Subscription { + pub fn consume(&self) -> Patch { + mem::take(&mut *self.0.lock()) + } +} + +fn publish( + subscriptions: &mut Vec>>>, + edits: impl Clone + IntoIterator>, +) { + subscriptions.retain(|subscription| { + if let Some(subscription) = subscription.upgrade() { + let mut patch = subscription.lock(); + *patch = patch.compose(edits.clone()); + true + } else { + false + } + }); +} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 06c92ca91efe07d22fbc30bb3eacf115d5a47304..f52a76c3e4b168822640b23cb76eeda0925e8902 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -7,6 +7,7 @@ mod point_utf16; pub mod random_char_iter; pub mod rope; mod selection; +pub mod subscription; #[cfg(test)] mod tests; @@ -15,7 +16,6 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{HashMap, HashSet}; use operation_queue::OperationQueue; -use parking_lot::Mutex; pub use patch::Patch; pub use point::*; pub use point_utf16::*; @@ -29,9 +29,10 @@ use std::{ iter::Iterator, ops::{self, Deref, Range, Sub}, str, - sync::{Arc, Weak}, + sync::Arc, time::{Duration, Instant}, }; +use subscription::{Subscription, Topic}; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; @@ -46,7 +47,7 @@ pub struct Buffer { remote_id: u64, local_clock: clock::Local, lamport_clock: clock::Lamport, - subscriptions: Vec>>>>, + subscriptions: Topic, } #[derive(Clone, Debug)] @@ -343,20 +344,6 @@ impl Edit<(D1, D2)> { } } -#[derive(Clone, Default)] -pub struct Subscription(Arc>>>); - -impl Subscription { - pub fn consume(&self) -> Patch { - let mut patches = self.0.lock(); - let mut changes = Patch::default(); - for patch in patches.drain(..) 
{ - changes = changes.compose(&patch); - } - changes - } -} - #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] pub struct InsertionTimestamp { pub replica_id: ReplicaId, @@ -699,7 +686,7 @@ impl Buffer { self.snapshot.fragments = new_fragments; self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; - self.update_subscriptions(edits); + self.subscriptions.publish_mut(&edits); edit_op.new_text = new_text; edit_op } @@ -955,7 +942,7 @@ impl Buffer { self.snapshot.deleted_text = deleted_text; self.local_clock.observe(timestamp.local()); self.lamport_clock.observe(timestamp.lamport()); - self.update_subscriptions(edits); + self.subscriptions.publish_mut(&edits); } fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { @@ -1045,7 +1032,7 @@ impl Buffer { self.snapshot.fragments = new_fragments; self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; - self.update_subscriptions(edits); + self.subscriptions.publish_mut(&edits); Ok(()) } @@ -1203,20 +1190,7 @@ impl Buffer { } pub fn subscribe(&mut self) -> Subscription { - let subscription = Subscription(Default::default()); - self.subscriptions.push(Arc::downgrade(&subscription.0)); - subscription - } - - fn update_subscriptions(&mut self, edits: Patch) { - self.subscriptions.retain(|subscription| { - if let Some(subscription) = subscription.upgrade() { - subscription.lock().push(edits.clone()); - true - } else { - false - } - }); + self.subscriptions.subscribe() } pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { From 88e3d87098f2934ae72f46c0a108650ced994e54 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 13:48:07 -0700 Subject: [PATCH 011/196] Get randomized test passing on basic excerpt list features Co-Authored-By: Max Brunsfeld --- crates/language/src/excerpt_list.rs | 41 +++++++++++++++-------------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 0cda89c5794f316246782ed21aea2c6898ae9c99..66b052598d6425e3a87edc1e33dc70e78c3d0d0a 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -7,7 +7,7 @@ use std::{cmp, iter, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ subscription::{Subscription, Topic}, - Anchor, AnchorRangeExt, Edit, Patch, TextSummary, + Anchor, AnchorRangeExt, Edit, TextSummary, }; use theme::SyntaxTheme; @@ -138,7 +138,7 @@ impl ExcerptList { } excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _)| *excerpt_id); - let mut patch = Patch::::default(); + let mut edits = Vec::new(); let mut new_excerpts = SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(ExcerptId, usize)>(); @@ -156,21 +156,22 @@ impl ExcerptList { &(), ); - let edits = buffer - .edits_since_in_range::( - old_excerpt.buffer.version(), - old_excerpt.range.clone(), - ) - .map(|mut edit| { - let excerpt_old_start = cursor.start().1; - let excerpt_new_start = new_excerpts.summary().text.bytes; - edit.old.start += excerpt_old_start; - edit.old.end += excerpt_old_start; - edit.new.start += excerpt_new_start; - edit.new.end += excerpt_new_start; - edit - }); - patch = patch.compose(edits); + edits.extend( + buffer + .edits_since_in_range::( + old_excerpt.buffer.version(), + old_excerpt.range.clone(), + ) + .map(|mut edit| { + let excerpt_old_start = cursor.start().1; + let excerpt_new_start = new_excerpts.summary().text.bytes; + edit.old.start += excerpt_old_start; + edit.old.end += 
excerpt_old_start; + edit.new.start += excerpt_new_start; + edit.new.end += excerpt_new_start; + edit + }), + ); cursor.next(&()); } @@ -179,7 +180,7 @@ impl ExcerptList { drop(cursor); snapshot.excerpts = new_excerpts; - self.subscriptions.publish(&patch); + self.subscriptions.publish(edits); } } @@ -382,7 +383,7 @@ mod tests { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let list = cx.add_model(|cx| ExcerptList::new()); + let list = cx.add_model(|_| ExcerptList::new()); let subscription = list.update(cx, |list, cx| { let subscription = list.subscribe(); @@ -485,7 +486,7 @@ mod tests { } #[gpui::test(iterations = 100)] - fn test_random(cx: &mut MutableAppContext, mut rng: StdRng) { + fn test_random_excerpts(cx: &mut MutableAppContext, mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); From 02f42f287759729f945273685d55b7ff84285c25 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Mon, 6 Dec 2021 14:51:23 -0700 Subject: [PATCH 012/196] WIP Co-Authored-By: Max Brunsfeld --- crates/language/src/excerpt_list.rs | 147 +++++++++++++++++++++------- 1 file changed, 110 insertions(+), 37 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 66b052598d6425e3a87edc1e33dc70e78c3d0d0a..1d0caa291ea17df744636e6a15d668bc8d12bdcc 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -3,7 +3,10 @@ use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; use parking_lot::Mutex; use smallvec::{smallvec, SmallVec}; -use std::{cmp, iter, ops::Range}; +use std::{ + cmp, iter, + ops::{Deref, Range}, +}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ subscription::{Subscription, Topic}, @@ -146,15 +149,6 @@ impl ExcerptList { new_excerpts.push_tree(cursor.slice(id, Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer_state.buffer.read(cx); - new_excerpts.push( - Excerpt::new( - id.clone(), - buffer.snapshot(), - old_excerpt.range.clone(), - old_excerpt.header_height, - ), - &(), - ); edits.extend( buffer @@ -163,8 +157,10 @@ impl ExcerptList { old_excerpt.range.clone(), ) .map(|mut edit| { - let excerpt_old_start = cursor.start().1; - let excerpt_new_start = new_excerpts.summary().text.bytes; + let excerpt_old_start = + cursor.start().1 + old_excerpt.header_height as usize; + let excerpt_new_start = + new_excerpts.summary().text.bytes + old_excerpt.header_height as usize; edit.old.start += excerpt_old_start; edit.old.end += excerpt_old_start; edit.new.start += excerpt_new_start; @@ -173,6 +169,16 @@ impl ExcerptList { }), ); + new_excerpts.push( + Excerpt::new( + id.clone(), + buffer.snapshot(), + old_excerpt.range.clone(), + old_excerpt.header_height, + ), + &(), + ); + cursor.next(&()); } new_excerpts.push_tree(cursor.suffix(&()), &()); @@ -195,6 +201,13 @@ impl Snapshot { .collect() } + pub fn text_for_range<'a, T: ToOffset>( + &'a self, + range: Range, + ) -> impl Iterator { + self.chunks(range, None).map(|chunk| chunk.text) + } + pub fn len(&self) -> usize { self.excerpts.summary().text.bytes } @@ -208,16 +221,34 @@ impl Snapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&range.start, Bias::Right, &()); - let entry_chunks = cursor.item().map(|entry| { - let buffer_range = entry.range.to_offset(&entry.buffer); - let buffer_start = buffer_range.start 
+ (range.start - cursor.start()); - let buffer_end = cmp::min( - buffer_range.end, - buffer_range.start + (range.end - cursor.start()), - ); - entry.buffer.chunks(buffer_start..buffer_end, theme) + let mut header_height: u8 = 0; + let entry_chunks = cursor.item().map(|excerpt| { + let buffer_range = excerpt.range.to_offset(&excerpt.buffer); + header_height = excerpt.header_height; + let start_overshoot = range.start - cursor.start(); + let buffer_start; + if start_overshoot < excerpt.header_height as usize { + header_height -= start_overshoot as u8; + buffer_start = buffer_range.start; + } else { + buffer_start = buffer_range.start + start_overshoot - header_height as usize; + header_height = 0; + } + + let end_overshoot = range.end - cursor.start(); + let buffer_end; + if end_overshoot < excerpt.header_height as usize { + header_height -= excerpt.header_height - end_overshoot as u8; + buffer_end = buffer_start; + } else { + buffer_end = cmp::min( + buffer_range.end, + buffer_range.start + end_overshoot - header_height as usize, + ); + } + + excerpt.buffer.chunks(buffer_start..buffer_end, theme) }); - let header_height = cursor.item().map_or(0, |entry| entry.header_height); Chunks { range, @@ -242,7 +273,15 @@ impl Excerpt { text_summary.lines.row += header_height as u32; text_summary.lines_utf16.row += header_height as u32; text_summary.bytes += header_height as usize; + text_summary.longest_row += header_height as u32; } + text_summary.last_line_chars = 0; + text_summary.lines.row += 1; + text_summary.lines.column = 0; + text_summary.lines_utf16.row += 1; + text_summary.lines_utf16.column = 0; + text_summary.bytes += 1; + Excerpt { id, buffer, @@ -307,6 +346,10 @@ impl<'a> Iterator for Chunks<'a> { return Some(chunk); } else { self.entry_chunks.take(); + return Some(Chunk { + text: "\n", + ..Default::default() + }); } } @@ -325,10 +368,7 @@ impl<'a> Iterator for Chunks<'a> { .chunks(buffer_range.start..buffer_end, self.theme), ); - Some(Chunk { - text: "\n", - ..Default::default() - }) + self.next() } } @@ -399,7 +439,7 @@ mod tests { subscription.consume().into_inner(), [Edit { old: 0..0, - new: 0..12 + new: 0..13 }] ); @@ -422,8 +462,8 @@ mod tests { assert_eq!( subscription.consume().into_inner(), [Edit { - old: 12..12, - new: 12..26 + old: 13..13, + new: 13..29 }] ); @@ -443,7 +483,7 @@ mod tests { "\n", // "\n", // "\n", // - "jj" // + "jj\n" // ) ); @@ -472,15 +512,15 @@ mod tests { "\n", // "\n", // "\n", // - "jj" // + "jj\n" // ) ); assert_eq!( subscription.consume().into_inner(), [Edit { - old: 17..19, - new: 17..18 + old: 18..20, + new: 18..19 }] ); } @@ -495,12 +535,13 @@ mod tests { let list = cx.add_model(|_| ExcerptList::new()); let mut excerpt_ids = Vec::new(); let mut expected_excerpts = Vec::new(); + let mut old_versions = Vec::new(); for _ in 0..operations { match rng.gen_range(0..100) { 0..=19 if !buffers.is_empty() => { let buffer = buffers.choose(&mut rng).unwrap(); - buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); + buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 1, cx)); } _ => { let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { @@ -539,21 +580,53 @@ mod tests { } } + if rng.gen_bool(0.3) { + list.update(cx, |list, cx| { + old_versions.push((list.snapshot(cx), list.subscribe())); + }) + } + let snapshot = list.read(cx).snapshot(cx); let mut expected_text = String::new(); for (buffer, range, header_height) in &expected_excerpts { let buffer = buffer.read(cx); - if !expected_text.is_empty() { - expected_text.push('\n'); - } - 
for _ in 0..*header_height { expected_text.push('\n'); } expected_text.extend(buffer.text_for_range(range.clone())); + expected_text.push('\n'); } assert_eq!(snapshot.text(), expected_text); + + for i in 0..10 { + let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); + let start_ix = snapshot.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + + let actual = snapshot.text_for_range(start_ix..end_ix).collect(); + } + // for i in 0..expected_text.len() { + // assert_eq!(snapshot.text(), expected_text); + + // } } + + // let snapshot = list.read(cx).snapshot(cx); + // for (old_snapshot, subscription) in old_versions { + // let edits = subscription.consume().into_inner(); + + // log::info!( + // "applying edits since old text: {:?}: {:?}", + // old_snapshot.text(), + // edits, + // ); + + // let mut text = old_snapshot.text(); + // for edit in edits { + // let new_text: String = snapshot.text_for_range(edit.new.clone()).collect(); + // text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text); + // } + // assert_eq!(text.to_string(), snapshot.text()); + // } } #[gpui::test(iterations = 100)] From 416033a01cce3599fd6b8cf9980beb5f349d3035 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 6 Dec 2021 16:17:31 -0800 Subject: [PATCH 013/196] Get random excerpts test passing w/ text in range, edits --- crates/language/src/excerpt_list.rs | 115 +++++++++++++++++++--------- 1 file changed, 77 insertions(+), 38 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 1d0caa291ea17df744636e6a15d668bc8d12bdcc..42cfef848c38d8f5cb591c8d3fd7ec52a0d5f9b8 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -3,10 +3,7 @@ use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; use parking_lot::Mutex; use smallvec::{smallvec, SmallVec}; -use std::{ - cmp, iter, - ops::{Deref, Range}, -}; +use std::{cmp, iter, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ subscription::{Subscription, Topic}, @@ -212,6 +209,33 @@ impl Snapshot { self.excerpts.summary().text.bytes } + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&offset, bias, &()); + if let Some(excerpt) = cursor.item() { + let overshoot = offset - cursor.start(); + let header_height = excerpt.header_height as usize; + if overshoot < header_height { + *cursor.start() + } else { + let excerpt_start = + text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); + let buffer_offset = excerpt.buffer.clip_offset( + excerpt_start + (offset - header_height - cursor.start()), + bias, + ); + let offset_in_excerpt = if buffer_offset > excerpt_start { + buffer_offset - excerpt_start + } else { + 0 + }; + cursor.start() + header_height + offset_in_excerpt + } + } else { + self.excerpts.summary().text.bytes + } + } + pub fn chunks<'a, T: ToOffset>( &'a self, range: Range, @@ -225,25 +249,27 @@ impl Snapshot { let entry_chunks = cursor.item().map(|excerpt| { let buffer_range = excerpt.range.to_offset(&excerpt.buffer); header_height = excerpt.header_height; - let start_overshoot = range.start - cursor.start(); + let buffer_start; + let start_overshoot = range.start - cursor.start(); if start_overshoot < excerpt.header_height as usize { header_height -= start_overshoot as u8; buffer_start = buffer_range.start; } else { - buffer_start = buffer_range.start + start_overshoot - header_height as usize; + 
buffer_start = + buffer_range.start + start_overshoot - excerpt.header_height as usize; header_height = 0; } - let end_overshoot = range.end - cursor.start(); let buffer_end; + let end_overshoot = range.end - cursor.start(); if end_overshoot < excerpt.header_height as usize { header_height -= excerpt.header_height - end_overshoot as u8; buffer_end = buffer_start; } else { buffer_end = cmp::min( buffer_range.end, - buffer_range.start + end_overshoot - header_height as usize, + buffer_range.start + end_overshoot - excerpt.header_height as usize, ); } @@ -344,26 +370,35 @@ impl<'a> Iterator for Chunks<'a> { if let Some(entry_chunks) = self.entry_chunks.as_mut() { if let Some(chunk) = entry_chunks.next() { return Some(chunk); - } else { + } else if self.range.end >= self.cursor.end(&()) { self.entry_chunks.take(); return Some(Chunk { text: "\n", ..Default::default() }); + } else { + return None; } } self.cursor.next(&()); - let entry = self.cursor.item()?; - let buffer_range = entry.range.to_offset(&entry.buffer); + if *self.cursor.start() == self.range.end { + return None; + } + + let excerpt = self.cursor.item()?; + let buffer_range = excerpt.range.to_offset(&excerpt.buffer); + let buffer_end = cmp::min( buffer_range.end, - buffer_range.start + (self.range.end - self.cursor.start()), + buffer_range.start + self.range.end + - excerpt.header_height as usize + - self.cursor.start(), ); - self.header_height = entry.header_height; + self.header_height = excerpt.header_height; self.entry_chunks = Some( - entry + excerpt .buffer .chunks(buffer_range.start..buffer_end, self.theme), ); @@ -558,7 +593,8 @@ mod tests { let header_height = rng.gen_range(0..=5); let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); log::info!( - "Pushing excerpt for buffer {}: {:?}[{:?}] = {:?}", + "Pushing excerpt wih header {}, buffer {}: {:?}[{:?}] = {:?}", + header_height, buffer_handle.id(), buffer.text(), start_ix..end_ix, @@ -598,35 +634,38 @@ mod tests { } assert_eq!(snapshot.text(), expected_text); - for i in 0..10 { + for _ in 0..10 { let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); let start_ix = snapshot.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); - let actual = snapshot.text_for_range(start_ix..end_ix).collect(); + assert_eq!( + snapshot + .text_for_range(start_ix..end_ix) + .collect::(), + &expected_text[start_ix..end_ix], + "incorrect text for range {:?}", + start_ix..end_ix + ); } - // for i in 0..expected_text.len() { - // assert_eq!(snapshot.text(), expected_text); - - // } } - // let snapshot = list.read(cx).snapshot(cx); - // for (old_snapshot, subscription) in old_versions { - // let edits = subscription.consume().into_inner(); - - // log::info!( - // "applying edits since old text: {:?}: {:?}", - // old_snapshot.text(), - // edits, - // ); - - // let mut text = old_snapshot.text(); - // for edit in edits { - // let new_text: String = snapshot.text_for_range(edit.new.clone()).collect(); - // text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text); - // } - // assert_eq!(text.to_string(), snapshot.text()); - // } + let snapshot = list.read(cx).snapshot(cx); + for (old_snapshot, subscription) in old_versions { + let edits = subscription.consume().into_inner(); + + log::info!( + "applying edits since old text: {:?}: {:?}", + old_snapshot.text(), + edits, + ); + + let mut text = old_snapshot.text(); + for edit in edits { + let new_text: String = snapshot.text_for_range(edit.new.clone()).collect(); + 
text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text); + } + assert_eq!(text.to_string(), snapshot.text()); + } } #[gpui::test(iterations = 100)] From 09c0c3a0e74911738f42bb2cc98d70142cabb719 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 6 Dec 2021 16:28:44 -0800 Subject: [PATCH 014/196] :art: excerpt_list::Chunks::next --- crates/language/src/excerpt_list.rs | 81 ++++++++++++++--------------- 1 file changed, 40 insertions(+), 41 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 42cfef848c38d8f5cb591c8d3fd7ec52a0d5f9b8..b00f1edbc1edab535714d6b1b82179d4aa9cb162 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -356,54 +356,53 @@ impl<'a> Iterator for Chunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { - if self.header_height > 0 { - let chunk = Chunk { - text: unsafe { - std::str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) - }, - ..Default::default() - }; - self.header_height = 0; - return Some(chunk); - } - - if let Some(entry_chunks) = self.entry_chunks.as_mut() { - if let Some(chunk) = entry_chunks.next() { - return Some(chunk); - } else if self.range.end >= self.cursor.end(&()) { - self.entry_chunks.take(); - return Some(Chunk { - text: "\n", + loop { + if self.header_height > 0 { + let chunk = Chunk { + text: unsafe { + std::str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) + }, ..Default::default() - }); - } else { - return None; + }; + self.header_height = 0; + return Some(chunk); } - } - self.cursor.next(&()); - if *self.cursor.start() == self.range.end { - return None; - } + if let Some(entry_chunks) = self.entry_chunks.as_mut() { + if let Some(chunk) = entry_chunks.next() { + return Some(chunk); + } + self.entry_chunks.take(); + if self.cursor.end(&()) <= self.range.end { + return Some(Chunk { + text: "\n", + ..Default::default() + }); + } + } - let excerpt = self.cursor.item()?; - let buffer_range = excerpt.range.to_offset(&excerpt.buffer); + self.cursor.next(&()); + if *self.cursor.start() >= self.range.end { + return None; + } - let buffer_end = cmp::min( - buffer_range.end, - buffer_range.start + self.range.end - - excerpt.header_height as usize - - self.cursor.start(), - ); + let excerpt = self.cursor.item()?; + let buffer_range = excerpt.range.to_offset(&excerpt.buffer); - self.header_height = excerpt.header_height; - self.entry_chunks = Some( - excerpt - .buffer - .chunks(buffer_range.start..buffer_end, self.theme), - ); + let buffer_end = cmp::min( + buffer_range.end, + buffer_range.start + self.range.end + - excerpt.header_height as usize + - self.cursor.start(), + ); - self.next() + self.header_height = excerpt.header_height; + self.entry_chunks = Some( + excerpt + .buffer + .chunks(buffer_range.start..buffer_end, self.theme), + ); + } } } From 102926d171ac3c377da3b7b0a0a389aa16bda773 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 6 Dec 2021 17:28:52 -0800 Subject: [PATCH 015/196] Implement and randomized test excerpt list point translation and clipping --- crates/language/src/excerpt_list.rs | 117 +++++++++++++++++++++++++++- 1 file changed, 114 insertions(+), 3 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index b00f1edbc1edab535714d6b1b82179d4aa9cb162..ff6462a8d10b6e178ff7deb210dee276609cee64 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -7,7 +7,7 @@ use std::{cmp, iter, 
ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ subscription::{Subscription, Topic}, - Anchor, AnchorRangeExt, Edit, TextSummary, + Anchor, AnchorRangeExt, Edit, Point, TextSummary, }; use theme::SyntaxTheme; @@ -211,7 +211,7 @@ impl Snapshot { pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { let mut cursor = self.excerpts.cursor::(); - cursor.seek(&offset, bias, &()); + cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let overshoot = offset - cursor.start(); let header_height = excerpt.header_height as usize; @@ -236,6 +236,56 @@ impl Snapshot { } } + pub fn to_point(&self, offset: usize) -> Point { + let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + cursor.seek(&offset, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let overshoot = offset - cursor.start().0; + let header_height = excerpt.header_height as usize; + if overshoot < header_height { + cursor.start().1 + } else { + let excerpt_start_offset = + text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); + let excerpt_start_point = + text::ToPoint::to_point(&excerpt.range.start, &excerpt.buffer); + let buffer_point = excerpt + .buffer + .to_point(excerpt_start_offset + (offset - header_height - cursor.start().0)); + cursor.start().1 + + Point::new(header_height as u32, 0) + + (buffer_point - excerpt_start_point) + } + } else { + self.excerpts.summary().text.lines + } + } + + pub fn to_offset(&self, point: Point) -> usize { + let mut cursor = self.excerpts.cursor::<(Point, usize)>(); + cursor.seek(&point, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let overshoot = point - cursor.start().0; + let header_height = Point::new(excerpt.header_height as u32, 0); + if overshoot < header_height { + cursor.start().1 + } else { + let excerpt_start_offset = + text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); + let excerpt_start_point = + text::ToPoint::to_point(&excerpt.range.start, &excerpt.buffer); + let buffer_offset = excerpt + .buffer + .to_offset(excerpt_start_point + (point - header_height - cursor.start().0)); + cursor.start().1 + + excerpt.header_height as usize + + (buffer_offset - excerpt_start_offset) + } + } else { + self.excerpts.summary().text.bytes + } + } + pub fn chunks<'a, T: ToOffset>( &'a self, range: Range, @@ -345,6 +395,12 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { } } +impl<'a> sum_tree::Dimension<'a, EntrySummary> for Point { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { + *self += summary.text.lines; + } +} + impl<'a> sum_tree::Dimension<'a, EntrySummary> for Location { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { debug_assert!(summary.excerpt_id > *self); @@ -412,6 +468,12 @@ impl ToOffset for usize { } } +impl ToOffset for Point { + fn to_offset<'a>(&self, snapshot: &Snapshot) -> usize { + snapshot.to_offset(*self) + } +} + impl Default for Location { fn default() -> Self { Self::min() @@ -622,15 +684,64 @@ mod tests { } let snapshot = list.read(cx).snapshot(cx); + let mut expected_text = String::new(); for (buffer, range, header_height) in &expected_excerpts { + let buffer_id = buffer.id(); let buffer = buffer.read(cx); + let buffer_range = range.to_offset(buffer); + let buffer_start_point = buffer.to_point(buffer_range.start); + for _ in 0..*header_height { expected_text.push('\n'); } - expected_text.extend(buffer.text_for_range(range.clone())); + + let excerpt_start = TextSummary::from(expected_text.as_str()); + 
expected_text.extend(buffer.text_for_range(buffer_range.clone())); expected_text.push('\n'); + + for buffer_offset in buffer_range.clone() { + let offset = excerpt_start.bytes + (buffer_offset - buffer_range.start); + let left_offset = snapshot.clip_offset(offset, Bias::Left); + let right_offset = snapshot.clip_offset(offset, Bias::Right); + let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); + let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); + let left_point = snapshot.to_point(left_offset); + + assert_eq!( + left_offset, + excerpt_start.bytes + (buffer_left_offset - buffer_range.start), + "clip_offset({}, Left). buffer: {}, buffer offset: {}", + offset, + buffer_id, + buffer_offset, + ); + assert_eq!( + right_offset, + excerpt_start.bytes + (buffer_right_offset - buffer_range.start), + "clip_offset({}, Right). buffer: {}, buffer offset: {}", + offset, + buffer_id, + buffer_offset, + ); + assert_eq!( + left_point, + excerpt_start.lines + + (buffer.to_point(buffer_left_offset) - buffer_start_point), + "to_point({}). buffer: {}, buffer offset: {}", + offset, + buffer_id, + buffer_offset, + ); + assert_eq!( + snapshot.to_offset(left_point), + left_offset, + "to_offset({:?})", + left_point, + ) + } } + assert_eq!(snapshot.text(), expected_text); for _ in 0..10 { From 39cc0cac936d6f11b3e02ed9ac6070e0819495c7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 6 Dec 2021 17:40:17 -0800 Subject: [PATCH 016/196] Fix Subscription re-export after moving it into its own module --- crates/language/src/buffer.rs | 1 - crates/text/src/text.rs | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b8d1aad91b05a3ca76e478638e2e2257a6bfc9cb..55346fc9dd410c5617c2c1186ca839cc9c0ca32e 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -28,7 +28,6 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; -use text::subscription::Subscription; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index f52a76c3e4b168822640b23cb76eeda0925e8902..3a19977fa5f2457a9d47baa8e7af19346de478d8 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -32,7 +32,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use subscription::{Subscription, Topic}; +pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; From ad33111a22123e8cbad139ca0e30212c8d28cb98 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 6 Dec 2021 17:40:32 -0800 Subject: [PATCH 017/196] Fix assertion in excerpt unit test after fixing edits --- crates/language/src/excerpt_list.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index ff6462a8d10b6e178ff7deb210dee276609cee64..bd2fa558a6a73d9c0646b67b2c429309b708f7e6 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -615,8 +615,8 @@ mod tests { assert_eq!( subscription.consume().into_inner(), [Edit { - old: 18..20, - new: 18..19 + old: 8..10, + new: 8..9 }] ); } From fa379885f1abc48522967d896a7fd9ca80221702 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 8 Dec 2021 09:24:00 -0800 Subject: [PATCH 018/196] Give more specific names to all snapshot and text iterator types --- 
crates/editor/src/display_map.rs | 40 ++++++------ crates/editor/src/display_map/block_map.rs | 26 ++++---- crates/editor/src/display_map/fold_map.rs | 74 +++++++++++----------- crates/editor/src/display_map/tab_map.rs | 34 +++++----- crates/editor/src/display_map/wrap_map.rs | 46 ++++++++------ crates/editor/src/editor.rs | 32 +++++----- crates/editor/src/element.rs | 20 +++--- crates/editor/src/movement.rs | 22 +++---- crates/language/src/buffer.rs | 38 +++++------ crates/language/src/excerpt_list.rs | 6 +- crates/text/src/anchor.rs | 44 ++++++++----- crates/text/src/selection.rs | 10 +-- crates/text/src/text.rs | 44 ++++++------- 13 files changed, 229 insertions(+), 207 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 9167356795d354da5ed332e00c626f1ad807837a..fc871c98622680024870d78f13b081de676dc1dd 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -3,9 +3,6 @@ mod fold_map; mod tab_map; mod wrap_map; -pub use block_map::{ - AlignedBlock, BlockContext, BlockDisposition, BlockId, BlockProperties, BufferRows, Chunks, -}; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle}; @@ -19,8 +16,13 @@ use tab_map::TabMap; use theme::SyntaxTheme; use wrap_map::WrapMap; +pub use block_map::{ + AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext, + BlockDisposition, BlockId, BlockProperties, +}; + pub trait ToDisplayPoint { - fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint; + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint; } pub struct DisplayMap { @@ -61,7 +63,7 @@ impl DisplayMap { } } - pub fn snapshot(&self, cx: &mut ModelContext) -> DisplayMapSnapshot { + pub fn snapshot(&self, cx: &mut ModelContext) -> DisplaySnapshot { let buffer_snapshot = self.buffer.read(cx).snapshot(); let edits = self.buffer_subscription.consume().into_inner(); let (folds_snapshot, edits) = self.fold_map.read(buffer_snapshot, edits); @@ -71,7 +73,7 @@ impl DisplayMap { .update(cx, |map, cx| map.sync(tabs_snapshot.clone(), edits, cx)); let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits, cx); - DisplayMapSnapshot { + DisplaySnapshot { buffer_snapshot: self.buffer.read(cx).snapshot(), folds_snapshot, tabs_snapshot, @@ -176,15 +178,15 @@ impl DisplayMap { } } -pub struct DisplayMapSnapshot { - pub buffer_snapshot: language::Snapshot, - folds_snapshot: fold_map::Snapshot, - tabs_snapshot: tab_map::Snapshot, - wraps_snapshot: wrap_map::Snapshot, +pub struct DisplaySnapshot { + pub buffer_snapshot: language::BufferSnapshot, + folds_snapshot: fold_map::FoldSnapshot, + tabs_snapshot: tab_map::TabSnapshot, + wraps_snapshot: wrap_map::WrapSnapshot, blocks_snapshot: block_map::BlockSnapshot, } -impl DisplayMapSnapshot { +impl DisplaySnapshot { #[cfg(test)] pub fn fold_count(&self) -> usize { self.folds_snapshot.fold_count() @@ -194,7 +196,7 @@ impl DisplayMapSnapshot { self.buffer_snapshot.len() == 0 } - pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> { + pub fn buffer_rows<'a>(&'a self, start_row: u32) -> DisplayBufferRows<'a> { self.blocks_snapshot.buffer_rows(start_row) } @@ -260,7 +262,7 @@ impl DisplayMapSnapshot { &'a self, display_rows: Range, theme: Option<&'a SyntaxTheme>, - ) -> block_map::Chunks<'a> { + ) -> DisplayChunks<'a> { self.blocks_snapshot.chunks(display_rows, theme) } @@ -420,11 +422,11 @@ impl 
DisplayPoint { &mut self.0.column } - pub fn to_point(self, map: &DisplayMapSnapshot) -> Point { + pub fn to_point(self, map: &DisplaySnapshot) -> Point { map.display_point_to_point(self, Bias::Left) } - pub fn to_offset(self, map: &DisplayMapSnapshot, bias: Bias) -> usize { + pub fn to_offset(self, map: &DisplaySnapshot, bias: Bias) -> usize { let unblocked_point = map.blocks_snapshot.to_wrap_point(self.0); let unwrapped_point = map.wraps_snapshot.to_tab_point(unblocked_point); let unexpanded_point = map.tabs_snapshot.to_fold_point(unwrapped_point, bias).0; @@ -433,19 +435,19 @@ impl DisplayPoint { } impl ToDisplayPoint for usize { - fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { map.point_to_display_point(self.to_point(&map.buffer_snapshot), Bias::Left) } } impl ToDisplayPoint for Point { - fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { map.point_to_display_point(*self, Bias::Left) } } impl ToDisplayPoint for Anchor { - fn to_display_point(&self, map: &DisplayMapSnapshot) -> DisplayPoint { + fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint { self.to_point(&map.buffer_snapshot).to_display_point(map) } } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 58ae22403df48903b4ef4a45957340c2c0c27d3d..02cbc0ab3284a6113c1c67547e251d3707e953fb 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,4 +1,4 @@ -use super::wrap_map::{self, Edit as WrapEdit, Snapshot as WrapSnapshot, WrapPoint}; +use super::wrap_map::{self, Edit as WrapEdit, WrapPoint, WrapSnapshot}; use gpui::{AppContext, ElementBox, ModelHandle}; use language::{Buffer, Chunk}; use parking_lot::Mutex; @@ -93,17 +93,17 @@ struct TransformSummary { output_rows: u32, } -pub struct Chunks<'a> { +pub struct BlockChunks<'a> { transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>, - input_chunks: wrap_map::Chunks<'a>, + input_chunks: wrap_map::WrapChunks<'a>, input_chunk: Chunk<'a>, output_row: u32, max_output_row: u32, } -pub struct BufferRows<'a> { +pub struct BlockBufferRows<'a> { transforms: sum_tree::Cursor<'a, Transform, (BlockRow, WrapRow)>, - input_buffer_rows: wrap_map::BufferRows<'a>, + input_buffer_rows: wrap_map::WrapBufferRows<'a>, output_row: u32, started: bool, } @@ -476,7 +476,11 @@ impl BlockSnapshot { .collect() } - pub fn chunks<'a>(&'a self, rows: Range, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> { + pub fn chunks<'a>( + &'a self, + rows: Range, + theme: Option<&'a SyntaxTheme>, + ) -> BlockChunks<'a> { let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); let input_end = { @@ -503,7 +507,7 @@ impl BlockSnapshot { }; cursor.start().1 .0 + overshoot }; - Chunks { + BlockChunks { input_chunks: self.wrap_snapshot.chunks(input_start..input_end, theme), input_chunk: Default::default(), transforms: cursor, @@ -512,7 +516,7 @@ impl BlockSnapshot { } } - pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BufferRows<'a> { + pub fn buffer_rows<'a>(&'a self, start_row: u32) -> BlockBufferRows<'a> { let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); cursor.seek(&BlockRow(start_row), Bias::Right, &()); let (output_start, input_start) = cursor.start(); @@ -522,7 +526,7 @@ impl BlockSnapshot { 0 }; let 
input_start_row = input_start.0 + overshoot; - BufferRows { + BlockBufferRows { transforms: cursor, input_buffer_rows: self.wrap_snapshot.buffer_rows(input_start_row), output_row: start_row, @@ -693,7 +697,7 @@ impl Transform { } } -impl<'a> Iterator for Chunks<'a> { +impl<'a> Iterator for BlockChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { @@ -756,7 +760,7 @@ impl<'a> Iterator for Chunks<'a> { } } -impl<'a> Iterator for BufferRows<'a> { +impl<'a> Iterator for BlockBufferRows<'a> { type Item = Option; fn next(&mut self) -> Option { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 31edebe99c1d7fff23bfe8ee06a55864f5fb59bb..1f8f710ce1764d2fff543ba5e3d93965ea14bd51 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,6 +1,5 @@ use language::{ - Anchor, AnchorRangeExt, Chunk, Edit, Point, PointUtf16, Snapshot as BufferSnapshot, - TextSummary, ToOffset, + Anchor, AnchorRangeExt, BufferSnapshot, Chunk, Edit, Point, PointUtf16, TextSummary, ToOffset, }; use parking_lot::Mutex; use std::{ @@ -13,7 +12,7 @@ use sum_tree::{Bias, Cursor, FilterCursor, SumTree}; use theme::SyntaxTheme; pub trait ToFoldPoint { - fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint; + fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint; } #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] @@ -41,14 +40,14 @@ impl FoldPoint { &mut self.0.column } - pub fn to_buffer_point(&self, snapshot: &Snapshot) -> Point { + pub fn to_buffer_point(&self, snapshot: &FoldSnapshot) -> Point { let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>(); cursor.seek(self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; cursor.start().1 + overshoot } - pub fn to_buffer_offset(&self, snapshot: &Snapshot) -> usize { + pub fn to_buffer_offset(&self, snapshot: &FoldSnapshot) -> usize { let mut cursor = snapshot.transforms.cursor::<(FoldPoint, Point)>(); cursor.seek(self, Bias::Right, &()); let overshoot = self.0 - cursor.start().0 .0; @@ -57,7 +56,7 @@ impl FoldPoint { .to_offset(cursor.start().1 + overshoot) } - pub fn to_offset(&self, snapshot: &Snapshot) -> FoldOffset { + pub fn to_offset(&self, snapshot: &FoldSnapshot) -> FoldOffset { let mut cursor = snapshot .transforms .cursor::<(FoldPoint, TransformSummary)>(); @@ -77,7 +76,7 @@ impl FoldPoint { } impl ToFoldPoint for Point { - fn to_fold_point(&self, snapshot: &Snapshot, bias: Bias) -> FoldPoint { + fn to_fold_point(&self, snapshot: &FoldSnapshot, bias: Bias) -> FoldPoint { let mut cursor = snapshot.transforms.cursor::<(Point, FoldPoint)>(); cursor.seek(self, Bias::Right, &()); if cursor.item().map_or(false, |t| t.is_fold()) { @@ -102,7 +101,7 @@ impl<'a> FoldMapWriter<'a> { pub fn fold( &mut self, ranges: impl IntoIterator>, - ) -> (Snapshot, Vec) { + ) -> (FoldSnapshot, Vec) { let mut edits = Vec::new(); let mut folds = Vec::new(); let buffer = self.0.buffer.lock().clone(); @@ -133,7 +132,7 @@ impl<'a> FoldMapWriter<'a> { consolidate_buffer_edits(&mut edits); let edits = self.0.sync(buffer.clone(), edits); - let snapshot = Snapshot { + let snapshot = FoldSnapshot { transforms: self.0.transforms.lock().clone(), folds: self.0.folds.clone(), buffer_snapshot: buffer, @@ -145,7 +144,7 @@ impl<'a> FoldMapWriter<'a> { pub fn unfold( &mut self, ranges: impl IntoIterator>, - ) -> (Snapshot, Vec) { + ) -> (FoldSnapshot, Vec) { let mut edits = Vec::new(); let mut fold_ixs_to_delete = 
Vec::new(); let buffer = self.0.buffer.lock().clone(); @@ -179,7 +178,7 @@ impl<'a> FoldMapWriter<'a> { consolidate_buffer_edits(&mut edits); let edits = self.0.sync(buffer.clone(), edits); - let snapshot = Snapshot { + let snapshot = FoldSnapshot { transforms: self.0.transforms.lock().clone(), folds: self.0.folds.clone(), buffer_snapshot: buffer, @@ -197,7 +196,7 @@ pub struct FoldMap { } impl FoldMap { - pub fn new(buffer: BufferSnapshot) -> (Self, Snapshot) { + pub fn new(buffer: BufferSnapshot) -> (Self, FoldSnapshot) { let this = Self { buffer: Mutex::new(buffer.clone()), folds: Default::default(), @@ -214,7 +213,7 @@ impl FoldMap { version: Default::default(), }; - let snapshot = Snapshot { + let snapshot = FoldSnapshot { transforms: this.transforms.lock().clone(), folds: this.folds.clone(), buffer_snapshot: this.buffer.lock().clone(), @@ -227,10 +226,10 @@ impl FoldMap { &self, buffer: BufferSnapshot, edits: Vec>, - ) -> (Snapshot, Vec) { + ) -> (FoldSnapshot, Vec) { let edits = self.sync(buffer, edits); self.check_invariants(); - let snapshot = Snapshot { + let snapshot = FoldSnapshot { transforms: self.transforms.lock().clone(), folds: self.folds.clone(), buffer_snapshot: self.buffer.lock().clone(), @@ -243,7 +242,7 @@ impl FoldMap { &mut self, buffer: BufferSnapshot, edits: Vec>, - ) -> (FoldMapWriter, Snapshot, Vec) { + ) -> (FoldMapWriter, FoldSnapshot, Vec) { let (snapshot, edits) = self.read(buffer, edits); (FoldMapWriter(self), snapshot, edits) } @@ -474,14 +473,14 @@ impl FoldMap { } #[derive(Clone)] -pub struct Snapshot { +pub struct FoldSnapshot { transforms: SumTree, folds: SumTree, - buffer_snapshot: language::Snapshot, + buffer_snapshot: language::BufferSnapshot, pub version: usize, } -impl Snapshot { +impl FoldSnapshot { #[cfg(test)] pub fn text(&self) -> String { self.chunks(FoldOffset(0)..self.len(), None) @@ -553,7 +552,7 @@ impl Snapshot { (line_end - line_start) as u32 } - pub fn buffer_rows(&self, start_row: u32) -> BufferRows { + pub fn buffer_rows(&self, start_row: u32) -> FoldBufferRows { if start_row > self.transforms.summary().output.lines.row { panic!("invalid display row {}", start_row); } @@ -561,7 +560,7 @@ impl Snapshot { let fold_point = FoldPoint::new(start_row, 0); let mut cursor = self.transforms.cursor(); cursor.seek(&fold_point, Bias::Left, &()); - BufferRows { fold_point, cursor } + FoldBufferRows { fold_point, cursor } } pub fn max_point(&self) -> FoldPoint { @@ -624,7 +623,7 @@ impl Snapshot { &'a self, range: Range, theme: Option<&'a SyntaxTheme>, - ) -> Chunks<'a> { + ) -> FoldChunks<'a> { let mut transform_cursor = self.transforms.cursor::<(FoldOffset, usize)>(); transform_cursor.seek(&range.end, Bias::Right, &()); @@ -635,7 +634,7 @@ impl Snapshot { let overshoot = range.start.0 - transform_cursor.start().0 .0; let buffer_start = transform_cursor.start().1 + overshoot; - Chunks { + FoldChunks { transform_cursor, buffer_chunks: self.buffer_snapshot.chunks(buffer_start..buffer_end, theme), buffer_chunk: None, @@ -700,7 +699,7 @@ impl Snapshot { } fn intersecting_folds<'a, T>( - buffer: &'a text::Snapshot, + buffer: &'a text::BufferSnapshot, folds: &'a SumTree, range: Range, inclusive: bool, @@ -851,9 +850,9 @@ impl Default for FoldSummary { } impl sum_tree::Summary for FoldSummary { - type Context = text::Snapshot; + type Context = text::BufferSnapshot; - fn add_summary(&mut self, other: &Self, buffer: &text::Snapshot) { + fn add_summary(&mut self, other: &Self, buffer: &text::BufferSnapshot) { if other.min_start.cmp(&self.min_start, 
buffer).unwrap() == Ordering::Less { self.min_start = other.min_start.clone(); } @@ -877,30 +876,30 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold { - fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::BufferSnapshot) { self.0.start = summary.start.clone(); self.0.end = summary.end.clone(); } } impl<'a> sum_tree::SeekTarget<'a, FoldSummary, Fold> for Fold { - fn cmp(&self, other: &Self, buffer: &text::Snapshot) -> Ordering { + fn cmp(&self, other: &Self, buffer: &text::BufferSnapshot) -> Ordering { self.0.cmp(&other.0, buffer).unwrap() } } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { - fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::Snapshot) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::BufferSnapshot) { *self += summary.count; } } -pub struct BufferRows<'a> { +pub struct FoldBufferRows<'a> { cursor: Cursor<'a, Transform, (FoldPoint, Point)>, fold_point: FoldPoint, } -impl<'a> Iterator for BufferRows<'a> { +impl<'a> Iterator for FoldBufferRows<'a> { type Item = u32; fn next(&mut self) -> Option { @@ -923,16 +922,16 @@ impl<'a> Iterator for BufferRows<'a> { } } -pub struct Chunks<'a> { +pub struct FoldChunks<'a> { transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>, - buffer_chunks: language::Chunks<'a>, + buffer_chunks: language::BufferChunks<'a>, buffer_chunk: Option<(usize, Chunk<'a>)>, buffer_offset: usize, output_offset: usize, max_output_offset: usize, } -impl<'a> Iterator for Chunks<'a> { +impl<'a> Iterator for FoldChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { @@ -1006,7 +1005,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint { pub struct FoldOffset(pub usize); impl FoldOffset { - pub fn to_point(&self, snapshot: &Snapshot) -> FoldPoint { + pub fn to_point(&self, snapshot: &FoldSnapshot) -> FoldPoint { let mut cursor = snapshot .transforms .cursor::<(FoldOffset, TransformSummary)>(); @@ -1520,7 +1519,10 @@ mod tests { merged_ranges } - pub fn randomly_mutate(&mut self, rng: &mut impl Rng) -> Vec<(Snapshot, Vec)> { + pub fn randomly_mutate( + &mut self, + rng: &mut impl Rng, + ) -> Vec<(FoldSnapshot, Vec)> { let mut snapshot_edits = Vec::new(); match rng.gen_range(0..=100) { 0..=39 if !self.folds.is_empty() => { diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 1a9163ba2b0227b07bba913508dd160d70ebd749..69a70ecc12a5454679b5fd93a17f012f36aa6953 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,4 +1,4 @@ -use super::fold_map::{self, FoldEdit, FoldPoint, Snapshot as FoldSnapshot, ToFoldPoint}; +use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint}; use language::{rope, Chunk}; use parking_lot::Mutex; use std::{cmp, mem, ops::Range}; @@ -6,11 +6,11 @@ use sum_tree::Bias; use text::Point; use theme::SyntaxTheme; -pub struct TabMap(Mutex); +pub struct TabMap(Mutex); impl TabMap { - pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, Snapshot) { - let snapshot = Snapshot { + pub fn new(input: FoldSnapshot, tab_size: usize) -> (Self, TabSnapshot) { + let snapshot = TabSnapshot { fold_snapshot: input, tab_size, }; @@ -21,10 +21,10 @@ impl TabMap { &self, fold_snapshot: FoldSnapshot, mut fold_edits: Vec, - ) -> (Snapshot, Vec) { + ) -> (TabSnapshot, Vec) { let mut old_snapshot = self.0.lock(); let max_offset = 
old_snapshot.fold_snapshot.len(); - let new_snapshot = Snapshot { + let new_snapshot = TabSnapshot { fold_snapshot, tab_size: old_snapshot.tab_size, }; @@ -93,12 +93,12 @@ impl TabMap { } #[derive(Clone)] -pub struct Snapshot { +pub struct TabSnapshot { pub fold_snapshot: FoldSnapshot, pub tab_size: usize, } -impl Snapshot { +impl TabSnapshot { pub fn text_summary(&self) -> TextSummary { self.text_summary_for_range(TabPoint::zero()..self.max_point()) } @@ -155,7 +155,7 @@ impl Snapshot { &'a self, range: Range, theme: Option<&'a SyntaxTheme>, - ) -> Chunks<'a> { + ) -> TabChunks<'a> { let (input_start, expanded_char_column, to_next_stop) = self.to_fold_point(range.start, Bias::Left); let input_start = input_start.to_offset(&self.fold_snapshot); @@ -169,7 +169,7 @@ impl Snapshot { to_next_stop }; - Chunks { + TabChunks { fold_chunks: self.fold_snapshot.chunks(input_start..input_end, theme), column: expanded_char_column, output_position: range.start.0, @@ -183,7 +183,7 @@ impl Snapshot { } } - pub fn buffer_rows(&self, row: u32) -> fold_map::BufferRows { + pub fn buffer_rows(&self, row: u32) -> fold_map::FoldBufferRows { self.fold_snapshot.buffer_rows(row) } @@ -380,8 +380,8 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { // Handles a tab width <= 16 const SPACES: &'static str = " "; -pub struct Chunks<'a> { - fold_chunks: fold_map::Chunks<'a>, +pub struct TabChunks<'a> { + fold_chunks: fold_map::FoldChunks<'a>, chunk: Chunk<'a>, column: usize, output_position: Point, @@ -390,7 +390,7 @@ pub struct Chunks<'a> { skip_leading_tab: bool, } -impl<'a> Iterator for Chunks<'a> { +impl<'a> Iterator for TabChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { @@ -457,9 +457,9 @@ mod tests { #[test] fn test_expand_tabs() { - assert_eq!(Snapshot::expand_tabs("\t".chars(), 0, 4), 0); - assert_eq!(Snapshot::expand_tabs("\t".chars(), 1, 4), 4); - assert_eq!(Snapshot::expand_tabs("\ta".chars(), 2, 4), 5); + assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 0, 4), 0); + assert_eq!(TabSnapshot::expand_tabs("\t".chars(), 1, 4), 4); + assert_eq!(TabSnapshot::expand_tabs("\ta".chars(), 2, 4), 5); } #[gpui::test(iterations = 100)] diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 04b6c00d6b0476a52666631b9098c8777ed585f2..5eaea6d402271c56625b72e0d58faa7f0c663771 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1,6 +1,6 @@ use super::{ fold_map, - tab_map::{self, Edit as TabEdit, Snapshot as TabSnapshot, TabPoint}, + tab_map::{self, Edit as TabEdit, TabPoint, TabSnapshot}, }; use gpui::{ fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext, @@ -18,7 +18,7 @@ pub use super::tab_map::TextSummary; pub type Edit = text::Edit; pub struct WrapMap { - snapshot: Snapshot, + snapshot: WrapSnapshot, pending_edits: VecDeque<(TabSnapshot, Vec)>, interpolated_edits: Patch, edits_since_sync: Patch, @@ -32,7 +32,7 @@ impl Entity for WrapMap { } #[derive(Clone)] -pub struct Snapshot { +pub struct WrapSnapshot { tab_snapshot: TabSnapshot, transforms: SumTree, interpolated: bool, @@ -53,16 +53,16 @@ struct TransformSummary { #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] pub struct WrapPoint(pub super::Point); -pub struct Chunks<'a> { - input_chunks: tab_map::Chunks<'a>, +pub struct WrapChunks<'a> { + input_chunks: tab_map::TabChunks<'a>, input_chunk: Chunk<'a>, output_position: WrapPoint, max_output_row: u32, transforms: Cursor<'a, 
Transform, (WrapPoint, TabPoint)>, } -pub struct BufferRows<'a> { - input_buffer_rows: fold_map::BufferRows<'a>, +pub struct WrapBufferRows<'a> { + input_buffer_rows: fold_map::FoldBufferRows<'a>, input_buffer_row: u32, output_row: u32, soft_wrapped: bool, @@ -77,7 +77,7 @@ impl WrapMap { font_size: f32, wrap_width: Option, cx: &mut MutableAppContext, - ) -> (ModelHandle, Snapshot) { + ) -> (ModelHandle, WrapSnapshot) { let handle = cx.add_model(|cx| { let mut this = Self { font: (font_id, font_size), @@ -85,7 +85,7 @@ impl WrapMap { pending_edits: Default::default(), interpolated_edits: Default::default(), edits_since_sync: Default::default(), - snapshot: Snapshot::new(tab_snapshot), + snapshot: WrapSnapshot::new(tab_snapshot), background_task: None, }; this.set_wrap_width(wrap_width, cx); @@ -106,7 +106,7 @@ impl WrapMap { tab_snapshot: TabSnapshot, edits: Vec, cx: &mut ModelContext, - ) -> (Snapshot, Vec) { + ) -> (WrapSnapshot, Vec) { if self.wrap_width.is_some() { self.pending_edits.push_back((tab_snapshot, edits)); self.flush_edits(cx); @@ -291,7 +291,7 @@ impl WrapMap { } } -impl Snapshot { +impl WrapSnapshot { fn new(tab_snapshot: TabSnapshot) -> Self { let mut transforms = SumTree::new(); let extent = tab_snapshot.text_summary(); @@ -364,7 +364,7 @@ impl Snapshot { let old_snapshot = mem::replace( self, - Snapshot { + WrapSnapshot { tab_snapshot: new_tab_snapshot, transforms: new_transforms, interpolated: true, @@ -513,7 +513,7 @@ impl Snapshot { let old_snapshot = mem::replace( self, - Snapshot { + WrapSnapshot { tab_snapshot: new_tab_snapshot, transforms: new_transforms, interpolated: false, @@ -523,7 +523,7 @@ impl Snapshot { old_snapshot.compute_edits(tab_edits, self) } - fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &Snapshot) -> Patch { + fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch { let mut wrap_edits = Vec::new(); let mut old_cursor = self.transforms.cursor::(); let mut new_cursor = new_snapshot.transforms.cursor::(); @@ -564,7 +564,11 @@ impl Snapshot { .map(|h| h.text) } - pub fn chunks<'a>(&'a self, rows: Range, theme: Option<&'a SyntaxTheme>) -> Chunks<'a> { + pub fn chunks<'a>( + &'a self, + rows: Range, + theme: Option<&'a SyntaxTheme>, + ) -> WrapChunks<'a> { let output_start = WrapPoint::new(rows.start, 0); let output_end = WrapPoint::new(rows.end, 0); let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); @@ -576,7 +580,7 @@ impl Snapshot { let input_end = self .to_tab_point(output_end) .min(self.tab_snapshot.max_point()); - Chunks { + WrapChunks { input_chunks: self.tab_snapshot.chunks(input_start..input_end, theme), input_chunk: Default::default(), output_position: output_start, @@ -622,7 +626,7 @@ impl Snapshot { self.transforms.summary().output.longest_row } - pub fn buffer_rows(&self, start_row: u32) -> BufferRows { + pub fn buffer_rows(&self, start_row: u32) -> WrapBufferRows { let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(); transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &()); let mut input_row = transforms.start().1.row(); @@ -632,7 +636,7 @@ impl Snapshot { let soft_wrapped = transforms.item().map_or(false, |t| !t.is_isomorphic()); let mut input_buffer_rows = self.tab_snapshot.buffer_rows(input_row); let input_buffer_row = input_buffer_rows.next().unwrap(); - BufferRows { + WrapBufferRows { transforms, input_buffer_row, input_buffer_rows, @@ -727,7 +731,7 @@ impl Snapshot { } } -impl<'a> Iterator for Chunks<'a> { +impl<'a> Iterator for 
WrapChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { @@ -790,7 +794,7 @@ impl<'a> Iterator for Chunks<'a> { } } -impl<'a> Iterator for BufferRows<'a> { +impl<'a> Iterator for WrapBufferRows<'a> { type Item = Option; fn next(&mut self) -> Option { @@ -1224,7 +1228,7 @@ mod tests { } } - impl Snapshot { + impl WrapSnapshot { pub fn text(&self) -> String { self.text_chunks(0).collect() } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4299807f97a233720df9c339724cf2c66c1e41ea..294e9f229ed9992dc359df445694b36dc7e868b0 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -273,11 +273,11 @@ pub fn init(cx: &mut MutableAppContext, entry_openers: &mut Vec Range; + fn display_range(&self, map: &DisplaySnapshot) -> Range; fn spanned_rows( &self, include_end_if_at_line_start: bool, - map: &DisplayMapSnapshot, + map: &DisplaySnapshot, ) -> SpannedRows; } @@ -371,9 +371,9 @@ pub struct Editor { highlighted_row: Option, } -pub struct Snapshot { +pub struct EditorSnapshot { pub mode: EditorMode, - pub display_snapshot: DisplayMapSnapshot, + pub display_snapshot: DisplaySnapshot, pub placeholder_text: Option>, is_focused: bool, scroll_position: Vector2F, @@ -533,8 +533,8 @@ impl Editor { &self.buffer } - pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> Snapshot { - Snapshot { + pub fn snapshot(&mut self, cx: &mut MutableAppContext) -> EditorSnapshot { + EditorSnapshot { mode: self.mode, display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)), scroll_position: self.scroll_position, @@ -986,7 +986,7 @@ impl Editor { tail: DisplayPoint, head: DisplayPoint, overshoot: u32, - display_map: &DisplayMapSnapshot, + display_map: &DisplaySnapshot, cx: &mut ViewContext, ) { let start_row = cmp::min(tail.row(), head.row()); @@ -2966,7 +2966,7 @@ impl Editor { fn build_columnar_selection( &mut self, - display_map: &DisplayMapSnapshot, + display_map: &DisplaySnapshot, row: u32, columns: &Range, reversed: bool, @@ -3271,7 +3271,7 @@ impl Editor { self.unfold_ranges(ranges, cx); } - fn is_line_foldable(&self, display_map: &DisplayMapSnapshot, display_row: u32) -> bool { + fn is_line_foldable(&self, display_map: &DisplaySnapshot, display_row: u32) -> bool { let max_point = display_map.max_point(); if display_row >= max_point.row() { false @@ -3293,7 +3293,7 @@ impl Editor { fn foldable_range_for_line( &self, - display_map: &DisplayMapSnapshot, + display_map: &DisplaySnapshot, start_row: u32, ) -> Range { let max_point = display_map.max_point(); @@ -3450,7 +3450,7 @@ impl Editor { } } -impl Snapshot { +impl EditorSnapshot { pub fn is_focused(&self) -> bool { self.is_focused } @@ -3468,8 +3468,8 @@ impl Snapshot { } } -impl Deref for Snapshot { - type Target = DisplayMapSnapshot; +impl Deref for EditorSnapshot { + type Target = DisplaySnapshot; fn deref(&self) -> &Self::Target { &self.display_snapshot @@ -3525,7 +3525,7 @@ impl EditorSettings { } fn compute_scroll_position( - snapshot: &DisplayMapSnapshot, + snapshot: &DisplaySnapshot, mut scroll_position: Vector2F, scroll_top_anchor: &Anchor, ) -> Vector2F { @@ -3606,7 +3606,7 @@ impl View for Editor { } impl SelectionExt for Selection { - fn display_range(&self, map: &DisplayMapSnapshot) -> Range { + fn display_range(&self, map: &DisplaySnapshot) -> Range { let start = self.start.to_display_point(map); let end = self.end.to_display_point(map); if self.reversed { @@ -3619,7 +3619,7 @@ impl SelectionExt for Selection { fn spanned_rows( &self, include_end_if_at_line_start: bool, 
- map: &DisplayMapSnapshot, + map: &DisplaySnapshot, ) -> SpannedRows { let display_start = self.start.to_display_point(map); let mut display_end = self.end.to_display_point(map); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 45764e65fb662cbdcba6d188c81b73d6b4d6b523..a53fdd3a8bb1f8f2d1600dba7b8f5757d88ccf9b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1,8 +1,8 @@ use crate::display_map::{BlockContext, ToDisplayPoint}; use super::{ - DisplayPoint, Editor, EditorMode, EditorSettings, EditorStyle, Input, Scroll, Select, - SelectPhase, Snapshot, SoftWrap, MAX_LINE_LEN, + DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input, Scroll, + Select, SelectPhase, SoftWrap, MAX_LINE_LEN, }; use clock::ReplicaId; use gpui::{ @@ -49,7 +49,7 @@ impl EditorElement { self.view.upgrade(cx).unwrap().update(cx, f) } - fn snapshot(&self, cx: &mut MutableAppContext) -> Snapshot { + fn snapshot(&self, cx: &mut MutableAppContext) -> EditorSnapshot { self.update_view(cx, |view, cx| view.snapshot(cx)) } @@ -434,7 +434,7 @@ impl EditorElement { } } - fn max_line_number_width(&self, snapshot: &Snapshot, cx: &LayoutContext) -> f32 { + fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &LayoutContext) -> f32 { let digit_count = (snapshot.buffer_row_count() as f32).log10().floor() as usize + 1; let style = &self.settings.style; @@ -458,7 +458,7 @@ impl EditorElement { &self, rows: Range, active_rows: &BTreeMap, - snapshot: &Snapshot, + snapshot: &EditorSnapshot, cx: &LayoutContext, ) -> Vec> { let style = &self.settings.style; @@ -504,7 +504,7 @@ impl EditorElement { fn layout_lines( &mut self, mut rows: Range, - snapshot: &mut Snapshot, + snapshot: &mut EditorSnapshot, cx: &LayoutContext, ) -> Vec { rows.end = cmp::min(rows.end, snapshot.max_point().row() + 1); @@ -623,7 +623,7 @@ impl EditorElement { fn layout_blocks( &mut self, rows: Range, - snapshot: &Snapshot, + snapshot: &EditorSnapshot, text_width: f32, line_height: f32, style: &EditorStyle, @@ -923,7 +923,7 @@ pub struct LayoutState { gutter_padding: f32, text_size: Vector2F, style: EditorStyle, - snapshot: Snapshot, + snapshot: EditorSnapshot, active_rows: BTreeMap, highlighted_row: Option, line_layouts: Vec, @@ -961,7 +961,7 @@ impl LayoutState { fn layout_line( row: u32, - snapshot: &Snapshot, + snapshot: &EditorSnapshot, style: &EditorStyle, layout_cache: &TextLayoutCache, ) -> text_layout::Line { @@ -998,7 +998,7 @@ pub struct PaintState { impl PaintState { fn point_for_position( &self, - snapshot: &Snapshot, + snapshot: &EditorSnapshot, layout: &LayoutState, position: Vector2F, ) -> (DisplayPoint, u32) { diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index bffd21be2afd696cd824e0290eed2ac7ab32f538..9ba6cbc08dfd4a91e23e6195bad9cecfe5deb585 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -1,9 +1,9 @@ -use super::{Bias, DisplayMapSnapshot, DisplayPoint, SelectionGoal, ToDisplayPoint}; +use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; use anyhow::Result; use std::{cmp, ops::Range}; use text::ToPoint; -pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result { +pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result { if point.column() > 0 { *point.column_mut() -= 1; } else if point.row() > 0 { @@ -13,7 +13,7 @@ pub fn left(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result Result { +pub fn right(map: &DisplaySnapshot, 
mut point: DisplayPoint) -> Result { let max_column = map.line_len(point.row()); if point.column() < max_column { *point.column_mut() += 1; @@ -25,7 +25,7 @@ pub fn right(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> Result Result<(DisplayPoint, SelectionGoal)> { @@ -61,7 +61,7 @@ pub fn up( } pub fn down( - map: &DisplayMapSnapshot, + map: &DisplaySnapshot, mut point: DisplayPoint, goal: SelectionGoal, ) -> Result<(DisplayPoint, SelectionGoal)> { @@ -98,7 +98,7 @@ pub fn down( } pub fn line_beginning( - map: &DisplayMapSnapshot, + map: &DisplaySnapshot, point: DisplayPoint, toggle_indent: bool, ) -> DisplayPoint { @@ -110,12 +110,12 @@ pub fn line_beginning( } } -pub fn line_end(map: &DisplayMapSnapshot, point: DisplayPoint) -> DisplayPoint { +pub fn line_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint { let line_end = DisplayPoint::new(point.row(), map.line_len(point.row())); map.clip_point(line_end, Bias::Left) } -pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint { +pub fn prev_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint { let mut line_start = 0; if point.row() > 0 { if let Some(indent) = map.soft_wrap_indent(point.row() - 1) { @@ -154,7 +154,7 @@ pub fn prev_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> boundary } -pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> DisplayPoint { +pub fn next_word_boundary(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint { let mut prev_char_kind = None; for c in map.chars_at(point) { let char_kind = char_kind(c); @@ -181,7 +181,7 @@ pub fn next_word_boundary(map: &DisplayMapSnapshot, mut point: DisplayPoint) -> point } -pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool { +pub fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool { let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left); let text = &map.buffer_snapshot; let next_char_kind = text.chars_at(ix).next().map(char_kind); @@ -189,7 +189,7 @@ pub fn is_inside_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> bool { prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word)) } -pub fn surrounding_word(map: &DisplayMapSnapshot, point: DisplayPoint) -> Range { +pub fn surrounding_word(map: &DisplaySnapshot, point: DisplayPoint) -> Range { let mut start = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left); let mut end = start; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 55346fc9dd410c5617c2c1186ca839cc9c0ca32e..aadf5c5d5980f794ea713767459fa4c11687f597 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -68,8 +68,8 @@ pub struct Buffer { pub(crate) operations: Vec, } -pub struct Snapshot { - text: text::Snapshot, +pub struct BufferSnapshot { + text: text::BufferSnapshot, tree: Option, diagnostics: AnchorRangeMultimap, diagnostics_update_count: usize, @@ -96,7 +96,7 @@ struct LanguageServerState { #[derive(Clone)] struct LanguageServerSnapshot { - buffer_snapshot: text::Snapshot, + buffer_snapshot: text::BufferSnapshot, version: usize, path: Arc, } @@ -172,7 +172,7 @@ struct SyntaxTree { #[derive(Clone)] struct AutoindentRequest { selection_set_ids: HashSet, - before_edit: Snapshot, + before_edit: BufferSnapshot, edited: AnchorSet, inserted: Option, } @@ -185,7 +185,7 @@ struct IndentSuggestion { struct TextProvider<'a>(&'a Rope); -struct Highlights<'a> { +struct 
BufferChunkHighlights<'a> { captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>, next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>, stack: Vec<(usize, HighlightId)>, @@ -194,7 +194,7 @@ struct Highlights<'a> { _query_cursor: QueryCursorHandle, } -pub struct Chunks<'a> { +pub struct BufferChunks<'a> { range: Range, chunks: rope::Chunks<'a>, diagnostic_endpoints: Peekable>, @@ -202,7 +202,7 @@ pub struct Chunks<'a> { warning_depth: usize, information_depth: usize, hint_depth: usize, - highlights: Option>, + highlights: Option>, } #[derive(Clone, Copy, Debug, Default)] @@ -336,8 +336,8 @@ impl Buffer { } } - pub fn snapshot(&self) -> Snapshot { - Snapshot { + pub fn snapshot(&self) -> BufferSnapshot { + BufferSnapshot { text: self.text.snapshot(), tree: self.syntax_tree(), diagnostics: self.diagnostics.clone(), @@ -1512,7 +1512,7 @@ impl Deref for Buffer { } } -impl Snapshot { +impl BufferSnapshot { fn suggest_autoindents<'a>( &'a self, row_range: Range, @@ -1626,7 +1626,7 @@ impl Snapshot { &'a self, range: Range, theme: Option<&'a SyntaxTheme>, - ) -> Chunks<'a> { + ) -> BufferChunks<'a> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut highlights = None; @@ -1662,7 +1662,7 @@ impl Snapshot { tree.root_node(), TextProvider(self.text.as_rope()), ); - highlights = Some(Highlights { + highlights = Some(BufferChunkHighlights { captures, next_capture: None, stack: Default::default(), @@ -1676,7 +1676,7 @@ impl Snapshot { let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable(); let chunks = self.text.as_rope().chunks_in_range(range.clone()); - Chunks { + BufferChunks { range, chunks, diagnostic_endpoints, @@ -1703,7 +1703,7 @@ impl Snapshot { } } -impl Clone for Snapshot { +impl Clone for BufferSnapshot { fn clone(&self) -> Self { Self { text: self.text.clone(), @@ -1717,8 +1717,8 @@ impl Clone for Snapshot { } } -impl Deref for Snapshot { - type Target = text::Snapshot; +impl Deref for BufferSnapshot { + type Target = text::BufferSnapshot; fn deref(&self) -> &Self::Target { &self.text @@ -1743,9 +1743,9 @@ impl<'a> Iterator for ByteChunks<'a> { } } -unsafe impl<'a> Send for Chunks<'a> {} +unsafe impl<'a> Send for BufferChunks<'a> {} -impl<'a> Chunks<'a> { +impl<'a> BufferChunks<'a> { pub fn seek(&mut self, offset: usize) { self.range.start = offset; self.chunks.seek(self.range.start); @@ -1804,7 +1804,7 @@ impl<'a> Chunks<'a> { } } -impl<'a> Iterator for Chunks<'a> { +impl<'a> Iterator for BufferChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index bd2fa558a6a73d9c0646b67b2c429309b708f7e6..1791031de9c111cf1901656bf3b20020339f041d 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -47,7 +47,7 @@ pub struct ExcerptProperties<'a, T> { #[derive(Clone)] struct Excerpt { id: ExcerptId, - buffer: buffer::Snapshot, + buffer: buffer::BufferSnapshot, range: Range, text_summary: TextSummary, header_height: u8, @@ -66,7 +66,7 @@ pub struct Chunks<'a> { range: Range, cursor: Cursor<'a, Excerpt, usize>, header_height: u8, - entry_chunks: Option>, + entry_chunks: Option>, theme: Option<&'a SyntaxTheme>, } @@ -339,7 +339,7 @@ impl Snapshot { impl Excerpt { fn new( id: ExcerptId, - buffer: buffer::Snapshot, + buffer: buffer::BufferSnapshot, range: Range, header_height: u8, ) -> Self { diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 
846c57274b3724e5a2f3680e8d5bf43fe16b4fda..5f02a0e03bb229bebec1f0c664964c38fea6334e 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,4 +1,4 @@ -use crate::{rope::TextDimension, Snapshot}; +use crate::{rope::TextDimension, BufferSnapshot}; use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset}; use anyhow::Result; @@ -83,7 +83,7 @@ impl Anchor { } } - pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { + pub fn cmp<'a>(&self, other: &Anchor, buffer: &BufferSnapshot) -> Result { if self == other { return Ok(Ordering::Equal); } @@ -115,7 +115,7 @@ impl Anchor { } } - pub fn summary<'a, D>(&self, content: &'a Snapshot) -> D + pub fn summary<'a, D>(&self, content: &'a BufferSnapshot) -> D where D: TextDimension<'a>, { @@ -132,7 +132,10 @@ impl AnchorMap { self.entries.len() } - pub fn iter<'a, D>(&'a self, snapshot: &'a Snapshot) -> impl Iterator + 'a + pub fn iter<'a, D>( + &'a self, + snapshot: &'a BufferSnapshot, + ) -> impl Iterator + 'a where D: 'a + TextDimension<'a>, { @@ -155,7 +158,7 @@ impl AnchorSet { self.0.len() } - pub fn iter<'a, D>(&'a self, content: &'a Snapshot) -> impl Iterator + 'a + pub fn iter<'a, D>(&'a self, content: &'a BufferSnapshot) -> impl Iterator + 'a where D: 'a + TextDimension<'a>, { @@ -188,7 +191,7 @@ impl AnchorRangeMap { pub fn ranges<'a, D>( &'a self, - content: &'a Snapshot, + content: &'a BufferSnapshot, ) -> impl Iterator, &'a T)> + 'a where D: 'a + TextDimension<'a>, @@ -206,7 +209,7 @@ impl AnchorRangeMap { pub fn intersecting_ranges<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a Snapshot, + content: &'a BufferSnapshot, ) -> impl Iterator, &'a T)> + 'a where D: 'a + TextDimension<'a>, @@ -243,7 +246,7 @@ impl AnchorRangeMap { pub fn min_by_key<'a, D, F, K>( &self, - content: &'a Snapshot, + content: &'a BufferSnapshot, mut extract_key: F, ) -> Option<(Range, &T)> where @@ -259,7 +262,7 @@ impl AnchorRangeMap { pub fn max_by_key<'a, D, F, K>( &self, - content: &'a Snapshot, + content: &'a BufferSnapshot, mut extract_key: F, ) -> Option<(Range, &T)> where @@ -273,7 +276,11 @@ impl AnchorRangeMap { .map(|(range, value)| (self.resolve_range(range, &content), value)) } - fn resolve_range<'a, D>(&self, range: &Range, content: &'a Snapshot) -> Range + fn resolve_range<'a, D>( + &self, + range: &Range, + content: &'a BufferSnapshot, + ) -> Range where D: 'a + TextDimension<'a>, { @@ -330,7 +337,10 @@ impl AnchorRangeSet { self.0.version() } - pub fn ranges<'a, D>(&'a self, content: &'a Snapshot) -> impl 'a + Iterator> + pub fn ranges<'a, D>( + &'a self, + content: &'a BufferSnapshot, + ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, { @@ -357,7 +367,7 @@ impl AnchorRangeMultimap { pub fn intersecting_ranges<'a, I, O>( &'a self, range: Range, - content: &'a Snapshot, + content: &'a BufferSnapshot, inclusive: bool, ) -> impl Iterator, &T)> + 'a where @@ -451,7 +461,7 @@ impl AnchorRangeMultimap { pub fn filter<'a, O, F>( &'a self, - content: &'a Snapshot, + content: &'a BufferSnapshot, mut f: F, ) -> impl 'a + Iterator, &T)> where @@ -560,19 +570,19 @@ impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> f } pub trait AnchorRangeExt { - fn cmp(&self, b: &Range, buffer: &Snapshot) -> Result; - fn to_offset(&self, content: &Snapshot) -> Range; + fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Result; + fn to_offset(&self, content: &BufferSnapshot) -> Range; } impl AnchorRangeExt for Range { - fn cmp(&self, other: &Range, buffer: &Snapshot) -> Result { + fn 
cmp(&self, other: &Range, buffer: &BufferSnapshot) -> Result { Ok(match self.start.cmp(&other.start, buffer)? { Ordering::Equal => other.end.cmp(&self.end, buffer)?, ord @ _ => ord, }) } - fn to_offset(&self, content: &Snapshot) -> Range { + fn to_offset(&self, content: &BufferSnapshot) -> Range { self.start.to_offset(&content)..self.end.to_offset(&content) } } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index eaa2409772744806c0f2018946781b4b4117f8f3..6af10395272e0fcf7ebe0255a2acecdc9f0724ad 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,6 +1,6 @@ use sum_tree::Bias; -use crate::{rope::TextDimension, Snapshot}; +use crate::{rope::TextDimension, BufferSnapshot}; use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; @@ -105,7 +105,7 @@ impl SelectionSet { pub fn selections<'a, D>( &'a self, - content: &'a Snapshot, + content: &'a BufferSnapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, @@ -124,7 +124,7 @@ impl SelectionSet { pub fn intersecting_selections<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a Snapshot, + content: &'a BufferSnapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, @@ -141,7 +141,7 @@ impl SelectionSet { }) } - pub fn oldest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn oldest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> where D: 'a + TextDimension<'a>, { @@ -156,7 +156,7 @@ impl SelectionSet { }) } - pub fn newest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn newest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> where D: 'a + TextDimension<'a>, { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 3a19977fa5f2457a9d47baa8e7af19346de478d8..f99de77ab6e1c8bdd82e763de158709453628cea 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -37,7 +37,7 @@ pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; pub struct Buffer { - snapshot: Snapshot, + snapshot: BufferSnapshot, last_edit: clock::Local, history: History, selections: HashMap, @@ -51,7 +51,7 @@ pub struct Buffer { } #[derive(Clone, Debug)] -pub struct Snapshot { +pub struct BufferSnapshot { visible_text: Rope, deleted_text: Rope, undo_map: UndoMap, @@ -473,7 +473,7 @@ impl Buffer { } Buffer { - snapshot: Snapshot { + snapshot: BufferSnapshot { visible_text, deleted_text: Rope::new(), fragments, @@ -497,8 +497,8 @@ impl Buffer { self.version.clone() } - pub fn snapshot(&self) -> Snapshot { - Snapshot { + pub fn snapshot(&self) -> BufferSnapshot { + BufferSnapshot { visible_text: self.visible_text.clone(), deleted_text: self.deleted_text.clone(), undo_map: self.undo_map.clone(), @@ -1476,14 +1476,14 @@ impl Buffer { } impl Deref for Buffer { - type Target = Snapshot; + type Target = BufferSnapshot; fn deref(&self) -> &Self::Target { &self.snapshot } } -impl Snapshot { +impl BufferSnapshot { pub fn as_rope(&self) -> &Rope { &self.visible_text } @@ -2254,9 +2254,9 @@ impl Operation { } pub trait ToOffset { - fn to_offset<'a>(&self, content: &Snapshot) -> usize; + fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize; - fn to_full_offset<'a>(&self, content: &Snapshot, bias: Bias) -> FullOffset { + fn to_full_offset<'a>(&self, content: &BufferSnapshot, bias: Bias) -> FullOffset { let offset = self.to_offset(&content); let mut cursor = content.fragments.cursor::(); cursor.seek(&offset, bias, &None); @@ -2265,30 +2265,30 @@ 
pub trait ToOffset { } impl ToOffset for Point { - fn to_offset<'a>(&self, content: &Snapshot) -> usize { + fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { content.visible_text.point_to_offset(*self) } } impl ToOffset for PointUtf16 { - fn to_offset<'a>(&self, content: &Snapshot) -> usize { + fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { content.visible_text.point_utf16_to_offset(*self) } } impl ToOffset for usize { - fn to_offset<'a>(&self, content: &Snapshot) -> usize { + fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { assert!(*self <= content.len(), "offset is out of range"); *self } } impl ToOffset for Anchor { - fn to_offset<'a>(&self, content: &Snapshot) -> usize { + fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { content.summary_for_anchor(self) } - fn to_full_offset<'a>(&self, content: &Snapshot, bias: Bias) -> FullOffset { + fn to_full_offset<'a>(&self, content: &BufferSnapshot, bias: Bias) -> FullOffset { if content.version == self.version { self.full_offset } else { @@ -2312,45 +2312,45 @@ impl ToOffset for Anchor { } impl<'a> ToOffset for &'a Anchor { - fn to_offset(&self, content: &Snapshot) -> usize { + fn to_offset(&self, content: &BufferSnapshot) -> usize { content.summary_for_anchor(self) } } pub trait ToPoint { - fn to_point<'a>(&self, content: &Snapshot) -> Point; + fn to_point<'a>(&self, content: &BufferSnapshot) -> Point; } impl ToPoint for Anchor { - fn to_point<'a>(&self, content: &Snapshot) -> Point { + fn to_point<'a>(&self, content: &BufferSnapshot) -> Point { content.summary_for_anchor(self) } } impl ToPoint for usize { - fn to_point<'a>(&self, content: &Snapshot) -> Point { + fn to_point<'a>(&self, content: &BufferSnapshot) -> Point { content.visible_text.offset_to_point(*self) } } impl ToPoint for Point { - fn to_point<'a>(&self, _: &Snapshot) -> Point { + fn to_point<'a>(&self, _: &BufferSnapshot) -> Point { *self } } pub trait FromAnchor { - fn from_anchor(anchor: &Anchor, content: &Snapshot) -> Self; + fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self; } impl FromAnchor for Point { - fn from_anchor(anchor: &Anchor, content: &Snapshot) -> Self { + fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self { anchor.to_point(content) } } impl FromAnchor for usize { - fn from_anchor(anchor: &Anchor, content: &Snapshot) -> Self { + fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self { anchor.to_offset(content) } } From 6a44a7448ed0c67d6b63df9f3ffdec08cebf13d1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 8 Dec 2021 09:33:55 -0800 Subject: [PATCH 019/196] Consolidate Edit types in editor crate --- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 72 ++++++++------------ crates/editor/src/display_map/tab_map.rs | 46 ++++--------- crates/editor/src/display_map/wrap_map.rs | 79 +++++++++++----------- 4 files changed, 83 insertions(+), 116 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 02cbc0ab3284a6113c1c67547e251d3707e953fb..71aa3838578d85a1663385dde062e8a5a0a8ca23 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,4 +1,4 @@ -use super::wrap_map::{self, Edit as WrapEdit, WrapPoint, WrapSnapshot}; +use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}; use gpui::{AppContext, ElementBox, ModelHandle}; use language::{Buffer, Chunk}; use parking_lot::Mutex; diff --git 
a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 1f8f710ce1764d2fff543ba5e3d93965ea14bd51..eea8ebe4f5144ba6ebf84ec56ad67ce2501e73ad 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -5,7 +5,7 @@ use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, iter, - ops::Range, + ops::{Range, Sub}, sync::atomic::{AtomicUsize, Ordering::SeqCst}, }; use sum_tree::{Bias, Cursor, FilterCursor, SumTree}; @@ -456,8 +456,8 @@ impl FoldMap { new_transforms.start().1 .0 + (edit.new.end - new_transforms.start().0); fold_edits.push(FoldEdit { - old_bytes: FoldOffset(old_start)..FoldOffset(old_end), - new_bytes: FoldOffset(new_start)..FoldOffset(new_end), + old: FoldOffset(old_start)..FoldOffset(old_end), + new: FoldOffset(new_start)..FoldOffset(new_end), }); } @@ -749,20 +749,20 @@ fn consolidate_buffer_edits(edits: &mut Vec>) { fn consolidate_fold_edits(edits: &mut Vec) { edits.sort_unstable_by(|a, b| { - a.old_bytes + a.old .start - .cmp(&b.old_bytes.start) - .then_with(|| b.old_bytes.end.cmp(&a.old_bytes.end)) + .cmp(&b.old.start) + .then_with(|| b.old.end.cmp(&a.old.end)) }); let mut i = 1; while i < edits.len() { let edit = edits[i].clone(); let prev_edit = &mut edits[i - 1]; - if prev_edit.old_bytes.end >= edit.old_bytes.start { - prev_edit.old_bytes.end = prev_edit.old_bytes.end.max(edit.old_bytes.end); - prev_edit.new_bytes.start = prev_edit.new_bytes.start.min(edit.new_bytes.start); - prev_edit.new_bytes.end = prev_edit.new_bytes.end.max(edit.new_bytes.end); + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = prev_edit.old.end.max(edit.old.end); + prev_edit.new.start = prev_edit.new.start.min(edit.new.start); + prev_edit.new.end = prev_edit.new.end.max(edit.new.end); edits.remove(i); continue; } @@ -1021,6 +1021,14 @@ impl FoldOffset { } } +impl Sub for FoldOffset { + type Output = Self; + + fn sub(self, rhs: Self) -> Self::Output { + Self(self.0 - rhs.0) + } +} + impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldOffset { fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.output.bytes; @@ -1039,26 +1047,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct FoldEdit { - pub old_bytes: Range, - pub new_bytes: Range, -} - -#[cfg(test)] -impl FoldEdit { - pub fn delta(&self) -> isize { - self.inserted_bytes() as isize - self.deleted_bytes() as isize - } - - pub fn deleted_bytes(&self) -> usize { - self.old_bytes.end.0 - self.old_bytes.start.0 - } - - pub fn inserted_bytes(&self) -> usize { - self.new_bytes.end.0 - self.new_bytes.start.0 - } -} +pub type FoldEdit = Edit; #[cfg(test)] mod tests { @@ -1087,12 +1076,12 @@ mod tests { edits, &[ FoldEdit { - old_bytes: FoldOffset(2)..FoldOffset(16), - new_bytes: FoldOffset(2)..FoldOffset(5), + old: FoldOffset(2)..FoldOffset(16), + new: FoldOffset(2)..FoldOffset(5), }, FoldEdit { - old_bytes: FoldOffset(18)..FoldOffset(29), - new_bytes: FoldOffset(7)..FoldOffset(10) + old: FoldOffset(18)..FoldOffset(29), + new: FoldOffset(7)..FoldOffset(10) }, ] ); @@ -1115,12 +1104,12 @@ mod tests { edits, &[ FoldEdit { - old_bytes: FoldOffset(0)..FoldOffset(1), - new_bytes: FoldOffset(0)..FoldOffset(3), + old: FoldOffset(0)..FoldOffset(1), + new: FoldOffset(0)..FoldOffset(3), }, FoldEdit { - old_bytes: FoldOffset(6)..FoldOffset(6), - new_bytes: FoldOffset(8)..FoldOffset(11), + old: FoldOffset(6)..FoldOffset(6), + new: FoldOffset(8)..FoldOffset(11), 
}, ] ); @@ -1454,12 +1443,9 @@ mod tests { let mut text = initial_snapshot.text(); for (snapshot, edits) in snapshot_edits.drain(..) { let new_text = snapshot.text(); - let mut delta = 0isize; for edit in edits { - let old_bytes = ((edit.old_bytes.start.0 as isize) + delta) as usize - ..((edit.old_bytes.end.0 as isize) + delta) as usize; - let new_bytes = edit.new_bytes.start.0..edit.new_bytes.end.0; - delta += edit.delta(); + let old_bytes = edit.new.start.0..edit.new.start.0 + edit.old_len().0; + let new_bytes = edit.new.start.0..edit.new.end.0; text.replace_range(old_bytes, &new_text[new_bytes]); } diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 69a70ecc12a5454679b5fd93a17f012f36aa6953..c0c67097ba54e8122c7290f5b5c2190fac4d367c 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -21,7 +21,7 @@ impl TabMap { &self, fold_snapshot: FoldSnapshot, mut fold_edits: Vec, - ) -> (TabSnapshot, Vec) { + ) -> (TabSnapshot, Vec) { let mut old_snapshot = self.0.lock(); let max_offset = old_snapshot.fold_snapshot.len(); let new_snapshot = TabSnapshot { @@ -34,13 +34,13 @@ impl TabMap { let mut delta = 0; for chunk in old_snapshot .fold_snapshot - .chunks(fold_edit.old_bytes.end..max_offset, None) + .chunks(fold_edit.old.end..max_offset, None) { let patterns: &[_] = &['\t', '\n']; if let Some(ix) = chunk.text.find(patterns) { if &chunk.text[ix..ix + 1] == "\t" { - fold_edit.old_bytes.end.0 += delta + ix + 1; - fold_edit.new_bytes.end.0 += delta + ix + 1; + fold_edit.old.end.0 += delta + ix + 1; + fold_edit.new.end.0 += delta + ix + 1; } break; @@ -55,9 +55,9 @@ impl TabMap { let (prev_edits, next_edits) = fold_edits.split_at_mut(ix); let prev_edit = prev_edits.last_mut().unwrap(); let edit = &next_edits[0]; - if prev_edit.old_bytes.end >= edit.old_bytes.start { - prev_edit.old_bytes.end = edit.old_bytes.end; - prev_edit.new_bytes.end = edit.new_bytes.end; + if prev_edit.old.end >= edit.old.start { + prev_edit.old.end = edit.old.end; + prev_edit.new.end = edit.new.end; fold_edits.remove(ix); } else { ix += 1; @@ -65,25 +65,13 @@ impl TabMap { } for fold_edit in fold_edits { - let old_start = fold_edit - .old_bytes - .start - .to_point(&old_snapshot.fold_snapshot); - let old_end = fold_edit - .old_bytes - .end - .to_point(&old_snapshot.fold_snapshot); - let new_start = fold_edit - .new_bytes - .start - .to_point(&new_snapshot.fold_snapshot); - let new_end = fold_edit - .new_bytes - .end - .to_point(&new_snapshot.fold_snapshot); - tab_edits.push(Edit { - old_lines: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end), - new_lines: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end), + let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + tab_edits.push(TabEdit { + old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end), + new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end), }); } @@ -322,11 +310,7 @@ impl From for TabPoint { } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Edit { - pub old_lines: Range, - pub new_lines: Range, -} +pub type TabEdit = text::Edit; #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct TextSummary { diff --git 
a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 5eaea6d402271c56625b72e0d58faa7f0c663771..d51f8373ad5b23111b0ad40e57ac1098af511f60 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1,6 +1,6 @@ use super::{ fold_map, - tab_map::{self, Edit as TabEdit, TabPoint, TabSnapshot}, + tab_map::{self, TabEdit, TabPoint, TabSnapshot}, }; use gpui::{ fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext, @@ -15,7 +15,7 @@ use text::Patch; use theme::SyntaxTheme; pub use super::tab_map::TextSummary; -pub type Edit = text::Edit; +pub type WrapEdit = text::Edit; pub struct WrapMap { snapshot: WrapSnapshot, @@ -106,7 +106,7 @@ impl WrapMap { tab_snapshot: TabSnapshot, edits: Vec, cx: &mut ModelContext, - ) -> (WrapSnapshot, Vec) { + ) -> (WrapSnapshot, Vec) { if self.wrap_width.is_some() { self.pending_edits.push_back((tab_snapshot, edits)); self.flush_edits(cx); @@ -157,8 +157,8 @@ impl WrapMap { .update( tab_snapshot, &[TabEdit { - old_lines: range.clone(), - new_lines: range.clone(), + old: range.clone(), + new: range.clone(), }], wrap_width, &mut line_wrapper, @@ -203,7 +203,7 @@ impl WrapMap { } let new_rows = self.snapshot.transforms.summary().output.lines.row + 1; self.snapshot.interpolated = false; - self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![Edit { + self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![WrapEdit { old: 0..old_rows, new: 0..new_rows, }])); @@ -313,47 +313,44 @@ impl WrapSnapshot { let mut old_cursor = self.transforms.cursor::(); let mut tab_edits_iter = tab_edits.iter().peekable(); - new_transforms = old_cursor.slice( - &tab_edits_iter.peek().unwrap().old_lines.start, - Bias::Right, - &(), - ); + new_transforms = + old_cursor.slice(&tab_edits_iter.peek().unwrap().old.start, Bias::Right, &()); while let Some(edit) = tab_edits_iter.next() { - if edit.new_lines.start > TabPoint::from(new_transforms.summary().input.lines) { + if edit.new.start > TabPoint::from(new_transforms.summary().input.lines) { let summary = new_tab_snapshot.text_summary_for_range( - TabPoint::from(new_transforms.summary().input.lines)..edit.new_lines.start, + TabPoint::from(new_transforms.summary().input.lines)..edit.new.start, ); new_transforms.push_or_extend(Transform::isomorphic(summary)); } - if !edit.new_lines.is_empty() { + if !edit.new.is_empty() { new_transforms.push_or_extend(Transform::isomorphic( - new_tab_snapshot.text_summary_for_range(edit.new_lines.clone()), + new_tab_snapshot.text_summary_for_range(edit.new.clone()), )); } - old_cursor.seek_forward(&edit.old_lines.end, Bias::Right, &()); + old_cursor.seek_forward(&edit.old.end, Bias::Right, &()); if let Some(next_edit) = tab_edits_iter.peek() { - if next_edit.old_lines.start > old_cursor.end(&()) { - if old_cursor.end(&()) > edit.old_lines.end { + if next_edit.old.start > old_cursor.end(&()) { + if old_cursor.end(&()) > edit.old.end { let summary = self .tab_snapshot - .text_summary_for_range(edit.old_lines.end..old_cursor.end(&())); + .text_summary_for_range(edit.old.end..old_cursor.end(&())); new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); new_transforms.push_tree( - old_cursor.slice(&next_edit.old_lines.start, Bias::Right, &()), + old_cursor.slice(&next_edit.old.start, Bias::Right, &()), &(), ); } } else { - if old_cursor.end(&()) > edit.old_lines.end { + if old_cursor.end(&()) > edit.old.end { let summary = self 
.tab_snapshot - .text_summary_for_range(edit.old_lines.end..old_cursor.end(&())); + .text_summary_for_range(edit.old.end..old_cursor.end(&())); new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); @@ -391,14 +388,14 @@ impl WrapSnapshot { let mut row_edits = Vec::new(); while let Some(edit) = tab_edits_iter.next() { let mut row_edit = RowEdit { - old_rows: edit.old_lines.start.row()..edit.old_lines.end.row() + 1, - new_rows: edit.new_lines.start.row()..edit.new_lines.end.row() + 1, + old_rows: edit.old.start.row()..edit.old.end.row() + 1, + new_rows: edit.new.start.row()..edit.new.end.row() + 1, }; while let Some(next_edit) = tab_edits_iter.peek() { - if next_edit.old_lines.start.row() <= row_edit.old_rows.end { - row_edit.old_rows.end = next_edit.old_lines.end.row() + 1; - row_edit.new_rows.end = next_edit.new_lines.end.row() + 1; + if next_edit.old.start.row() <= row_edit.old_rows.end { + row_edit.old_rows.end = next_edit.old.end.row() + 1; + row_edit.new_rows.end = next_edit.new.end.row() + 1; tab_edits_iter.next(); } else { break; @@ -528,28 +525,28 @@ impl WrapSnapshot { let mut old_cursor = self.transforms.cursor::(); let mut new_cursor = new_snapshot.transforms.cursor::(); for mut tab_edit in tab_edits.iter().cloned() { - tab_edit.old_lines.start.0.column = 0; - tab_edit.old_lines.end.0 += Point::new(1, 0); - tab_edit.new_lines.start.0.column = 0; - tab_edit.new_lines.end.0 += Point::new(1, 0); + tab_edit.old.start.0.column = 0; + tab_edit.old.end.0 += Point::new(1, 0); + tab_edit.new.start.0.column = 0; + tab_edit.new.end.0 += Point::new(1, 0); - old_cursor.seek(&tab_edit.old_lines.start, Bias::Right, &()); + old_cursor.seek(&tab_edit.old.start, Bias::Right, &()); let mut old_start = old_cursor.start().output.lines; - old_start += tab_edit.old_lines.start.0 - old_cursor.start().input.lines; + old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; - old_cursor.seek(&tab_edit.old_lines.end, Bias::Right, &()); + old_cursor.seek(&tab_edit.old.end, Bias::Right, &()); let mut old_end = old_cursor.start().output.lines; - old_end += tab_edit.old_lines.end.0 - old_cursor.start().input.lines; + old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; - new_cursor.seek(&tab_edit.new_lines.start, Bias::Right, &()); + new_cursor.seek(&tab_edit.new.start, Bias::Right, &()); let mut new_start = new_cursor.start().output.lines; - new_start += tab_edit.new_lines.start.0 - new_cursor.start().input.lines; + new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; - new_cursor.seek(&tab_edit.new_lines.end, Bias::Right, &()); + new_cursor.seek(&tab_edit.new.end, Bias::Right, &()); let mut new_end = new_cursor.start().output.lines; - new_end += tab_edit.new_lines.end.0 - new_cursor.start().input.lines; + new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; - wrap_edits.push(Edit { + wrap_edits.push(WrapEdit { old: old_start.row..old_end.row, new: new_start.row..new_end.row, }); @@ -955,7 +952,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint { } } -fn consolidate_wrap_edits(edits: &mut Vec) { +fn consolidate_wrap_edits(edits: &mut Vec) { let mut i = 1; while i < edits.len() { let edit = edits[i].clone(); From a88cff4fa099dc10bd75307c71c7ee2432ba03fe Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 7 Dec 2021 16:52:15 +0100 Subject: [PATCH 020/196] Remove lifetime parameter from TextDimension trait Co-Authored-By: Antonio Scandurra --- crates/editor/src/editor.rs | 17 ++++------------- 
crates/go_to_line/src/go_to_line.rs | 2 +- crates/text/src/anchor.rs | 18 +++++++++--------- crates/text/src/rope.rs | 14 +++++++------- crates/text/src/selection.rs | 17 ++++++++--------- crates/text/src/text.rs | 22 ++++++++++------------ 6 files changed, 39 insertions(+), 51 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 294e9f229ed9992dc359df445694b36dc7e868b0..848edc643f33bd2158dfa0b90ff34fa9d73ae395 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3056,7 +3056,7 @@ impl Editor { pub fn selections<'a, D>(&self, cx: &'a AppContext) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a> + Ord, + D: 'a + TextDimension + Ord, { let buffer = self.buffer.read(cx); let mut selections = self.selection_set(cx).selections::(buffer).peekable(); @@ -3086,10 +3086,7 @@ impl Editor { }) } - fn pending_selection<'a, D>(&self, cx: &'a AppContext) -> Option> - where - D: 'a + TextDimension<'a>, - { + fn pending_selection(&self, cx: &AppContext) -> Option> { let buffer = self.buffer.read(cx); self.pending_selection.as_ref().map(|pending| Selection { id: pending.selection.id, @@ -3108,10 +3105,7 @@ impl Editor { selection_count } - pub fn oldest_selection<'a, T>(&self, cx: &'a AppContext) -> Selection - where - T: 'a + TextDimension<'a>, - { + pub fn oldest_selection(&self, cx: &AppContext) -> Selection { let buffer = self.buffer.read(cx); self.selection_set(cx) .oldest_selection(buffer) @@ -3119,10 +3113,7 @@ impl Editor { .unwrap() } - pub fn newest_selection<'a, T>(&self, cx: &'a AppContext) -> Selection - where - T: 'a + TextDimension<'a>, - { + pub fn newest_selection(&self, cx: &AppContext) -> Selection { let buffer = self.buffer.read(cx); self.pending_selection(cx) .or_else(|| self.selection_set(cx).newest_selection(buffer)) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 77942b105fd315eddbf737be1d6968360c1b0b53..dbd36b1139402e535a8ee3c3fcf2257b503903bb 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -1,10 +1,10 @@ -use text::{Bias, Point, Selection}; use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings}; use gpui::{ action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity, MutableAppContext, RenderContext, View, ViewContext, ViewHandle, }; use postage::watch; +use text::{Bias, Point, Selection}; use workspace::{Settings, Workspace}; action!(Toggle); diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 5f02a0e03bb229bebec1f0c664964c38fea6334e..5653124c68f4ec6f7207c87398b765d3dbeb51ca 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -117,7 +117,7 @@ impl Anchor { pub fn summary<'a, D>(&self, content: &'a BufferSnapshot) -> D where - D: TextDimension<'a>, + D: TextDimension, { content.summary_for_anchor(self) } @@ -137,7 +137,7 @@ impl AnchorMap { snapshot: &'a BufferSnapshot, ) -> impl Iterator + 'a where - D: 'a + TextDimension<'a>, + D: TextDimension, { snapshot .summaries_for_anchors( @@ -160,7 +160,7 @@ impl AnchorSet { pub fn iter<'a, D>(&'a self, content: &'a BufferSnapshot) -> impl Iterator + 'a where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.0.iter(content).map(|(position, _)| position) } @@ -194,7 +194,7 @@ impl AnchorRangeMap { content: &'a BufferSnapshot, ) -> impl Iterator, &'a T)> + 'a where - D: 'a + TextDimension<'a>, + D: TextDimension, { content .summaries_for_anchor_ranges( @@ -212,7 +212,7 @@ impl AnchorRangeMap 
{ content: &'a BufferSnapshot, ) -> impl Iterator, &'a T)> + 'a where - D: 'a + TextDimension<'a>, + D: TextDimension, I: ToOffset, { let range = content.anchor_at(range.start.0, range.start.1) @@ -250,7 +250,7 @@ impl AnchorRangeMap { mut extract_key: F, ) -> Option<(Range, &T)> where - D: 'a + TextDimension<'a>, + D: TextDimension, F: FnMut(&T) -> K, K: Ord, { @@ -266,7 +266,7 @@ impl AnchorRangeMap { mut extract_key: F, ) -> Option<(Range, &T)> where - D: 'a + TextDimension<'a>, + D: TextDimension, F: FnMut(&T) -> K, K: Ord, { @@ -282,7 +282,7 @@ impl AnchorRangeMap { content: &'a BufferSnapshot, ) -> Range where - D: 'a + TextDimension<'a>, + D: TextDimension, { let mut anchor = Anchor { full_offset: range.start, @@ -342,7 +342,7 @@ impl AnchorRangeSet { content: &'a BufferSnapshot, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.0.ranges(content).map(|(range, _)| range) } diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index f8170a4ac086f69adebf1749c107df66f0ee76b9..ffc1b74c55de7f6c5bd631d4c4258382fa6bc262 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -327,7 +327,7 @@ impl<'a> Cursor<'a> { slice } - pub fn summary>(&mut self, end_offset: usize) -> D { + pub fn summary(&mut self, end_offset: usize) -> D { debug_assert!(end_offset >= self.offset); let mut summary = D::default(); @@ -719,12 +719,12 @@ impl std::ops::AddAssign for TextSummary { } } -pub trait TextDimension<'a>: Dimension<'a, TextSummary> { +pub trait TextDimension: 'static + for<'a> Dimension<'a, TextSummary> { fn from_text_summary(summary: &TextSummary) -> Self; fn add_assign(&mut self, other: &Self); } -impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1, D2) { +impl<'a, D1: TextDimension, D2: TextDimension> TextDimension for (D1, D2) { fn from_text_summary(summary: &TextSummary) -> Self { ( D1::from_text_summary(summary), @@ -738,7 +738,7 @@ impl<'a, D1: TextDimension<'a>, D2: TextDimension<'a>> TextDimension<'a> for (D1 } } -impl<'a> TextDimension<'a> for TextSummary { +impl TextDimension for TextSummary { fn from_text_summary(summary: &TextSummary) -> Self { summary.clone() } @@ -754,7 +754,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for usize { } } -impl<'a> TextDimension<'a> for usize { +impl TextDimension for usize { fn from_text_summary(summary: &TextSummary) -> Self { summary.bytes } @@ -770,7 +770,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for Point { } } -impl<'a> TextDimension<'a> for Point { +impl TextDimension for Point { fn from_text_summary(summary: &TextSummary) -> Self { summary.lines } @@ -786,7 +786,7 @@ impl<'a> sum_tree::Dimension<'a, TextSummary> for PointUtf16 { } } -impl<'a> TextDimension<'a> for PointUtf16 { +impl TextDimension for PointUtf16 { fn from_text_summary(summary: &TextSummary) -> Self { summary.lines_utf16 } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index 6af10395272e0fcf7ebe0255a2acecdc9f0724ad..6c04da016a1a6bb0782ba3a56ccbb8a1c54d4111 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,9 +1,8 @@ -use sum_tree::Bias; - -use crate::{rope::TextDimension, BufferSnapshot}; - -use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint}; +use crate::{ + rope::TextDimension, AnchorRangeMap, Buffer, BufferSnapshot, Point, ToOffset, ToPoint, +}; use std::{cmp::Ordering, ops::Range, sync::Arc}; +use sum_tree::Bias; pub type SelectionSetId = clock::Lamport; pub type SelectionsVersion = usize; @@ -108,7 +107,7 @@ 
impl SelectionSet { content: &'a BufferSnapshot, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.selections .ranges(content) @@ -127,7 +126,7 @@ impl SelectionSet { content: &'a BufferSnapshot, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a>, + D: TextDimension, I: 'a + ToOffset, { self.selections @@ -143,7 +142,7 @@ impl SelectionSet { pub fn oldest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.selections .min_by_key(content, |selection| selection.id) @@ -158,7 +157,7 @@ impl SelectionSet { pub fn newest_selection<'a, D>(&'a self, content: &'a BufferSnapshot) -> Option> where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.selections .max_by_key(content, |selection| selection.id) diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index f99de77ab6e1c8bdd82e763de158709453628cea..5fcd40ad89e0a08c9fd6d5c029f6b0bb6e47975c 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -295,7 +295,7 @@ impl UndoMap { } } -struct Edits<'a, D: TextDimension<'a>, F: FnMut(&FragmentSummary) -> bool> { +struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> { visible_cursor: rope::Cursor<'a>, deleted_cursor: rope::Cursor<'a>, fragments_cursor: Option>, @@ -1447,7 +1447,7 @@ impl Buffer { #[cfg(test)] pub fn selection_ranges<'a, D>(&'a self, set_id: SelectionSetId) -> Result>> where - D: 'a + TextDimension<'a>, + D: TextDimension, { Ok(self .selection_set(set_id)? @@ -1467,7 +1467,7 @@ impl Buffer { &'a self, ) -> impl 'a + Iterator>)> where - D: 'a + TextDimension<'a>, + D: TextDimension, { self.selections .keys() @@ -1596,7 +1596,7 @@ impl BufferSnapshot { fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D where - D: TextDimension<'a>, + D: TextDimension, { let cx = Some(anchor.version.clone()); let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); @@ -1615,7 +1615,7 @@ impl BufferSnapshot { pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range) -> D where - D: TextDimension<'a>, + D: TextDimension, { self.visible_text .cursor(range.start.to_offset(self)) @@ -1629,7 +1629,7 @@ impl BufferSnapshot { ranges: I, ) -> impl 'a + Iterator where - D: 'a + TextDimension<'a>, + D: TextDimension, I: 'a + IntoIterator, { let cx = Some(version.clone()); @@ -1656,7 +1656,7 @@ impl BufferSnapshot { ranges: I, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a>, + D: TextDimension, I: 'a + IntoIterator>, { let cx = Some(version); @@ -1855,7 +1855,7 @@ impl BufferSnapshot { since: &'a clock::Global, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a> + Ord, + D: TextDimension + Ord, { self.edits_since_in_range(since, Anchor::min()..Anchor::max()) } @@ -1866,7 +1866,7 @@ impl BufferSnapshot { range: Range, ) -> impl 'a + Iterator> where - D: 'a + TextDimension<'a> + Ord, + D: TextDimension + Ord, { let fragments_cursor = if *since == self.version { None @@ -1964,9 +1964,7 @@ impl<'a> RopeBuilder<'a> { } } -impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator - for Edits<'a, D, F> -{ +impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> { type Item = Edit; fn next(&mut self) -> Option { From 5f8e406c1868824eee0c53cd6cd0f0a6dd3924ce Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 8 Dec 2021 10:04:22 -0800 Subject: [PATCH 021/196] Fill out ExcerptList API This restores the improvements that we had made 
on the `project-diagnostics-generic` branch. Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/fold_map.rs | 6 +- crates/language/src/excerpt_list.rs | 467 ++++++++++++++++++---- crates/text/src/text.rs | 66 +-- 3 files changed, 432 insertions(+), 107 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index eea8ebe4f5144ba6ebf84ec56ad67ce2501e73ad..bdbe002dc116ba433bcb39e0bc6d321ca35a62e9 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -53,7 +53,7 @@ impl FoldPoint { let overshoot = self.0 - cursor.start().0 .0; snapshot .buffer_snapshot - .to_offset(cursor.start().1 + overshoot) + .point_to_offset(cursor.start().1 + overshoot) } pub fn to_offset(&self, snapshot: &FoldSnapshot) -> FoldOffset { @@ -68,7 +68,7 @@ impl FoldPoint { assert!(transform.output_text.is_none()); let end_buffer_offset = snapshot .buffer_snapshot - .to_offset(cursor.start().1.input.lines + overshoot); + .point_to_offset(cursor.start().1.input.lines + overshoot); offset += end_buffer_offset - cursor.start().1.input.bytes; } FoldOffset(offset) @@ -1014,7 +1014,7 @@ impl FoldOffset { Point::new(0, (self.0 - cursor.start().0 .0) as u32) } else { let buffer_offset = cursor.start().1.input.bytes + self.0 - cursor.start().0 .0; - let buffer_point = snapshot.buffer_snapshot.to_point(buffer_offset); + let buffer_point = snapshot.buffer_snapshot.offset_to_point(buffer_offset); buffer_point - cursor.start().1.input.lines }; FoldPoint(cursor.start().1.output.lines + overshoot) diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/excerpt_list.rs index 1791031de9c111cf1901656bf3b20020339f041d..0e6a0e2356090b3ffd3de6d57ee32a05bd852c6c 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/excerpt_list.rs @@ -1,4 +1,4 @@ -use crate::{buffer, Buffer, Chunk}; +use crate::buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; use parking_lot::Mutex; @@ -6,26 +6,31 @@ use smallvec::{smallvec, SmallVec}; use std::{cmp, iter, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ + rope::TextDimension, subscription::{Subscription, Topic}, - Anchor, AnchorRangeExt, Edit, Point, TextSummary, + Anchor, AnchorRangeExt, Edit, Point, PointUtf16, TextSummary, }; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; -pub trait ToOffset { - fn to_offset<'a>(&self, content: &Snapshot) -> usize; -} - pub type ExcerptId = Location; #[derive(Default)] pub struct ExcerptList { - snapshot: Mutex, + snapshot: Mutex, buffers: HashMap, subscriptions: Topic, } +pub trait ToOffset { + fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize; +} + +pub trait ToPoint { + fn to_point<'a>(&self, snapshot: &ExcerptListSnapshot) -> Point; +} + #[derive(Debug)] struct BufferState { buffer: ModelHandle, @@ -34,7 +39,7 @@ struct BufferState { } #[derive(Clone, Default)] -pub struct Snapshot { +pub struct ExcerptListSnapshot { excerpts: SumTree, } @@ -75,7 +80,7 @@ impl ExcerptList { Self::default() } - pub fn snapshot(&self, cx: &AppContext) -> Snapshot { + pub fn snapshot(&self, cx: &AppContext) -> ExcerptListSnapshot { self.sync(cx); self.snapshot.lock().clone() } @@ -191,7 +196,7 @@ impl Entity for ExcerptList { type Event = (); } -impl Snapshot { +impl ExcerptListSnapshot { pub fn text(&self) -> String { self.chunks(0..self.len(), 
None) .map(|chunk| chunk.text) @@ -213,76 +218,74 @@ impl Snapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let overshoot = offset - cursor.start(); - let header_height = excerpt.header_height as usize; - if overshoot < header_height { + let start_after_header = *cursor.start() + excerpt.header_height as usize; + if offset < start_after_header { *cursor.start() } else { - let excerpt_start = - text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); - let buffer_offset = excerpt.buffer.clip_offset( - excerpt_start + (offset - header_height - cursor.start()), - bias, - ); + let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); + let buffer_offset = excerpt + .buffer + .clip_offset(excerpt_start + (offset - start_after_header), bias); let offset_in_excerpt = if buffer_offset > excerpt_start { buffer_offset - excerpt_start } else { 0 }; - cursor.start() + header_height + offset_in_excerpt + start_after_header + offset_in_excerpt } } else { self.excerpts.summary().text.bytes } } - pub fn to_point(&self, offset: usize) -> Point { - let mut cursor = self.excerpts.cursor::<(usize, Point)>(); - cursor.seek(&offset, Bias::Right, &()); + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let overshoot = offset - cursor.start().0; - let header_height = excerpt.header_height as usize; - if overshoot < header_height { - cursor.start().1 + let start_after_header = *cursor.start() + Point::new(excerpt.header_height as u32, 0); + if point < start_after_header { + *cursor.start() } else { - let excerpt_start_offset = - text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); - let excerpt_start_point = - text::ToPoint::to_point(&excerpt.range.start, &excerpt.buffer); + let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); let buffer_point = excerpt .buffer - .to_point(excerpt_start_offset + (offset - header_height - cursor.start().0)); - cursor.start().1 - + Point::new(header_height as u32, 0) - + (buffer_point - excerpt_start_point) + .clip_point(excerpt_start + (point - start_after_header), bias); + let point_in_excerpt = if buffer_point > excerpt_start { + buffer_point - excerpt_start + } else { + Point::zero() + }; + start_after_header + point_in_excerpt } } else { self.excerpts.summary().text.lines } } - pub fn to_offset(&self, point: Point) -> usize { - let mut cursor = self.excerpts.cursor::<(Point, usize)>(); + pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 { + let mut cursor = self.excerpts.cursor::(); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let overshoot = point - cursor.start().0; - let header_height = Point::new(excerpt.header_height as u32, 0); - if overshoot < header_height { - cursor.start().1 + let start_after_header = + *cursor.start() + PointUtf16::new(excerpt.header_height as u32, 0); + if point < start_after_header { + *cursor.start() } else { - let excerpt_start_offset = - text::ToOffset::to_offset(&excerpt.range.start, &excerpt.buffer); - let excerpt_start_point = - text::ToPoint::to_point(&excerpt.range.start, &excerpt.buffer); - let buffer_offset = excerpt + let excerpt_start = excerpt .buffer - .to_offset(excerpt_start_point + (point - header_height - cursor.start().0)); - cursor.start().1 - + excerpt.header_height as usize - + (buffer_offset - 
excerpt_start_offset) + .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); + let buffer_point = excerpt + .buffer + .clip_point_utf16(excerpt_start + (point - start_after_header), bias); + let point_in_excerpt = if buffer_point > excerpt_start { + buffer_point - excerpt_start + } else { + PointUtf16::new(0, 0) + }; + start_after_header + point_in_excerpt } } else { - self.excerpts.summary().text.bytes + self.excerpts.summary().text.lines_utf16 } } @@ -334,6 +337,200 @@ impl Snapshot { theme, } } + + pub fn offset_to_point(&self, offset: usize) -> Point { + let mut cursor = self.excerpts.cursor::<(usize, Point)>(); + cursor.seek(&offset, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let (start_offset, start_point) = cursor.start(); + let overshoot = offset - start_offset; + let header_height = excerpt.header_height as usize; + if overshoot < header_height { + *start_point + } else { + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); + let buffer_point = excerpt + .buffer + .offset_to_point(excerpt_start_offset + (overshoot - header_height)); + *start_point + + Point::new(header_height as u32, 0) + + (buffer_point - excerpt_start_point) + } + } else { + self.excerpts.summary().text.lines + } + } + + pub fn point_to_offset(&self, point: Point) -> usize { + let mut cursor = self.excerpts.cursor::<(Point, usize)>(); + cursor.seek(&point, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let (start_point, start_offset) = cursor.start(); + let overshoot = point - start_point; + let header_height = Point::new(excerpt.header_height as u32, 0); + if overshoot < header_height { + *start_offset + } else { + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); + let buffer_offset = excerpt + .buffer + .point_to_offset(excerpt_start_point + (overshoot - header_height)); + *start_offset + excerpt.header_height as usize + buffer_offset + - excerpt_start_offset + } + } else { + self.excerpts.summary().text.bytes + } + } + + pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { + let mut cursor = self.excerpts.cursor::<(PointUtf16, usize)>(); + cursor.seek(&point, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let (start_point, start_offset) = cursor.start(); + let overshoot = point - start_point; + let header_height = PointUtf16::new(excerpt.header_height as u32, 0); + if overshoot < header_height { + *start_offset + } else { + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt + .buffer + .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); + let buffer_offset = excerpt + .buffer + .point_utf16_to_offset(excerpt_start_point + (overshoot - header_height)); + *start_offset + + excerpt.header_height as usize + + (buffer_offset - excerpt_start_offset) + } + } else { + self.excerpts.summary().text.bytes + } + } + + pub fn line_len(&self, row: u32) -> u32 { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&Point::new(row, 0), Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let overshoot = row - cursor.start().row; + let header_height = excerpt.header_height as u32; + if overshoot < header_height { + 0 + } else { + let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); + let excerpt_end = 
excerpt.range.end.to_point(&excerpt.buffer); + let buffer_row = excerpt_start.row + overshoot - header_height; + let mut len = excerpt.buffer.line_len(buffer_row); + if buffer_row == excerpt_end.row { + len = excerpt_end.column; + } + if buffer_row == excerpt_start.row { + len -= excerpt_start.column + } + len + } + } else { + 0 + } + } + + pub fn max_point(&self) -> Point { + self.text_summary().lines + } + + pub fn text_summary(&self) -> TextSummary { + self.excerpts.summary().text + } + + pub fn text_summary_for_range<'a, D, O>(&'a self, range: Range) -> D + where + D: TextDimension, + O: ToOffset, + { + let mut summary = D::default(); + let mut range = range.start.to_offset(self)..range.end.to_offset(self); + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&range.start, Bias::Right, &()); + if let Some(excerpt) = cursor.item() { + let start_after_header = cursor.start() + excerpt.header_height as usize; + if range.start < start_after_header { + let header_len = cmp::min(range.end, start_after_header) - range.start; + summary.add_assign(&D::from_text_summary(&TextSummary { + bytes: header_len, + lines: Point::new(header_len as u32, 0), + lines_utf16: PointUtf16::new(header_len as u32, 0), + first_line_chars: 0, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 0, + })); + range.start = start_after_header; + range.end = cmp::max(range.start, range.end); + } + + let end_before_newline = cursor.end(&()) - 1; + let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); + let start_in_excerpt = excerpt_start + (range.start - start_after_header); + let end_in_excerpt = + excerpt_start + (cmp::min(end_before_newline, range.end) - start_after_header); + summary.add_assign( + &excerpt + .buffer + .text_summary_for_range(start_in_excerpt..end_in_excerpt), + ); + + if range.end > end_before_newline { + summary.add_assign(&D::from_text_summary(&TextSummary { + bytes: 1, + lines: Point::new(1 as u32, 0), + lines_utf16: PointUtf16::new(1 as u32, 0), + first_line_chars: 0, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 0, + })); + } + + cursor.next(&()); + } + + if range.end > *cursor.start() { + summary.add_assign(&D::from_text_summary(&cursor.summary::<_, TextSummary>( + &range.end, + Bias::Right, + &(), + ))); + if let Some(excerpt) = cursor.item() { + let start_after_header = cursor.start() + excerpt.header_height as usize; + let header_len = + cmp::min(range.end - cursor.start(), excerpt.header_height as usize); + summary.add_assign(&D::from_text_summary(&TextSummary { + bytes: header_len, + lines: Point::new(header_len as u32, 0), + lines_utf16: PointUtf16::new(header_len as u32, 0), + first_line_chars: 0, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 0, + })); + range.end = cmp::max(start_after_header, range.end); + + let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); + let end_in_excerpt = excerpt_start + (range.end - start_after_header); + summary.add_assign( + &excerpt + .buffer + .text_summary_for_range(excerpt_start..end_in_excerpt), + ); + cursor.next(&()); + } + } + + summary + } } impl Excerpt { @@ -343,7 +540,8 @@ impl Excerpt { range: Range, header_height: u8, ) -> Self { - let mut text_summary = buffer.text_summary_for_range::(range.clone()); + let mut text_summary = + buffer.text_summary_for_range::(range.to_offset(&buffer)); if header_height > 0 { text_summary.first_line_chars = 0; text_summary.lines.row += header_height as u32; @@ -389,6 +587,12 @@ impl sum_tree::Summary for EntrySummary { } } +impl<'a> 
sum_tree::Dimension<'a, EntrySummary> for TextSummary { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { + *self += &summary.text; + } +} + impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { *self += summary.text.bytes; @@ -401,6 +605,12 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for Point { } } +impl<'a> sum_tree::Dimension<'a, EntrySummary> for PointUtf16 { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { + *self += summary.text.lines_utf16 + } +} + impl<'a> sum_tree::Dimension<'a, EntrySummary> for Location { fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { debug_assert!(summary.excerpt_id > *self); @@ -462,15 +672,34 @@ impl<'a> Iterator for Chunks<'a> { } } +impl ToOffset for Point { + fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + snapshot.point_to_offset(*self) + } +} + +impl ToOffset for PointUtf16 { + fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + snapshot.point_utf16_to_offset(*self) + } +} + impl ToOffset for usize { - fn to_offset<'a>(&self, _: &Snapshot) -> usize { + fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + assert!(*self <= snapshot.len(), "offset is out of range"); *self } } -impl ToOffset for Point { - fn to_offset<'a>(&self, snapshot: &Snapshot) -> usize { - snapshot.to_offset(*self) +impl ToPoint for usize { + fn to_point<'a>(&self, snapshot: &ExcerptListSnapshot) -> Point { + snapshot.offset_to_point(*self) + } +} + +impl ToPoint for Point { + fn to_point<'a>(&self, _: &ExcerptListSnapshot) -> Point { + *self } } @@ -507,7 +736,7 @@ impl Location { #[cfg(test)] mod tests { use super::*; - use crate::Buffer; + use crate::buffer::Buffer; use gpui::MutableAppContext; use rand::prelude::*; use std::{env, mem}; @@ -685,33 +914,48 @@ mod tests { let snapshot = list.read(cx).snapshot(cx); + let mut excerpt_starts = Vec::new(); let mut expected_text = String::new(); for (buffer, range, header_height) in &expected_excerpts { - let buffer_id = buffer.id(); let buffer = buffer.read(cx); let buffer_range = range.to_offset(buffer); - let buffer_start_point = buffer.to_point(buffer_range.start); for _ in 0..*header_height { expected_text.push('\n'); } - let excerpt_start = TextSummary::from(expected_text.as_str()); + excerpt_starts.push(TextSummary::from(expected_text.as_str())); expected_text.extend(buffer.text_for_range(buffer_range.clone())); expected_text.push('\n'); + } + + assert_eq!(snapshot.text(), expected_text); - for buffer_offset in buffer_range.clone() { - let offset = excerpt_start.bytes + (buffer_offset - buffer_range.start); + let mut excerpt_starts = excerpt_starts.into_iter(); + for (buffer, range, _) in &expected_excerpts { + let buffer_id = buffer.id(); + let buffer = buffer.read(cx); + let buffer_range = range.to_offset(buffer); + let buffer_start_point = buffer.offset_to_point(buffer_range.start); + let buffer_start_point_utf16 = + buffer.text_summary_for_range::(0..buffer_range.start); + + let excerpt_start = excerpt_starts.next().unwrap(); + let mut offset = excerpt_start.bytes; + let mut buffer_offset = buffer_range.start; + let mut point = excerpt_start.lines; + let mut buffer_point = buffer_start_point; + let mut point_utf16 = excerpt_start.lines_utf16; + let mut buffer_point_utf16 = buffer_start_point_utf16; + for byte in buffer.bytes_in_range(buffer_range.clone()).flatten() { let left_offset = snapshot.clip_offset(offset, Bias::Left); let right_offset = 
snapshot.clip_offset(offset, Bias::Right); let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); - let left_point = snapshot.to_point(left_offset); - assert_eq!( left_offset, excerpt_start.bytes + (buffer_left_offset - buffer_range.start), - "clip_offset({}, Left). buffer: {}, buffer offset: {}", + "clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}", offset, buffer_id, buffer_offset, @@ -719,30 +963,95 @@ mod tests { assert_eq!( right_offset, excerpt_start.bytes + (buffer_right_offset - buffer_range.start), - "clip_offset({}, Right). buffer: {}, buffer offset: {}", + "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}", offset, buffer_id, buffer_offset, ); + + let left_point = snapshot.clip_point(point, Bias::Left); + let right_point = snapshot.clip_point(point, Bias::Right); + let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left); + let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right); assert_eq!( left_point, - excerpt_start.lines - + (buffer.to_point(buffer_left_offset) - buffer_start_point), - "to_point({}). buffer: {}, buffer offset: {}", - offset, + excerpt_start.lines + (buffer_left_point - buffer_start_point), + "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}", + point, buffer_id, - buffer_offset, + buffer_point, ); assert_eq!( - snapshot.to_offset(left_point), + right_point, + excerpt_start.lines + (buffer_right_point - buffer_start_point), + "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}", + point, + buffer_id, + buffer_point, + ); + + let left_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Left); + let right_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Right); + let buffer_left_point_utf16 = + buffer.clip_point_utf16(buffer_point_utf16, Bias::Left); + let buffer_right_point_utf16 = + buffer.clip_point_utf16(buffer_point_utf16, Bias::Right); + assert_eq!( + left_point_utf16, + excerpt_start.lines_utf16 + + (buffer_left_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Left). buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + assert_eq!( + right_point_utf16, + excerpt_start.lines_utf16 + + (buffer_right_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Right). 
buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + + assert_eq!( + snapshot.point_to_offset(left_point), left_offset, - "to_offset({:?})", + "point_to_offset({:?})", left_point, - ) + ); + assert_eq!( + snapshot.offset_to_point(left_offset), + left_point, + "offset_to_point({:?})", + left_offset, + ); + + offset += 1; + buffer_offset += 1; + if *byte == b'\n' { + point += Point::new(1, 0); + point_utf16 += PointUtf16::new(1, 0); + buffer_point += Point::new(1, 0); + buffer_point_utf16 += PointUtf16::new(1, 0); + } else { + point += Point::new(0, 1); + point_utf16 += PointUtf16::new(0, 1); + buffer_point += Point::new(0, 1); + buffer_point_utf16 += PointUtf16::new(0, 1); + } } } - assert_eq!(snapshot.text(), expected_text); + for (row, line) in expected_text.split('\n').enumerate() { + assert_eq!( + snapshot.line_len(row as u32), + line.len() as u32, + "line_len({}).", + row + ); + } for _ in 0..10 { let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); @@ -756,6 +1065,14 @@ mod tests { "incorrect text for range {:?}", start_ix..end_ix ); + + let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]); + assert_eq!( + snapshot.text_summary_for_range::(start_ix..end_ix), + expected_summary, + "incorrect summary for range {:?}", + start_ix..end_ix + ); } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 5fcd40ad89e0a08c9fd6d5c029f6b0bb6e47975c..5a4ee1ad93393fa9fa3315a92c305729068a7565 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1530,14 +1530,22 @@ impl BufferSnapshot { self.visible_text.max_point() } - pub fn to_offset(&self, point: Point) -> usize { + pub fn point_to_offset(&self, point: Point) -> usize { self.visible_text.point_to_offset(point) } - pub fn to_point(&self, offset: usize) -> Point { + pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize { + self.visible_text.point_utf16_to_offset(point) + } + + pub fn offset_to_point(&self, offset: usize) -> Point { self.visible_text.offset_to_point(offset) } + pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 { + self.visible_text.offset_to_point_utf16(offset) + } + pub fn version(&self) -> &clock::Global { &self.version } @@ -2252,45 +2260,45 @@ impl Operation { } pub trait ToOffset { - fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize; + fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize; - fn to_full_offset<'a>(&self, content: &BufferSnapshot, bias: Bias) -> FullOffset { - let offset = self.to_offset(&content); - let mut cursor = content.fragments.cursor::(); + fn to_full_offset<'a>(&self, snapshot: &BufferSnapshot, bias: Bias) -> FullOffset { + let offset = self.to_offset(&snapshot); + let mut cursor = snapshot.fragments.cursor::(); cursor.seek(&offset, bias, &None); FullOffset(offset + cursor.start().deleted) } } impl ToOffset for Point { - fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { - content.visible_text.point_to_offset(*self) + fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { + snapshot.point_to_offset(*self) } } impl ToOffset for PointUtf16 { - fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { - content.visible_text.point_utf16_to_offset(*self) + fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { + snapshot.point_utf16_to_offset(*self) } } impl ToOffset for usize { - fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { - assert!(*self <= content.len(), "offset is out of range"); + fn 
to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { + assert!(*self <= snapshot.len(), "offset is out of range"); *self } } impl ToOffset for Anchor { - fn to_offset<'a>(&self, content: &BufferSnapshot) -> usize { - content.summary_for_anchor(self) + fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize { + snapshot.summary_for_anchor(self) } - fn to_full_offset<'a>(&self, content: &BufferSnapshot, bias: Bias) -> FullOffset { - if content.version == self.version { + fn to_full_offset<'a>(&self, snapshot: &BufferSnapshot, bias: Bias) -> FullOffset { + if snapshot.version == self.version { self.full_offset } else { - let mut cursor = content + let mut cursor = snapshot .fragments .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); cursor.seek( @@ -2310,24 +2318,24 @@ impl ToOffset for Anchor { } impl<'a> ToOffset for &'a Anchor { - fn to_offset(&self, content: &BufferSnapshot) -> usize { - content.summary_for_anchor(self) + fn to_offset(&self, snapshot: &BufferSnapshot) -> usize { + snapshot.summary_for_anchor(self) } } pub trait ToPoint { - fn to_point<'a>(&self, content: &BufferSnapshot) -> Point; + fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point; } impl ToPoint for Anchor { - fn to_point<'a>(&self, content: &BufferSnapshot) -> Point { - content.summary_for_anchor(self) + fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point { + snapshot.summary_for_anchor(self) } } impl ToPoint for usize { - fn to_point<'a>(&self, content: &BufferSnapshot) -> Point { - content.visible_text.offset_to_point(*self) + fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point { + snapshot.offset_to_point(*self) } } @@ -2338,17 +2346,17 @@ impl ToPoint for Point { } pub trait FromAnchor { - fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self; + fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self; } impl FromAnchor for Point { - fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self { - anchor.to_point(content) + fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { + anchor.to_point(snapshot) } } impl FromAnchor for usize { - fn from_anchor(anchor: &Anchor, content: &BufferSnapshot) -> Self { - anchor.to_offset(content) + fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { + anchor.to_offset(snapshot) } } From a7634ccd5f50fe3fcf2a66819d31673443384d3e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 8 Dec 2021 10:07:15 -0800 Subject: [PATCH 022/196] Rename ExcerptList to MultiBuffer Co-Authored-By: Nathan Sobo --- crates/language/src/language.rs | 2 +- .../src/{excerpt_list.rs => multi_buffer.rs} | 74 +++++++++---------- 2 files changed, 38 insertions(+), 38 deletions(-) rename crates/language/src/{excerpt_list.rs => multi_buffer.rs} (95%) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index de8d5fa8939315613e88178f879d51e89578078b..16dbd1c140e450f1def33665ee3ac9ba0c3af380 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,6 +1,6 @@ mod buffer; -mod excerpt_list; mod highlight_map; +mod multi_buffer; pub mod proto; #[cfg(test)] mod tests; diff --git a/crates/language/src/excerpt_list.rs b/crates/language/src/multi_buffer.rs similarity index 95% rename from crates/language/src/excerpt_list.rs rename to crates/language/src/multi_buffer.rs index 0e6a0e2356090b3ffd3de6d57ee32a05bd852c6c..7f6b599715f99c9a4ce12417cdff568816384d8f 100644 --- a/crates/language/src/excerpt_list.rs +++ b/crates/language/src/multi_buffer.rs @@ -17,18 +17,18 
@@ const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; pub type ExcerptId = Location; #[derive(Default)] -pub struct ExcerptList { - snapshot: Mutex, +pub struct MultiBuffer { + snapshot: Mutex, buffers: HashMap, subscriptions: Topic, } pub trait ToOffset { - fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize; + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize; } pub trait ToPoint { - fn to_point<'a>(&self, snapshot: &ExcerptListSnapshot) -> Point; + fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point; } #[derive(Debug)] @@ -39,7 +39,7 @@ struct BufferState { } #[derive(Clone, Default)] -pub struct ExcerptListSnapshot { +pub struct MultiBufferSnapshot { excerpts: SumTree, } @@ -59,7 +59,7 @@ struct Excerpt { } #[derive(Clone, Debug, Default)] -struct EntrySummary { +struct ExcerptSummary { excerpt_id: ExcerptId, text: TextSummary, } @@ -71,16 +71,16 @@ pub struct Chunks<'a> { range: Range, cursor: Cursor<'a, Excerpt, usize>, header_height: u8, - entry_chunks: Option>, + excerpt_chunks: Option>, theme: Option<&'a SyntaxTheme>, } -impl ExcerptList { +impl MultiBuffer { pub fn new() -> Self { Self::default() } - pub fn snapshot(&self, cx: &AppContext) -> ExcerptListSnapshot { + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); self.snapshot.lock().clone() } @@ -192,11 +192,11 @@ impl ExcerptList { } } -impl Entity for ExcerptList { +impl Entity for MultiBuffer { type Event = (); } -impl ExcerptListSnapshot { +impl MultiBufferSnapshot { pub fn text(&self) -> String { self.chunks(0..self.len(), None) .map(|chunk| chunk.text) @@ -299,7 +299,7 @@ impl ExcerptListSnapshot { cursor.seek(&range.start, Bias::Right, &()); let mut header_height: u8 = 0; - let entry_chunks = cursor.item().map(|excerpt| { + let excerpt_chunks = cursor.item().map(|excerpt| { let buffer_range = excerpt.range.to_offset(&excerpt.buffer); header_height = excerpt.header_height; @@ -333,7 +333,7 @@ impl ExcerptListSnapshot { range, cursor, header_height, - entry_chunks, + excerpt_chunks, theme, } } @@ -567,17 +567,17 @@ impl Excerpt { } impl sum_tree::Item for Excerpt { - type Summary = EntrySummary; + type Summary = ExcerptSummary; fn summary(&self) -> Self::Summary { - EntrySummary { + ExcerptSummary { excerpt_id: self.id.clone(), text: self.text_summary.clone(), } } } -impl sum_tree::Summary for EntrySummary { +impl sum_tree::Summary for ExcerptSummary { type Context = (); fn add_summary(&mut self, summary: &Self, _: &()) { @@ -587,32 +587,32 @@ impl sum_tree::Summary for EntrySummary { } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for TextSummary { - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for TextSummary { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += &summary.text; } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for usize { - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.bytes; } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for Point { - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines; } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for PointUtf16 { - fn add_summary(&mut self, summary: &'a 
EntrySummary, _: &()) { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines_utf16 } } -impl<'a> sum_tree::Dimension<'a, EntrySummary> for Location { - fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Location { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { debug_assert!(summary.excerpt_id > *self); *self = summary.excerpt_id.clone(); } @@ -634,11 +634,11 @@ impl<'a> Iterator for Chunks<'a> { return Some(chunk); } - if let Some(entry_chunks) = self.entry_chunks.as_mut() { - if let Some(chunk) = entry_chunks.next() { + if let Some(excerpt_chunks) = self.excerpt_chunks.as_mut() { + if let Some(chunk) = excerpt_chunks.next() { return Some(chunk); } - self.entry_chunks.take(); + self.excerpt_chunks.take(); if self.cursor.end(&()) <= self.range.end { return Some(Chunk { text: "\n", @@ -663,7 +663,7 @@ impl<'a> Iterator for Chunks<'a> { ); self.header_height = excerpt.header_height; - self.entry_chunks = Some( + self.excerpt_chunks = Some( excerpt .buffer .chunks(buffer_range.start..buffer_end, self.theme), @@ -673,32 +673,32 @@ impl<'a> Iterator for Chunks<'a> { } impl ToOffset for Point { - fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_to_offset(*self) } } impl ToOffset for PointUtf16 { - fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_utf16_to_offset(*self) } } impl ToOffset for usize { - fn to_offset<'a>(&self, snapshot: &ExcerptListSnapshot) -> usize { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { assert!(*self <= snapshot.len(), "offset is out of range"); *self } } impl ToPoint for usize { - fn to_point<'a>(&self, snapshot: &ExcerptListSnapshot) -> Point { + fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { snapshot.offset_to_point(*self) } } impl ToPoint for Point { - fn to_point<'a>(&self, _: &ExcerptListSnapshot) -> Point { + fn to_point<'a>(&self, _: &MultiBufferSnapshot) -> Point { *self } } @@ -748,7 +748,7 @@ mod tests { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let list = cx.add_model(|_| ExcerptList::new()); + let list = cx.add_model(|_| MultiBuffer::new()); let subscription = list.update(cx, |list, cx| { let subscription = list.subscribe(); @@ -857,7 +857,7 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let list = cx.add_model(|_| ExcerptList::new()); + let list = cx.add_model(|_| MultiBuffer::new()); let mut excerpt_ids = Vec::new(); let mut expected_excerpts = Vec::new(); let mut old_versions = Vec::new(); From daedf179b231032f9d7b3753180dbbfc27174b01 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 8 Dec 2021 12:56:09 -0800 Subject: [PATCH 023/196] Implement Anchor, AnchorRangeMap, SelectionSet in multi_buffer Co-Authored-By: Nathan Sobo --- crates/language/src/multi_buffer.rs | 140 +++++---- crates/language/src/multi_buffer/anchor.rs | 280 ++++++++++++++++++ crates/language/src/multi_buffer/location.rs | 76 +++++ crates/language/src/multi_buffer/selection.rs | 91 ++++++ crates/text/src/anchor.rs | 7 +- crates/text/src/rope.rs | 9 + 6 files changed, 525 insertions(+), 78 deletions(-) create mode 
100644 crates/language/src/multi_buffer/anchor.rs create mode 100644 crates/language/src/multi_buffer/location.rs create mode 100644 crates/language/src/multi_buffer/selection.rs diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 7f6b599715f99c9a4ce12417cdff568816384d8f..6e545e3fd25f84c3a72f4008e388231f403721cd 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -1,21 +1,26 @@ -use crate::buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}; +mod anchor; +mod location; +mod selection; + +use self::location::*; +use crate::{ + buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, + BufferSnapshot, +}; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle}; use parking_lot::Mutex; -use smallvec::{smallvec, SmallVec}; -use std::{cmp, iter, ops::Range}; +use std::{cmp, ops::Range}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ rope::TextDimension, subscription::{Subscription, Topic}, - Anchor, AnchorRangeExt, Edit, Point, PointUtf16, TextSummary, + AnchorRangeExt, Edit, Point, PointUtf16, TextSummary, }; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; -pub type ExcerptId = Location; - #[derive(Default)] pub struct MultiBuffer { snapshot: Mutex, @@ -53,7 +58,7 @@ pub struct ExcerptProperties<'a, T> { struct Excerpt { id: ExcerptId, buffer: buffer::BufferSnapshot, - range: Range, + range: Range, text_summary: TextSummary, header_height: u8, } @@ -64,9 +69,6 @@ struct ExcerptSummary { text: TextSummary, } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct Location(SmallVec<[u8; 4]>); - pub struct Chunks<'a> { range: Range, cursor: Cursor<'a, Excerpt, usize>, @@ -531,13 +533,41 @@ impl MultiBufferSnapshot { summary } + + fn resolve_excerpt<'a, D: TextDimension>( + &'a self, + excerpt_id: &ExcerptId, + ) -> Option<(D, &'a BufferSnapshot)> { + let mut cursor = self.excerpts.cursor::<(ExcerptId, TextSummary)>(); + cursor.seek(excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if cursor.start().0 == *excerpt_id { + return Some((D::from_text_summary(&cursor.start().1), &excerpt.buffer)); + } + } + None + } + + fn buffer_snapshot_for_excerpt<'a>( + &'a self, + excerpt_id: &ExcerptId, + ) -> Option<&'a BufferSnapshot> { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if cursor.start() == excerpt_id { + return Some(&excerpt.buffer); + } + } + None + } } impl Excerpt { fn new( id: ExcerptId, buffer: buffer::BufferSnapshot, - range: Range, + range: Range, header_height: u8, ) -> Self { let mut text_summary = @@ -564,6 +594,18 @@ impl Excerpt { header_height, } } + + fn header_summary(&self) -> TextSummary { + TextSummary { + bytes: self.header_height as usize, + lines: Point::new(self.header_height as u32, 0), + lines_utf16: PointUtf16::new(self.header_height as u32, 0), + first_line_chars: 0, + last_line_chars: 0, + longest_row: 0, + longest_row_chars: 0, + } + } } impl sum_tree::Item for Excerpt { @@ -599,6 +641,18 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize { } } +impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for usize { + fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.text.bytes) + } +} + +impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Location { + fn cmp(&self, cursor_location: &ExcerptSummary, _: 
&()) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.excerpt_id) + } +} + impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point { fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { *self += summary.text.lines; @@ -703,43 +757,13 @@ impl ToPoint for Point { } } -impl Default for Location { - fn default() -> Self { - Self::min() - } -} - -impl Location { - pub fn min() -> Self { - Self(smallvec![u8::MIN]) - } - - pub fn max() -> Self { - Self(smallvec![u8::MAX]) - } - - pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); - let mut location = SmallVec::new(); - for (lhs, rhs) in lhs.zip(rhs) { - let mid = lhs + (rhs.saturating_sub(lhs)) / 2; - location.push(mid); - if mid > lhs { - break; - } - } - Self(location) - } -} - #[cfg(test)] mod tests { use super::*; use crate::buffer::Buffer; use gpui::MutableAppContext; use rand::prelude::*; - use std::{env, mem}; + use std::env; use text::{Point, RandomCharIter}; use util::test::sample_text; @@ -1094,36 +1118,4 @@ mod tests { assert_eq!(text.to_string(), snapshot.text()); } } - - #[gpui::test(iterations = 100)] - fn test_location(mut rng: StdRng) { - let mut lhs = Default::default(); - let mut rhs = Default::default(); - while lhs == rhs { - lhs = Location( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) - .collect(), - ); - rhs = Location( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) - .collect(), - ); - } - - if lhs > rhs { - mem::swap(&mut lhs, &mut rhs); - } - - let middle = Location::between(&lhs, &rhs); - assert!(middle > lhs); - assert!(middle < rhs); - for ix in 0..middle.0.len() - 1 { - assert!( - middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) - || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) - ); - } - } } diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs new file mode 100644 index 0000000000000000000000000000000000000000..e6b78eeefebd868d8774d6e144b3b7f8ce9fa0c5 --- /dev/null +++ b/crates/language/src/multi_buffer/anchor.rs @@ -0,0 +1,280 @@ +use super::{location::*, ExcerptSummary, MultiBufferSnapshot, ToOffset}; +use anyhow::{anyhow, Result}; +use smallvec::SmallVec; +use std::{cmp::Ordering, ops::Range}; +use sum_tree::Bias; +use text::{rope::TextDimension, AnchorRangeExt, ToOffset as _}; + +#[derive(Clone, Eq, PartialEq, Debug, Hash)] +pub struct Anchor { + excerpt_id: ExcerptId, + text_anchor: text::Anchor, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct AnchorRangeMap { + entries: SmallVec<[(ExcerptId, text::AnchorRangeMap); 1]>, +} + +impl Anchor { + pub fn min() -> Self { + Self { + excerpt_id: ExcerptId::min(), + text_anchor: text::Anchor::min(), + } + } + + pub fn max() -> Self { + Self { + excerpt_id: ExcerptId::max(), + text_anchor: text::Anchor::max(), + } + } + + pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result { + let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id); + if excerpt_id_cmp.is_eq() { + self.text_anchor.cmp( + &other.text_anchor, + snapshot + .buffer_snapshot_for_excerpt(&self.excerpt_id) + .ok_or_else(|| anyhow!("excerpt {:?} not found", self.excerpt_id))?, + ) + } else { + return Ok(excerpt_id_cmp); + } + } + + pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor { + if self.text_anchor.bias != Bias::Left { + if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) { + return Self { + 
excerpt_id: self.excerpt_id.clone(), + text_anchor: self.text_anchor.bias_left(buffer_snapshot), + }; + } + } + self.clone() + } + + pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor { + if self.text_anchor.bias != Bias::Right { + if let Some(buffer_snapshot) = snapshot.buffer_snapshot_for_excerpt(&self.excerpt_id) { + return Self { + excerpt_id: self.excerpt_id.clone(), + text_anchor: self.text_anchor.bias_right(buffer_snapshot), + }; + } + } + self.clone() + } +} + +impl AnchorRangeMap { + pub fn len(&self) -> usize { + self.entries + .iter() + .map(|(_, text_map)| text_map.len()) + .sum() + } + + pub fn ranges<'a, D>( + &'a self, + snapshot: &'a MultiBufferSnapshot, + ) -> impl Iterator, &'a T)> + 'a + where + D: TextDimension + Clone, + { + let mut cursor = snapshot.excerpts.cursor::(); + self.entries + .iter() + .filter_map(move |(excerpt_id, text_map)| { + cursor.seek_forward(excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + let mut excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + return Some(text_map.ranges::(&excerpt.buffer).map( + move |(range, value)| { + let mut full_range = excerpt_start.clone()..excerpt_start.clone(); + full_range.start.add_assign(&range.start); + full_range.end.add_assign(&range.end); + (full_range, value) + }, + )); + } + } + None + }) + .flatten() + } + + pub fn intersecting_ranges<'a, D, I>( + &'a self, + range: Range<(I, Bias)>, + snapshot: &'a MultiBufferSnapshot, + ) -> impl Iterator, &'a T)> + 'a + where + D: TextDimension, + I: ToOffset, + { + let start_bias = range.start.1; + let end_bias = range.end.1; + let start_offset = range.start.0.to_offset(snapshot); + let end_offset = range.end.0.to_offset(snapshot); + + let mut cursor = snapshot.excerpts.cursor::(); + cursor.seek(&start_offset, start_bias, &()); + let start_excerpt_id = &cursor.start().excerpt_id; + let start_ix = match self + .entries + .binary_search_by_key(&start_excerpt_id, |e| &e.0) + { + Ok(ix) | Err(ix) => ix, + }; + + let mut entry_ranges = None; + let mut entries = self.entries[start_ix..].iter(); + std::iter::from_fn(move || loop { + match &mut entry_ranges { + None => { + let (excerpt_id, text_map) = entries.next()?; + cursor.seek(excerpt_id, Bias::Left, &()); + if cursor.start().text.bytes >= end_offset { + return None; + } + + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + let mut excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + + let excerpt_start_offset = cursor.start().text.bytes; + let excerpt_end_offset = cursor.end(&()).text.bytes; + let excerpt_buffer_range = excerpt.range.to_offset(&excerpt.buffer); + + let start; + if start_offset >= excerpt_start_offset { + start = ( + excerpt_buffer_range.start + start_offset + - excerpt_start_offset, + start_bias, + ); + } else { + start = (excerpt_buffer_range.start, Bias::Left); + } + + let end; + if end_offset <= excerpt_end_offset { + end = ( + excerpt_buffer_range.start + end_offset - excerpt_start_offset, + end_bias, + ); + } else { + end = (excerpt_buffer_range.end, Bias::Right); + } + + entry_ranges = Some( + text_map + .intersecting_ranges(start..end, &excerpt.buffer) + .map(move |(range, value)| { + let mut full_range = + excerpt_start.clone()..excerpt_start.clone(); + full_range.start.add_assign(&range.start); + full_range.end.add_assign(&range.end); + (full_range, value) + }), 
+ ); + } + } + } + Some(ranges) => { + if let Some(item) = ranges.next() { + return Some(item); + } else { + entry_ranges.take(); + } + } + } + }) + } + + pub fn min_by_key<'a, D, F, K>( + &self, + snapshot: &'a MultiBufferSnapshot, + extract_key: F, + ) -> Option<(Range, &T)> + where + D: TextDimension, + F: FnMut(&T) -> K, + K: Ord, + { + self.min_or_max_by_key(snapshot, Ordering::Less, extract_key) + } + + pub fn max_by_key<'a, D, F, K>( + &self, + snapshot: &'a MultiBufferSnapshot, + extract_key: F, + ) -> Option<(Range, &T)> + where + D: TextDimension, + F: FnMut(&T) -> K, + K: Ord, + { + self.min_or_max_by_key(snapshot, Ordering::Greater, extract_key) + } + + fn min_or_max_by_key<'a, D, F, K>( + &self, + snapshot: &'a MultiBufferSnapshot, + target_ordering: Ordering, + mut extract_key: F, + ) -> Option<(Range, &T)> + where + D: TextDimension, + F: FnMut(&T) -> K, + K: Ord, + { + let mut cursor = snapshot.excerpts.cursor::(); + let mut max = None; + for (excerpt_id, text_map) in &self.entries { + cursor.seek(excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + if let Some((range, value)) = + text_map.max_by_key(&excerpt.buffer, &mut extract_key) + { + if max.as_ref().map_or(true, |(_, max_value)| { + extract_key(value).cmp(&extract_key(*max_value)) == target_ordering + }) { + let mut excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + let mut full_range = excerpt_start.clone()..excerpt_start.clone(); + full_range.start.add_assign(&range.start); + full_range.end.add_assign(&range.end); + max = Some((full_range, value)); + } + } + } + } + } + max + } +} + +impl ToOffset for Anchor { + fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { + let mut cursor = snapshot.excerpts.cursor::(); + cursor.seek(&self.excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == self.excerpt_id { + let buffer_offset = self.text_anchor.to_offset(&excerpt.buffer); + return cursor.start().text.bytes + + excerpt.header_height as usize + + buffer_offset.saturating_sub(excerpt.range.start.to_offset(&excerpt.buffer)); + } + } + cursor.start().text.bytes + } +} diff --git a/crates/language/src/multi_buffer/location.rs b/crates/language/src/multi_buffer/location.rs new file mode 100644 index 0000000000000000000000000000000000000000..a61b2a76301dec9321eb771b3c427a1bc2a1db43 --- /dev/null +++ b/crates/language/src/multi_buffer/location.rs @@ -0,0 +1,76 @@ +use smallvec::{smallvec, SmallVec}; +use std::iter; + +pub type ExcerptId = Location; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Location(SmallVec<[u8; 4]>); + +impl Location { + pub fn min() -> Self { + Self(smallvec![u8::MIN]) + } + + pub fn max() -> Self { + Self(smallvec![u8::MAX]) + } + + pub fn between(lhs: &Self, rhs: &Self) -> Self { + let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); + let mut location = SmallVec::new(); + for (lhs, rhs) in lhs.zip(rhs) { + let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + location.push(mid); + if mid > lhs { + break; + } + } + Self(location) + } +} + +impl Default for Location { + fn default() -> Self { + Self::min() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use std::mem; + + #[gpui::test(iterations = 100)] + fn test_location(mut rng: StdRng) { + let mut lhs = Default::default(); + let mut rhs = 
Default::default(); + while lhs == rhs { + lhs = Location( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + rhs = Location( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + } + + if lhs > rhs { + mem::swap(&mut lhs, &mut rhs); + } + + let middle = Location::between(&lhs, &rhs); + assert!(middle > lhs); + assert!(middle < rhs); + for ix in 0..middle.0.len() - 1 { + assert!( + middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) + || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) + ); + } + } +} diff --git a/crates/language/src/multi_buffer/selection.rs b/crates/language/src/multi_buffer/selection.rs new file mode 100644 index 0000000000000000000000000000000000000000..825b6a27b84629baf9af021829e4c45ea0107e40 --- /dev/null +++ b/crates/language/src/multi_buffer/selection.rs @@ -0,0 +1,91 @@ +use super::{anchor::AnchorRangeMap, MultiBufferSnapshot, ToOffset}; +use std::{ops::Range, sync::Arc}; +use sum_tree::Bias; +use text::{rope::TextDimension, Selection, SelectionSetId, SelectionState}; + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct SelectionSet { + pub id: SelectionSetId, + pub active: bool, + pub selections: Arc>, +} + +impl SelectionSet { + pub fn len(&self) -> usize { + self.selections.len() + } + + pub fn selections<'a, D>( + &'a self, + content: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator> + where + D: TextDimension, + { + self.selections + .ranges(content) + .map(|(range, state)| Selection { + id: state.id, + start: range.start, + end: range.end, + reversed: state.reversed, + goal: state.goal, + }) + } + + pub fn intersecting_selections<'a, D, I>( + &'a self, + range: Range<(I, Bias)>, + content: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator> + where + D: TextDimension, + I: 'a + ToOffset, + { + self.selections + .intersecting_ranges(range, content) + .map(|(range, state)| Selection { + id: state.id, + start: range.start, + end: range.end, + reversed: state.reversed, + goal: state.goal, + }) + } + + pub fn oldest_selection<'a, D>( + &'a self, + content: &'a MultiBufferSnapshot, + ) -> Option> + where + D: TextDimension, + { + self.selections + .min_by_key(content, |selection| selection.id) + .map(|(range, state)| Selection { + id: state.id, + start: range.start, + end: range.end, + reversed: state.reversed, + goal: state.goal, + }) + } + + pub fn newest_selection<'a, D>( + &'a self, + content: &'a MultiBufferSnapshot, + ) -> Option> + where + D: TextDimension, + { + self.selections + .max_by_key(content, |selection| selection.id) + .map(|(range, state)| Selection { + id: state.id, + start: range.start, + end: range.end, + reversed: state.reversed, + goal: state.goal, + }) + } +} diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 5653124c68f4ec6f7207c87398b765d3dbeb51ca..6f38593a78f20c07d25c3c3f33ef051c8cd5b686 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,6 +1,5 @@ +use super::{FromAnchor, FullOffset, Point, ToOffset}; use crate::{rope::TextDimension, BufferSnapshot}; - -use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset}; use anyhow::Result; use std::{ cmp::Ordering, @@ -99,7 +98,7 @@ impl Anchor { Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) } - pub fn bias_left(&self, buffer: &Buffer) -> Anchor { + pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor { if self.bias == Bias::Left { self.clone() } else { @@ -107,7 +106,7 @@ impl Anchor { } } - pub fn bias_right(&self, buffer: &Buffer) -> Anchor { + pub fn 
bias_right(&self, buffer: &BufferSnapshot) -> Anchor { if self.bias == Bias::Right { self.clone() } else { diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index ffc1b74c55de7f6c5bd631d4c4258382fa6bc262..8b0965847542f1d30359872728d9e2779f0cb4a0 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -685,6 +685,15 @@ impl sum_tree::Summary for TextSummary { } } +impl<'a> std::ops::Add for TextSummary { + type Output = Self; + + fn add(mut self, rhs: Self) -> Self::Output { + self.add_assign(&rhs); + self + } +} + impl<'a> std::ops::AddAssign<&'a Self> for TextSummary { fn add_assign(&mut self, other: &'a Self) { let joined_chars = self.last_line_chars + other.first_line_chars; From 87d16c271e09d333771d0a4f70455bceba741c09 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 19:23:04 -0700 Subject: [PATCH 024/196] Get Editor compiling with MultiBuffer as its buffer There's a bunch of unimplemented methods in MultiBuffer, but everything compiles. Co-Authored-By: Max Brunsfeld --- crates/editor/src/display_map.rs | 49 ++- crates/editor/src/display_map/block_map.rs | 72 ++-- crates/editor/src/display_map/fold_map.rs | 120 +++--- crates/editor/src/display_map/tab_map.rs | 12 +- crates/editor/src/display_map/wrap_map.rs | 14 +- crates/editor/src/editor.rs | 476 +++++++++++---------- crates/editor/src/element.rs | 19 +- crates/editor/src/items.rs | 38 +- crates/editor/src/movement.rs | 9 +- crates/go_to_line/src/go_to_line.rs | 2 +- crates/language/src/language.rs | 3 +- crates/language/src/multi_buffer.rs | 422 +++++++++++++++++- crates/language/src/multi_buffer/anchor.rs | 85 +++- crates/language/src/tests.rs | 2 +- crates/server/src/rpc.rs | 3 +- crates/text/src/patch.rs | 9 + crates/text/src/selection.rs | 24 +- crates/text/src/text.rs | 14 +- 18 files changed, 927 insertions(+), 446 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index fc871c98622680024870d78f13b081de676dc1dd..e99df6b0a2e0ef2da2b11298383b169c4488d2d3 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -6,7 +6,10 @@ mod wrap_map; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle}; -use language::{Anchor, Buffer, Point, Subscription as BufferSubscription, ToOffset, ToPoint}; +use language::{ + multi_buffer::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}, + Point, Subscription as BufferSubscription, +}; use std::{ collections::{HashMap, HashSet}, ops::Range, @@ -26,7 +29,7 @@ pub trait ToDisplayPoint { } pub struct DisplayMap { - buffer: ModelHandle, + buffer: ModelHandle, buffer_subscription: BufferSubscription, fold_map: FoldMap, tab_map: TabMap, @@ -40,7 +43,7 @@ impl Entity for DisplayMap { impl DisplayMap { pub fn new( - buffer: ModelHandle, + buffer: ModelHandle, tab_size: usize, font_id: FontId, font_size: f32, @@ -48,7 +51,7 @@ impl DisplayMap { cx: &mut ModelContext, ) -> Self { let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); - let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot()); + let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx); let block_map = BlockMap::new(buffer.clone(), snapshot); @@ -64,7 +67,7 @@ impl DisplayMap { } pub fn snapshot(&self, cx: &mut ModelContext) -> 
DisplaySnapshot { - let buffer_snapshot = self.buffer.read(cx).snapshot(); + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); let (folds_snapshot, edits) = self.fold_map.read(buffer_snapshot, edits); let (tabs_snapshot, edits) = self.tab_map.sync(folds_snapshot.clone(), edits); @@ -74,7 +77,7 @@ impl DisplayMap { let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits, cx); DisplaySnapshot { - buffer_snapshot: self.buffer.read(cx).snapshot(), + buffer_snapshot: self.buffer.read(cx).snapshot(cx), folds_snapshot, tabs_snapshot, wraps_snapshot, @@ -87,7 +90,7 @@ impl DisplayMap { ranges: impl IntoIterator>, cx: &mut ModelContext, ) { - let snapshot = self.buffer.read(cx).snapshot(); + let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); @@ -108,7 +111,7 @@ impl DisplayMap { ranges: impl IntoIterator>, cx: &mut ModelContext, ) { - let snapshot = self.buffer.read(cx).snapshot(); + let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); @@ -132,7 +135,7 @@ impl DisplayMap { where P: ToOffset + Clone, { - let snapshot = self.buffer.read(cx).snapshot(); + let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); let (snapshot, edits) = self.fold_map.read(snapshot, edits); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); @@ -151,7 +154,7 @@ impl DisplayMap { } pub fn remove_blocks(&mut self, ids: HashSet, cx: &mut ModelContext) { - let snapshot = self.buffer.read(cx).snapshot(); + let snapshot = self.buffer.read(cx).snapshot(cx); let edits = self.buffer_subscription.consume().into_inner(); let (snapshot, edits) = self.fold_map.read(snapshot, edits); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); @@ -179,7 +182,7 @@ impl DisplayMap { } pub struct DisplaySnapshot { - pub buffer_snapshot: language::BufferSnapshot, + pub buffer_snapshot: MultiBufferSnapshot, folds_snapshot: fold_map::FoldSnapshot, tabs_snapshot: tab_map::TabSnapshot, wraps_snapshot: wrap_map::WrapSnapshot, @@ -457,7 +460,7 @@ mod tests { use super::*; use crate::{movement, test::*}; use gpui::{color::Color, MutableAppContext}; - use language::{Language, LanguageConfig, RandomCharIter, SelectionGoal}; + use language::{Buffer, Language, LanguageConfig, RandomCharIter, SelectionGoal}; use rand::{prelude::StdRng, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; @@ -489,10 +492,10 @@ mod tests { log::info!("tab size: {}", tab_size); log::info!("wrap width: {:?}", wrap_width); - let buffer = cx.add_model(|cx| { + let buffer = cx.update(|cx| { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); - Buffer::new(0, text, cx) + MultiBuffer::build_simple(&text, cx) }); let map = cx.add_model(|cx| { @@ -563,8 +566,10 @@ mod tests { assert_eq!(prev_display_bound.column(), 0); if next_display_bound < snapshot.max_point() { assert_eq!( - buffer - .read_with(&cx, |buffer, _| buffer.chars_at(next_buffer_bound).next()), + buffer.read_with(&cx, |buffer, _| buffer + .as_snapshot() + .chars_at(next_buffer_bound) + .next()), Some('\n') ) } @@ -651,7 +656,7 @@ mod tests { 
let wrap_width = Some(64.); let text = "one two three four five\nsix seven eight"; - let buffer = cx.add_model(|cx| Buffer::new(0, text.to_string(), cx)); + let buffer = MultiBuffer::build_simple(text, cx); let map = cx.add_model(|cx| { DisplayMap::new(buffer.clone(), tab_size, font_id, font_size, wrap_width, cx) }); @@ -724,7 +729,7 @@ mod tests { #[gpui::test] fn test_text_chunks(cx: &mut gpui::MutableAppContext) { let text = sample_text(6, 6, 'a'); - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + let buffer = MultiBuffer::build_simple(&text, cx); let tab_size = 4; let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap(); let font_id = cx @@ -803,6 +808,7 @@ mod tests { let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx)); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let tab_size = 2; let font_cache = cx.font_cache(); @@ -890,6 +896,7 @@ mod tests { let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Some(lang), None, cx)); buffer.condition(&cx, |buf, _| !buf.is_parsing()).await; + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let font_cache = cx.font_cache(); @@ -935,7 +942,7 @@ mod tests { let text = "\n'a', 'α',\t'✋',\t'❎', '🍐'\n"; let display_text = "\n'a', 'α', '✋', '❎', '🍐'\n"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + let buffer = MultiBuffer::build_simple(text, cx); let tab_size = 4; let font_cache = cx.font_cache(); @@ -979,7 +986,7 @@ mod tests { #[gpui::test] fn test_tabs_with_multibyte_chars(cx: &mut gpui::MutableAppContext) { let text = "✅\t\tα\nβ\t\n🏀β\t\tγ"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + let buffer = MultiBuffer::build_simple(text, cx); let tab_size = 4; let font_cache = cx.font_cache(); let family_id = font_cache.load_family(&["Helvetica"]).unwrap(); @@ -1038,7 +1045,7 @@ mod tests { #[gpui::test] fn test_max_point(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "aaa\n\t\tbbb", cx)); + let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx); let tab_size = 4; let font_cache = cx.font_cache(); let family_id = font_cache.load_family(&["Helvetica"]).unwrap(); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 71aa3838578d85a1663385dde062e8a5a0a8ca23..00c70c361f67ec7f1b41313c42eededa528e0f88 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,6 +1,9 @@ use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}; use gpui::{AppContext, ElementBox, ModelHandle}; -use language::{Buffer, Chunk}; +use language::{ + multi_buffer::{Anchor, MultiBuffer, ToOffset, ToPoint as _}, + Chunk, +}; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -12,14 +15,14 @@ use std::{ Arc, }, }; -use sum_tree::SumTree; -use text::{Anchor, Bias, Edit, Point, ToOffset, ToPoint as _}; +use sum_tree::{Bias, SumTree}; +use text::{Edit, Point}; use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; pub struct BlockMap { - buffer: ModelHandle, + buffer: ModelHandle, next_block_id: AtomicUsize, wrap_snapshot: Mutex, blocks: Vec>, @@ -109,7 +112,7 @@ pub struct BlockBufferRows<'a> { } impl BlockMap { - pub fn new(buffer: ModelHandle, wrap_snapshot: WrapSnapshot) -> Self { + pub fn new(buffer: ModelHandle, wrap_snapshot: WrapSnapshot) -> Self { Self { buffer, next_block_id: 
AtomicUsize::new(0), @@ -153,6 +156,7 @@ impl BlockMap { } let buffer = self.buffer.read(cx); + let buffer = buffer.as_snapshot(); let mut transforms = self.transforms.lock(); let mut new_transforms = SumTree::new(); let old_row_count = transforms.summary().input_rows; @@ -241,7 +245,7 @@ impl BlockMap { let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| { probe .position - .cmp(&start_anchor, buffer) + .cmp(&start_anchor, &buffer) .unwrap() .then(Ordering::Greater) }) { @@ -255,7 +259,7 @@ impl BlockMap { match self.blocks[start_block_ix..].binary_search_by(|probe| { probe .position - .cmp(&end_anchor, buffer) + .cmp(&end_anchor, &buffer) .unwrap() .then(Ordering::Greater) }) { @@ -268,7 +272,7 @@ impl BlockMap { self.blocks[start_block_ix..end_block_ix] .iter() .map(|block| { - let mut position = block.position.to_point(buffer); + let mut position = block.position.to_point(&buffer); let column = wrap_snapshot.from_point(position, Bias::Left).column(); match block.disposition { BlockDisposition::Above => position.column = 0, @@ -380,6 +384,7 @@ impl<'a> BlockMapWriter<'a> { P: ToOffset + Clone, { let buffer = self.0.buffer.read(cx); + let buffer = buffer.as_snapshot(); let mut ids = Vec::new(); let mut edits = Vec::>::new(); let wrap_snapshot = &*self.0.wrap_snapshot.lock(); @@ -389,7 +394,7 @@ impl<'a> BlockMapWriter<'a> { ids.push(id); let position = buffer.anchor_after(block.position); - let point = position.to_point(buffer); + let point = position.to_point(&buffer); let start_row = wrap_snapshot .from_point(Point::new(point.row, 0), Bias::Left) .row(); @@ -404,7 +409,7 @@ impl<'a> BlockMapWriter<'a> { let block_ix = match self .0 .blocks - .binary_search_by(|probe| probe.position.cmp(&position, buffer).unwrap()) + .binary_search_by(|probe| probe.position.cmp(&position, &buffer).unwrap()) { Ok(ix) | Err(ix) => ix, }; @@ -436,12 +441,13 @@ impl<'a> BlockMapWriter<'a> { pub fn remove(&mut self, block_ids: HashSet, cx: &AppContext) { let buffer = self.0.buffer.read(cx); + let buffer = buffer.as_snapshot(); let wrap_snapshot = &*self.0.wrap_snapshot.lock(); let mut edits = Vec::new(); let mut last_block_buffer_row = None; self.0.blocks.retain(|block| { if block_ids.contains(&block.id) { - let buffer_row = block.position.to_point(buffer).row; + let buffer_row = block.position.to_point(&buffer).row; if last_block_buffer_row != Some(buffer_row) { last_block_buffer_row = Some(buffer_row); let start_row = wrap_snapshot @@ -877,7 +883,6 @@ mod tests { use super::*; use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap}; use gpui::{elements::Empty, Element}; - use language::Buffer; use rand::prelude::*; use std::env; use text::RandomCharIter; @@ -906,8 +911,9 @@ mod tests { let text = "aaa\nbbb\nccc\nddd"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); - let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot()); + let buffer = MultiBuffer::build_simple(text, cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); @@ -1050,15 +1056,14 @@ mod tests { ] ); - // Insert a line break, separating two block decorations into separate - // lines. 
- let (buffer_snapshot, buffer_edits) = buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); + // Insert a line break, separating two block decorations into separate lines. + let buffer_snapshot = buffer.update(cx, |buffer, cx| { buffer.edit([Point::new(1, 1)..Point::new(1, 1)], "!!!\n", cx); - (buffer.snapshot(), buffer.edits_since(&v0).collect()) + buffer.snapshot(cx) }); - let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits); + let (folds_snapshot, fold_edits) = + fold_map.read(buffer_snapshot, subscription.consume().into_inner()); let (tabs_snapshot, tab_edits) = tab_map.sync(folds_snapshot, fold_edits); let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) @@ -1077,8 +1082,8 @@ mod tests { let text = "one two three\nfour five six\nseven eight"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); - let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot()); + let buffer = MultiBuffer::build_simple(text, cx); + let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); @@ -1132,13 +1137,12 @@ mod tests { log::info!("Wrap width: {:?}", wrap_width); - let buffer = cx.add_model(|cx| { - let len = rng.gen_range(0..10); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - log::info!("initial buffer text: {:?}", text); - Buffer::new(0, text, cx) - }); - let mut buffer_snapshot = buffer.read(cx).snapshot(); + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + log::info!("initial buffer text: {:?}", text); + let buffer = MultiBuffer::build_simple(&text, cx); + + let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); let (wrap_map, wraps_snapshot) = @@ -1176,7 +1180,7 @@ mod tests { log::info!( "inserting block {:?} {:?} with height {}", disposition, - position.to_point(buffer), + position.to_point(&buffer.as_snapshot()), height ); BlockProperties { @@ -1221,12 +1225,12 @@ mod tests { } _ => { buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); let edit_count = rng.gen_range(1..=5); + let subscription = buffer.subscribe(); buffer.randomly_edit(&mut rng, edit_count, cx); log::info!("buffer text: {:?}", buffer.text()); - buffer_edits.extend(buffer.edits_since(&v0)); - buffer_snapshot = buffer.snapshot(); + buffer_edits.extend(subscription.consume()); + buffer_snapshot = buffer.snapshot(cx); }); } } @@ -1248,7 +1252,7 @@ mod tests { .iter() .cloned() .map(|(id, block)| { - let mut position = block.position.to_point(buffer); + let mut position = block.position.to_point(&buffer.as_snapshot()); let column = wraps_snapshot.from_point(position, Bias::Left).column(); match block.disposition { BlockDisposition::Above => { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index bdbe002dc116ba433bcb39e0bc6d321ca35a62e9..1b9402bcc5664b8558f59495c464c0d03359babc 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,5 +1,6 @@ use language::{ - Anchor, AnchorRangeExt, BufferSnapshot, Chunk, Edit, Point, PointUtf16, TextSummary, ToOffset, + 
multi_buffer::{Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset}, + Chunk, Edit, Point, PointUtf16, TextSummary, }; use parking_lot::Mutex; use std::{ @@ -189,14 +190,14 @@ impl<'a> FoldMapWriter<'a> { } pub struct FoldMap { - buffer: Mutex, + buffer: Mutex, transforms: Mutex>, folds: SumTree, version: AtomicUsize, } impl FoldMap { - pub fn new(buffer: BufferSnapshot) -> (Self, FoldSnapshot) { + pub fn new(buffer: MultiBufferSnapshot) -> (Self, FoldSnapshot) { let this = Self { buffer: Mutex::new(buffer.clone()), folds: Default::default(), @@ -224,7 +225,7 @@ impl FoldMap { pub fn read( &self, - buffer: BufferSnapshot, + buffer: MultiBufferSnapshot, edits: Vec>, ) -> (FoldSnapshot, Vec) { let edits = self.sync(buffer, edits); @@ -240,7 +241,7 @@ impl FoldMap { pub fn write( &mut self, - buffer: BufferSnapshot, + buffer: MultiBufferSnapshot, edits: Vec>, ) -> (FoldMapWriter, FoldSnapshot, Vec) { let (snapshot, edits) = self.read(buffer, edits); @@ -259,7 +260,7 @@ impl FoldMap { fn sync( &self, - new_buffer: BufferSnapshot, + new_buffer: MultiBufferSnapshot, buffer_edits: Vec>, ) -> Vec { if buffer_edits.is_empty() { @@ -476,7 +477,7 @@ impl FoldMap { pub struct FoldSnapshot { transforms: SumTree, folds: SumTree, - buffer_snapshot: language::BufferSnapshot, + buffer_snapshot: MultiBufferSnapshot, pub version: usize, } @@ -699,7 +700,7 @@ impl FoldSnapshot { } fn intersecting_folds<'a, T>( - buffer: &'a text::BufferSnapshot, + buffer: &'a MultiBufferSnapshot, folds: &'a SumTree, range: Range, inclusive: bool, @@ -850,9 +851,9 @@ impl Default for FoldSummary { } impl sum_tree::Summary for FoldSummary { - type Context = text::BufferSnapshot; + type Context = MultiBufferSnapshot; - fn add_summary(&mut self, other: &Self, buffer: &text::BufferSnapshot) { + fn add_summary(&mut self, other: &Self, buffer: &MultiBufferSnapshot) { if other.min_start.cmp(&self.min_start, buffer).unwrap() == Ordering::Less { self.min_start = other.min_start.clone(); } @@ -876,20 +877,20 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold { - fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::BufferSnapshot) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { self.0.start = summary.start.clone(); self.0.end = summary.end.clone(); } } impl<'a> sum_tree::SeekTarget<'a, FoldSummary, Fold> for Fold { - fn cmp(&self, other: &Self, buffer: &text::BufferSnapshot) -> Ordering { + fn cmp(&self, other: &Self, buffer: &MultiBufferSnapshot) -> Ordering { self.0.cmp(&other.0, buffer).unwrap() } } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { - fn add_summary(&mut self, summary: &'a FoldSummary, _: &text::BufferSnapshot) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &MultiBufferSnapshot) { *self += summary.count; } } @@ -924,7 +925,7 @@ impl<'a> Iterator for FoldBufferRows<'a> { pub struct FoldChunks<'a> { transform_cursor: Cursor<'a, Transform, (FoldOffset, usize)>, - buffer_chunks: language::BufferChunks<'a>, + buffer_chunks: MultiBufferChunks<'a>, buffer_chunk: Option<(usize, Chunk<'a>)>, buffer_offset: usize, output_offset: usize, @@ -1053,7 +1054,7 @@ pub type FoldEdit = Edit; mod tests { use super::*; use crate::ToPoint; - use language::Buffer; + use language::multi_buffer::MultiBuffer; use rand::prelude::*; use std::{env, mem}; use text::RandomCharIter; @@ -1062,8 +1063,9 @@ mod tests { #[gpui::test] fn test_basic_folds(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| 
Buffer::new(0, sample_text(5, 6, 'a'), cx)); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); @@ -1086,8 +1088,7 @@ mod tests { ] ); - let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); + let buffer_snapshot = buffer.update(cx, |buffer, cx| { buffer.edit( vec![ Point::new(0, 0)..Point::new(0, 1), @@ -1096,9 +1097,10 @@ mod tests { "123", cx, ); - (buffer.snapshot(), buffer.edits_since(&v0).collect()) + buffer.snapshot(cx) }); - let (snapshot3, edits) = map.read(buffer_snapshot.clone(), edits); + let (snapshot3, edits) = + map.read(buffer_snapshot.clone(), subscription.consume().into_inner()); assert_eq!(snapshot3.text(), "123a…c123c…eeeee"); assert_eq!( edits, @@ -1114,12 +1116,11 @@ mod tests { ] ); - let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); + let buffer_snapshot = buffer.update(cx, |buffer, cx| { buffer.edit(vec![Point::new(2, 6)..Point::new(4, 3)], "456", cx); - (buffer.snapshot(), buffer.edits_since(&v0).collect()) + buffer.snapshot(cx) }); - let (snapshot4, _) = map.read(buffer_snapshot.clone(), edits); + let (snapshot4, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner()); assert_eq!(snapshot4.text(), "123a…c123456eee"); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); @@ -1130,8 +1131,9 @@ mod tests { #[gpui::test] fn test_adjacent_folds(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "abcdefghijkl", cx)); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer = MultiBuffer::build_simple("abcdefghijkl", cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); { let mut map = FoldMap::new(buffer_snapshot.clone()).0; @@ -1164,20 +1166,20 @@ mod tests { assert_eq!(snapshot.text(), "…fghijkl"); // Edit within one of the folds. 
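// A minimal sketch (illustrative only, mirroring the hunk below) of the edit-tracking
// pattern this patch adopts in the display-map tests: subscribe to the MultiBuffer once,
// mutate it, then feed the consumed edits into the fold map instead of collecting
// `edits_since(&old_version)`. Assumes a gpui test `cx` and the imports of this module.
let buffer = MultiBuffer::build_simple("abcdefghijkl", cx);
let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let (map, _) = FoldMap::new(buffer.read(cx).snapshot(cx));
let buffer_snapshot = buffer.update(cx, |buffer, cx| {
    buffer.edit(vec![0..1], "12345", cx);
    buffer.snapshot(cx)
});
let (snapshot, _) = map.read(buffer_snapshot, subscription.consume().into_inner());
assert_eq!(snapshot.text(), "12345bcdefghijkl");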
- let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); + let buffer_snapshot = buffer.update(cx, |buffer, cx| { buffer.edit(vec![0..1], "12345", cx); - (buffer.snapshot(), buffer.edits_since(&v0).collect()) + buffer.snapshot(cx) }); - let (snapshot, _) = map.read(buffer_snapshot.clone(), edits); + let (snapshot, _) = + map.read(buffer_snapshot.clone(), subscription.consume().into_inner()); assert_eq!(snapshot.text(), "12345…fghijkl"); } } #[gpui::test] fn test_overlapping_folds(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); writer.fold(vec![ @@ -1192,8 +1194,9 @@ mod tests { #[gpui::test] fn test_merging_folds_via_edit(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); @@ -1204,19 +1207,18 @@ mod tests { let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee"); - let (buffer_snapshot, edits) = buffer.update(cx, |buffer, cx| { - let v0 = buffer.version(); + let buffer_snapshot = buffer.update(cx, |buffer, cx| { buffer.edit(Some(Point::new(2, 2)..Point::new(3, 1)), "", cx); - (buffer.snapshot(), buffer.edits_since(&v0).collect()) + buffer.snapshot(cx) }); - let (snapshot, _) = map.read(buffer_snapshot.clone(), edits); + let (snapshot, _) = map.read(buffer_snapshot.clone(), subscription.consume().into_inner()); assert_eq!(snapshot.text(), "aa…eeeee"); } #[gpui::test] fn test_folds_in_range(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(5, 6, 'a'), cx)); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let buffer = buffer.read(cx); @@ -1230,7 +1232,9 @@ mod tests { let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); let fold_ranges = snapshot .folds_in_range(Point::new(1, 0)..Point::new(1, 3)) - .map(|fold| fold.start.to_point(buffer)..fold.end.to_point(buffer)) + .map(|fold| { + fold.start.to_point(&buffer.as_snapshot())..fold.end.to_point(&buffer.as_snapshot()) + }) .collect::>(); assert_eq!( fold_ranges, @@ -1247,12 +1251,10 @@ mod tests { .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let buffer = cx.add_model(|cx| { - let len = rng.gen_range(0..10); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - Buffer::new(0, text, cx) - }); - let buffer_snapshot = buffer.read(cx).snapshot(); + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + let buffer = MultiBuffer::build_simple(&text, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = 
FoldMap::new(buffer_snapshot.clone()).0; let (mut initial_snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); @@ -1260,23 +1262,21 @@ mod tests { for _ in 0..operations { log::info!("text: {:?}", buffer.read(cx).text()); - let buffer_edits = match rng.gen_range(0..=100) { + let mut buffer_edits = Vec::new(); + match rng.gen_range(0..=100) { 0..=59 => { snapshot_edits.extend(map.randomly_mutate(&mut rng)); - vec![] } _ => buffer.update(cx, |buffer, cx| { - let start_version = buffer.version.clone(); + let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count, cx); - let edits = buffer - .edits_since::(&start_version) - .collect::>(); + let edits = subscription.consume().into_inner(); log::info!("editing {:?}", edits); - buffer.edits_since::(&start_version).collect() + buffer_edits.extend(edits); }), }; - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits); snapshot_edits.push((snapshot.clone(), edits)); @@ -1285,8 +1285,8 @@ mod tests { let mut expected_buffer_rows = Vec::new(); let mut next_row = buffer_snapshot.max_point().row; for fold_range in map.merged_fold_ranges().into_iter().rev() { - let fold_start = buffer_snapshot.point_for_offset(fold_range.start).unwrap(); - let fold_end = buffer_snapshot.point_for_offset(fold_range.end).unwrap(); + let fold_start = buffer_snapshot.offset_to_point(fold_range.start); + let fold_end = buffer_snapshot.offset_to_point(fold_range.end); expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev()); next_row = fold_start.row; @@ -1458,9 +1458,9 @@ mod tests { #[gpui::test] fn test_buffer_rows(cx: &mut gpui::MutableAppContext) { let text = sample_text(6, 6, 'a') + "\n"; - let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + let buffer = MultiBuffer::build_simple(&text, cx); - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index c0c67097ba54e8122c7290f5b5c2190fac4d367c..52b4c3e6a2ca127c9c8cabc9147ae38ec6bd85fe 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -435,7 +435,7 @@ impl<'a> Iterator for TabChunks<'a> { mod tests { use super::*; use crate::display_map::fold_map::FoldMap; - use language::Buffer; + use language::multi_buffer::MultiBuffer; use rand::{prelude::StdRng, Rng}; use text::{RandomCharIter, Rope}; @@ -449,12 +449,10 @@ mod tests { #[gpui::test(iterations = 100)] fn test_random(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { let tab_size = rng.gen_range(1..=4); - let buffer = cx.add_model(|cx| { - let len = rng.gen_range(0..30); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - Buffer::new(0, text, cx) - }); - let buffer_snapshot = buffer.read(cx).snapshot(); + let len = rng.gen_range(0..30); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + let buffer = MultiBuffer::build_simple(&text, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); log::info!("Buffer text: {:?}", buffer.read(cx).text()); let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone()); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 
d51f8373ad5b23111b0ad40e57ac1098af511f60..c346ab03c37647c2a2892fdb80eb172112519058 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -974,7 +974,7 @@ mod tests { display_map::{fold_map::FoldMap, tab_map::TabMap}, test::Observer, }; - use language::{Buffer, RandomCharIter}; + use language::{multi_buffer::MultiBuffer, RandomCharIter}; use rand::prelude::*; use std::{cmp, env}; use text::Rope; @@ -1004,12 +1004,12 @@ mod tests { log::info!("Tab size: {}", tab_size); log::info!("Wrap width: {:?}", wrap_width); - let buffer = cx.add_model(|cx| { + let buffer = cx.update(|cx| { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); - Buffer::new(0, text, cx) + MultiBuffer::build_simple(&text, cx) }); - let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); + let buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); log::info!( @@ -1074,15 +1074,15 @@ mod tests { } _ => { buffer.update(&mut cx, |buffer, cx| { - let v0 = buffer.version(); + let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count, cx); - buffer_edits.extend(buffer.edits_since(&v0)); + buffer_edits.extend(subscription.consume()); }); } } - let buffer_snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); + let buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text()); let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits); log::info!( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 848edc643f33bd2158dfa0b90ff34fa9d73ae395..a88d962ba93a14ac5d5b573fc519cfa90cd9cc7c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -20,7 +20,14 @@ use gpui::{ MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle, }; use items::BufferItemHandle; -use language::*; +use language::{ + multi_buffer::{ + Anchor, AnchorRangeExt, AnchorRangeSet, MultiBuffer, MultiBufferSnapshot, SelectionSet, + ToOffset, ToPoint, + }, + BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, + SelectionSetId, +}; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; @@ -29,7 +36,7 @@ use std::{ cmp, collections::HashMap, iter, mem, - ops::{Deref, Range, RangeInclusive}, + ops::{Deref, Range, RangeInclusive, Sub}, rc::Rc, sync::Arc, time::Duration, @@ -273,6 +280,8 @@ pub fn init(cx: &mut MutableAppContext, entry_openers: &mut Vec Range; + fn point_range(&self, buffer: &MultiBufferSnapshot) -> Range; fn display_range(&self, map: &DisplaySnapshot) -> Range; fn spanned_rows( &self, @@ -347,7 +356,7 @@ pub enum SoftWrap { pub struct Editor { handle: WeakViewHandle, - buffer: ModelHandle, + buffer: ModelHandle, display_map: ModelHandle, selection_set_id: SelectionSetId, pending_selection: Option, @@ -422,6 +431,7 @@ impl Editor { cx: &mut ViewContext, ) -> Self { let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let mut view = Self::for_buffer(buffer, build_settings, cx); view.mode = EditorMode::SingleLine; view @@ -433,13 +443,14 @@ impl Editor { cx: &mut ViewContext, ) -> Self { let buffer = 
cx.add_model(|cx| Buffer::new(0, String::new(), cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let mut view = Self::for_buffer(buffer, build_settings, cx); view.mode = EditorMode::AutoHeight { max_lines }; view } pub fn for_buffer( - buffer: ModelHandle, + buffer: ModelHandle, build_settings: impl 'static + Fn(&AppContext) -> EditorSettings, cx: &mut ViewContext, ) -> Self { @@ -454,7 +465,7 @@ impl Editor { } pub fn new( - buffer: ModelHandle, + buffer: ModelHandle, build_settings: Rc EditorSettings>>, cx: &mut ViewContext, ) -> Self { @@ -522,6 +533,7 @@ impl Editor { let buffer = cx.add_model(|cx| { Buffer::new(0, "", cx).with_language(Some(language::PLAIN_TEXT.clone()), None, cx) }); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); workspace.add_item(BufferItemHandle(buffer), cx); } @@ -529,7 +541,7 @@ impl Editor { self.buffer.read(cx).replica_id() } - pub fn buffer(&self) -> &ModelHandle { + pub fn buffer(&self) -> &ModelHandle { &self.buffer } @@ -628,9 +640,9 @@ impl Editor { first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32; last_cursor_bottom = first_cursor_top + 1.; } else { - let mut selections = self.selections::(cx).peekable(); + let selections = self.selections::(cx); first_cursor_top = selections - .peek() + .first() .unwrap() .head() .to_display_point(&display_map) @@ -756,11 +768,10 @@ impl Editor { click_count: usize, cx: &mut ViewContext, ) { + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let tail = self.newest_selection::(cx).tail(); - self.begin_selection(position, false, click_count, cx); - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx); let position = position.to_offset(&display_map, Bias::Left); let tail_anchor = buffer.anchor_before(tail); @@ -842,13 +853,9 @@ impl Editor { } else if click_count > 1 { // Remove the newest selection since it was only added as part of this multi-click. let newest_selection = self.newest_selection::(cx); - self.update_selections::( - self.selections(cx) - .filter(|selection| selection.id != newest_selection.id) - .collect(), - None, - cx, - ) + let mut selections = self.selections(cx); + selections.retain(|selection| selection.id != newest_selection.id); + self.update_selections::(selections, None, cx) } self.pending_selection = Some(PendingSelection { selection, mode }); @@ -895,13 +902,13 @@ impl Editor { let tail = tail.to_display_point(&display_map); self.select_columns(tail, position, overshoot, &display_map, cx); } else if let Some(PendingSelection { selection, mode }) = self.pending_selection.as_mut() { - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let head; let tail; match mode { SelectMode::Character => { head = position.to_point(&display_map); - tail = selection.tail().to_point(buffer); + tail = selection.tail().to_point(&buffer); } SelectMode::Word(original_range) => { let original_display_range = original_range.start.to_display_point(&display_map) @@ -976,7 +983,7 @@ impl Editor { fn end_selection(&mut self, cx: &mut ViewContext) { self.columnar_selection_tail.take(); if self.pending_selection.is_some() { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); self.update_selections(selections, None, cx); } } @@ -1029,19 +1036,20 @@ impl Editor { if self.active_diagnostics.is_some() { self.dismiss_diagnostics(cx); } else if let Some(PendingSelection { selection, .. 
}) = self.pending_selection.take() { - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let selection = Selection { id: selection.id, - start: selection.start.to_point(buffer), - end: selection.end.to_point(buffer), + start: selection.start.to_point(&buffer), + end: selection.end.to_point(&buffer), reversed: selection.reversed, goal: selection.goal, }; - if self.selections::(cx).next().is_none() { + if self.selections::(cx).is_empty() { self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); } } else { - let mut oldest_selection = self.oldest_selection::(cx); + let buffer = self.buffer.read(cx).snapshot(cx); + let mut oldest_selection = self.oldest_selection::(&buffer, cx); if self.selection_count(cx) == 1 { oldest_selection.start = oldest_selection.head().clone(); oldest_selection.end = oldest_selection.head().clone(); @@ -1059,12 +1067,12 @@ impl Editor { I: IntoIterator>, T: ToOffset, { - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let selections = ranges .into_iter() .map(|range| { - let mut start = range.start.to_offset(buffer); - let mut end = range.end.to_offset(buffer); + let mut start = range.start.to_offset(&buffer); + let mut end = range.end.to_offset(&buffer); let reversed = if start > end { mem::swap(&mut start, &mut end); true @@ -1131,15 +1139,15 @@ impl Editor { self.start_transaction(cx); let mut old_selections = SmallVec::<[_; 32]>::new(); { - let selections = self.selections::(cx).collect::>(); - let buffer = self.buffer.read(cx); + let selections = self.selections::(cx); + let buffer = self.buffer.read(cx).snapshot(cx); for selection in selections.iter() { let start_point = selection.start; let indent = buffer .indent_column_for_line(start_point.row) .min(start_point.column); - let start = selection.start.to_offset(buffer); - let end = selection.end.to_offset(buffer); + let start = selection.start.to_offset(&buffer); + let end = selection.end.to_offset(&buffer); let mut insert_extra_newline = false; if let Some(language) = buffer.language() { @@ -1253,7 +1261,7 @@ impl Editor { fn insert(&mut self, text: &str, cx: &mut ViewContext) { self.start_transaction(cx); - let old_selections = self.selections::(cx).collect::>(); + let old_selections = self.selections::(cx); let mut new_selections = Vec::new(); self.buffer.update(cx, |buffer, cx| { let edit_ranges = old_selections.iter().map(|s| s.start..s.end); @@ -1284,19 +1292,20 @@ impl Editor { } fn autoclose_pairs(&mut self, cx: &mut ViewContext) { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| { - let autoclose_pair = buffer.language().and_then(|language| { + let buffer_snapshot = buffer.snapshot(cx); + let autoclose_pair = buffer_snapshot.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start; let pair = language.brackets().iter().find(|pair| { - buffer.contains_str_at( + buffer_snapshot.contains_str_at( first_selection_start.saturating_sub(pair.start.len()), &pair.start, ) }); pair.and_then(|pair| { let should_autoclose = selections[1..].iter().all(|selection| { - buffer.contains_str_at( + buffer_snapshot.contains_str_at( selection.start.saturating_sub(pair.start.len()), &pair.start, ) @@ -1314,7 +1323,7 @@ impl Editor { let selection_ranges = selections .iter() .map(|selection| { - let start = selection.start.to_offset(&*buffer); + let start = 
selection.start.to_offset(&buffer_snapshot); start..start }) .collect::>(); @@ -1344,7 +1353,7 @@ impl Editor { } fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext) -> bool { - let old_selections = self.selections::(cx).collect::>(); + let old_selections = self.selections::(cx); let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() { autoclose_pair_state } else { @@ -1356,12 +1365,12 @@ impl Editor { debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len()); - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); if old_selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair_state.ranges.ranges::(&buffer)) .all(|(selection, autoclose_range)| { - let autoclose_range_end = autoclose_range.end.to_offset(buffer); + let autoclose_range_end = autoclose_range.end.to_offset(&buffer); selection.is_empty() && selection.start == autoclose_range_end }) { @@ -1395,7 +1404,7 @@ impl Editor { pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext) { self.start_transaction(cx); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); for selection in &mut selections { if selection.is_empty() { @@ -1415,7 +1424,7 @@ impl Editor { pub fn delete(&mut self, _: &Delete, cx: &mut ViewContext) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -1434,13 +1443,15 @@ impl Editor { pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext) { self.start_transaction(cx); let tab_size = self.build_settings.borrow()(cx).tab_size; - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let mut last_indent = None; self.buffer.update(cx, |buffer, cx| { for selection in &mut selections { if selection.is_empty() { let char_column = buffer - .chars_for_range(Point::new(selection.start.row, 0)..selection.start) + .as_snapshot() + .text_for_range(Point::new(selection.start.row, 0)..selection.start) + .flat_map(str::chars) .count(); let chars_to_next_tab_stop = tab_size - (char_column % tab_size); buffer.edit( @@ -1504,7 +1515,7 @@ impl Editor { pub fn outdent(&mut self, _: &Outdent, cx: &mut ViewContext) { self.start_transaction(cx); let tab_size = self.build_settings.borrow()(cx).tab_size; - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let mut deletion_ranges = Vec::new(); let mut last_outdent = None; self.buffer.update(cx, |buffer, cx| { @@ -1541,20 +1552,16 @@ impl Editor { buffer.edit(deletion_ranges, "", cx); }); - self.update_selections( - self.selections::(cx).collect(), - Some(Autoscroll::Fit), - cx, - ); + self.update_selections(self.selections::(cx), Some(Autoscroll::Fit), cx); self.end_transaction(cx); } pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut row_delta = 0; let 
mut new_cursors = Vec::new(); @@ -1575,13 +1582,13 @@ impl Editor { } } - let mut edit_start = Point::new(rows.start, 0).to_offset(buffer); + let mut edit_start = Point::new(rows.start, 0).to_offset(&buffer); let edit_end; let cursor_buffer_row; if buffer.max_point().row >= rows.end { // If there's a line after the range, delete the \n from the end of the row range // and position the cursor on the next line. - edit_end = Point::new(rows.end, 0).to_offset(buffer); + edit_end = Point::new(rows.end, 0).to_offset(&buffer); cursor_buffer_row = rows.start; } else { // If there isn't a line after the range, delete the \n from the line before the @@ -1621,7 +1628,7 @@ impl Editor { pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext) { self.start_transaction(cx); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx); @@ -1679,9 +1686,9 @@ impl Editor { pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut edits = Vec::new(); let mut new_selection_ranges = Vec::new(); @@ -1692,7 +1699,7 @@ impl Editor { let mut contiguous_selections = Vec::new(); while let Some(selection) = selections.next() { // Accumulate contiguous regions of rows that we want to move. - contiguous_selections.push(selection.point_range(buffer)); + contiguous_selections.push(selection.point_range(&buffer)); let SpannedRows { mut buffer_rows, mut display_rows, @@ -1706,7 +1713,7 @@ impl Editor { if next_buffer_rows.start <= buffer_rows.end { buffer_rows.end = next_buffer_rows.end; display_rows.end = next_display_rows.end; - contiguous_selections.push(next_selection.point_range(buffer)); + contiguous_selections.push(next_selection.point_range(&buffer)); selections.next().unwrap(); } else { break; @@ -1715,13 +1722,13 @@ impl Editor { // Cut the text from the selected rows and paste it at the start of the previous line. if display_rows.start != 0 { - let start = Point::new(buffer_rows.start, 0).to_offset(buffer); + let start = Point::new(buffer_rows.start, 0).to_offset(&buffer); let end = Point::new(buffer_rows.end - 1, buffer.line_len(buffer_rows.end - 1)) - .to_offset(buffer); + .to_offset(&buffer); let prev_row_display_start = DisplayPoint::new(display_rows.start - 1, 0); let prev_row_buffer_start = display_map.prev_row_boundary(prev_row_display_start).1; - let prev_row_buffer_start_offset = prev_row_buffer_start.to_offset(buffer); + let prev_row_buffer_start_offset = prev_row_buffer_start.to_offset(&buffer); let mut text = String::new(); text.extend(buffer.text_for_range(start..end)); @@ -1743,8 +1750,8 @@ impl Editor { // Move folds up. 
old_folds.push(start..end); for fold in display_map.folds_in_range(start..end) { - let mut start = fold.start.to_point(buffer); - let mut end = fold.end.to_point(buffer); + let mut start = fold.start.to_point(&buffer); + let mut end = fold.end.to_point(&buffer); start.row -= row_delta; end.row -= row_delta; new_folds.push(start..end); @@ -1769,9 +1776,9 @@ impl Editor { pub fn move_line_down(&mut self, _: &MoveLineDown, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut edits = Vec::new(); let mut new_selection_ranges = Vec::new(); @@ -1782,7 +1789,7 @@ impl Editor { let mut contiguous_selections = Vec::new(); while let Some(selection) = selections.next() { // Accumulate contiguous regions of rows that we want to move. - contiguous_selections.push(selection.point_range(buffer)); + contiguous_selections.push(selection.point_range(&buffer)); let SpannedRows { mut buffer_rows, mut display_rows, @@ -1795,7 +1802,7 @@ impl Editor { if next_buffer_rows.start <= buffer_rows.end { buffer_rows.end = next_buffer_rows.end; display_rows.end = next_display_rows.end; - contiguous_selections.push(next_selection.point_range(buffer)); + contiguous_selections.push(next_selection.point_range(&buffer)); selections.next().unwrap(); } else { break; @@ -1804,14 +1811,14 @@ impl Editor { // Cut the text from the selected rows and paste it at the end of the next line. if display_rows.end <= display_map.max_point().row() { - let start = Point::new(buffer_rows.start, 0).to_offset(buffer); + let start = Point::new(buffer_rows.start, 0).to_offset(&buffer); let end = Point::new(buffer_rows.end - 1, buffer.line_len(buffer_rows.end - 1)) - .to_offset(buffer); + .to_offset(&buffer); let next_row_display_end = DisplayPoint::new(display_rows.end, display_map.line_len(display_rows.end)); let next_row_buffer_end = display_map.next_row_boundary(next_row_display_end).1; - let next_row_buffer_end_offset = next_row_buffer_end.to_offset(buffer); + let next_row_buffer_end_offset = next_row_buffer_end.to_offset(&buffer); let mut text = String::new(); text.push('\n'); @@ -1830,8 +1837,8 @@ impl Editor { // Move folds down. 
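// Illustrative only (hypothetical helper, not part of this patch): the resolution pattern
// this commit standardizes on: capture a single MultiBufferSnapshot up front, then resolve
// points against that snapshot rather than against the buffer model. Assumes a non-empty
// row range, as in the surrounding line-move code.
fn row_range_to_offsets(
    buffer: &MultiBufferSnapshot,
    rows: std::ops::Range<u32>,
) -> std::ops::Range<usize> {
    // Offset of the first character of the range's first row.
    let start = Point::new(rows.start, 0).to_offset(buffer);
    // Offset of the end of the range's last row (rows.end is exclusive).
    let end = Point::new(rows.end - 1, buffer.line_len(rows.end - 1)).to_offset(buffer);
    start..end
}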
old_folds.push(start..end); for fold in display_map.folds_in_range(start..end) { - let mut start = fold.start.to_point(buffer); - let mut end = fold.end.to_point(buffer); + let mut start = fold.start.to_point(&buffer); + let mut end = fold.end.to_point(&buffer); start.row += row_delta; end.row += row_delta; new_folds.push(start..end); @@ -1856,7 +1863,7 @@ impl Editor { pub fn cut(&mut self, _: &Cut, cx: &mut ViewContext) { self.start_transaction(cx); let mut text = String::new(); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let mut clipboard_selections = Vec::with_capacity(selections.len()); { let buffer = self.buffer.read(cx); @@ -1887,7 +1894,7 @@ impl Editor { } pub fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let buffer = self.buffer.read(cx); let max_point = buffer.max_point(); let mut text = String::new(); @@ -1919,7 +1926,7 @@ impl Editor { if let Some(item) = cx.as_mut().read_from_clipboard() { let clipboard_text = item.text(); if let Some(mut clipboard_selections) = item.metadata::>() { - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let all_selections_were_entire_line = clipboard_selections.iter().all(|s| s.is_entire_line); if clipboard_selections.len() != selections.len() { @@ -1950,7 +1957,8 @@ impl Editor { // selection was copied. If this selection is also currently empty, // then paste the line before the current line of the buffer. let range = if selection.is_empty() && entire_line { - let column = selection.start.to_point(&*buffer).column as usize; + let column = + selection.start.to_point(&*buffer.as_snapshot()).column as usize; let line_start = selection.start - column; line_start..line_start } else { @@ -1982,7 +1990,7 @@ impl Editor { pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2004,7 +2012,7 @@ impl Editor { pub fn select_left(&mut self, _: &SelectLeft, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::left(&display_map, head) @@ -2018,7 +2026,7 @@ impl Editor { pub fn move_right(&mut self, _: &MoveRight, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2040,7 +2048,7 @@ impl Editor { pub fn select_right(&mut self, _: &SelectRight, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let 
cursor = movement::right(&display_map, head) @@ -2059,7 +2067,7 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2079,7 +2087,7 @@ impl Editor { pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let (head, goal) = movement::up(&display_map, head, selection.goal).unwrap(); @@ -2097,7 +2105,7 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2117,7 +2125,7 @@ impl Editor { pub fn select_down(&mut self, _: &SelectDown, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let (head, goal) = movement::down(&display_map, head, selection.goal).unwrap(); @@ -2134,7 +2142,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::prev_word_boundary(&display_map, head).to_point(&display_map); @@ -2152,7 +2160,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::prev_word_boundary(&display_map, head).to_point(&display_map); @@ -2169,7 +2177,7 @@ impl Editor { ) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -2190,7 +2198,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::next_word_boundary(&display_map, head).to_point(&display_map); @@ -2208,7 +2216,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections 
= self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::next_word_boundary(&display_map, head).to_point(&display_map); @@ -2225,7 +2233,7 @@ impl Editor { ) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -2246,7 +2254,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_beginning(&display_map, head, true); @@ -2265,7 +2273,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_beginning(&display_map, head, *toggle_indent); @@ -2288,7 +2296,7 @@ impl Editor { pub fn move_to_end_of_line(&mut self, _: &MoveToEndOfLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); { for selection in &mut selections { let head = selection.head().to_display_point(&display_map); @@ -2305,7 +2313,7 @@ impl Editor { pub fn select_to_end_of_line(&mut self, _: &SelectToEndOfLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_end(&display_map, head); @@ -2378,7 +2386,7 @@ impl Editor { pub fn select_line(&mut self, _: &SelectLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let buffer = self.buffer.read(cx); let max_point = buffer.max_point(); for selection in &mut selections { @@ -2395,7 +2403,7 @@ impl Editor { _: &SplitSelectionIntoLines, cx: &mut ViewContext, ) { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let buffer = self.buffer.read(cx); let mut to_unfold = Vec::new(); @@ -2434,7 +2442,7 @@ impl Editor { fn add_selection(&mut self, above: bool, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let mut state = self.add_selections_state.take().unwrap_or_else(|| { let oldest_selection = selections.iter().min_by_key(|s| s.id).unwrap().clone(); let range = oldest_selection.display_range(&display_map).sorted(); @@ -2529,7 +2537,7 @@ impl Editor { let replace_newest = action.0; let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = 
&display_map.buffer_snapshot; - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); if let Some(mut select_next_state) = self.select_next_state.take() { let query = &select_next_state.query; if !select_next_state.done { @@ -2636,11 +2644,12 @@ impl Editor { let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; self.start_transaction(cx); - let mut selections = self.selections::(cx).collect::>(); + let mut selections = self.selections::(cx); let mut all_selection_lines_are_comments = true; let mut edit_ranges = Vec::new(); let mut last_toggled_row = None; self.buffer.update(cx, |buffer, cx| { + let buffer_snapshot = buffer.snapshot(cx); for selection in &mut selections { edit_ranges.clear(); @@ -2660,12 +2669,12 @@ impl Editor { last_toggled_row = Some(row); } - if buffer.is_line_blank(row) { + if buffer_snapshot.is_line_blank(row) { continue; } - let start = Point::new(row, buffer.indent_column_for_line(row)); - let mut line_bytes = buffer + let start = Point::new(row, buffer_snapshot.indent_column_for_line(row)); + let mut line_bytes = buffer_snapshot .bytes_in_range(start..buffer.max_point()) .flatten() .copied(); @@ -2712,11 +2721,7 @@ impl Editor { } }); - self.update_selections( - self.selections::(cx).collect(), - Some(Autoscroll::Fit), - cx, - ); + self.update_selections(self.selections::(cx), Some(Autoscroll::Fit), cx); self.end_transaction(cx); } @@ -2725,9 +2730,9 @@ impl Editor { _: &SelectLargerSyntaxNode, cx: &mut ViewContext, ) { - let old_selections = self.selections::(cx).collect::>(); + let old_selections = self.selections::(cx).into_boxed_slice(); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut stack = mem::take(&mut self.select_larger_syntax_node_stack); let mut selected_larger_node = false; @@ -2783,8 +2788,8 @@ impl Editor { _: &MoveToEnclosingBracket, cx: &mut ViewContext, ) { - let mut selections = self.selections::(cx).collect::>(); - let buffer = self.buffer.read(cx.as_ref()); + let mut selections = self.selections::(cx); + let buffer = self.buffer.read(cx).snapshot(cx); for selection in &mut selections { if let Some((open_range, close_range)) = buffer.enclosing_bracket_ranges(selection.start..selection.end) @@ -2806,12 +2811,12 @@ impl Editor { } pub fn show_next_diagnostic(&mut self, _: &ShowNextDiagnostic, cx: &mut ViewContext) { + let buffer = self.buffer.read(cx).snapshot(cx); let selection = self.newest_selection::(cx); - let buffer = self.buffer.read(cx.as_ref()); let active_primary_range = self.active_diagnostics.as_ref().map(|active_diagnostics| { active_diagnostics .primary_range - .to_offset(buffer) + .to_offset(&buffer) .to_inclusive() }); let mut search_start = if let Some(active_primary_range) = active_primary_range.as_ref() { @@ -2863,8 +2868,8 @@ impl Editor { fn refresh_active_diagnostics(&mut self, cx: &mut ViewContext) { if let Some(active_diagnostics) = self.active_diagnostics.as_mut() { - let buffer = self.buffer.read(cx); - let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer); + let buffer = self.buffer.read(cx).snapshot(cx); + let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); let is_valid = buffer .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) .any(|(range, diagnostic)| { @@ -2895,7 +2900,7 @@ impl Editor { fn activate_diagnostics(&mut self, group_id: 
usize, cx: &mut ViewContext) { self.dismiss_diagnostics(cx); self.active_diagnostics = self.display_map.update(cx, |display_map, cx| { - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut primary_range = None; let mut primary_message = None; @@ -3010,7 +3015,7 @@ impl Editor { set_id: SelectionSetId, range: Range, cx: &'a mut MutableAppContext, - ) -> impl 'a + Iterator> { + ) -> Vec> { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx); @@ -3036,14 +3041,10 @@ impl Editor { let range = (range.start.to_offset(&display_map, Bias::Left), Bias::Left) ..(range.end.to_offset(&display_map, Bias::Left), Bias::Right); - let selections = self - .buffer - .read(cx) + buffer .selection_set(set_id) .unwrap() - .intersecting_selections::(range, buffer); - - selections + .intersecting_selections::(range, &buffer.as_snapshot()) .map(move |s| Selection { id: s.id, start: s.start.to_display_point(&display_map), @@ -3052,15 +3053,17 @@ impl Editor { goal: s.goal, }) .chain(pending_selection) + .collect() } - pub fn selections<'a, D>(&self, cx: &'a AppContext) -> impl 'a + Iterator> + pub fn selections<'a, D>(&self, cx: &'a AppContext) -> Vec> where - D: 'a + TextDimension + Ord, + D: 'a + TextDimension + Ord + Sub, { - let buffer = self.buffer.read(cx); - let mut selections = self.selection_set(cx).selections::(buffer).peekable(); + let buffer = self.buffer.read(cx).snapshot(cx); + let mut selections = self.selection_set(cx).selections::(&buffer).peekable(); let mut pending_selection = self.pending_selection(cx); + iter::from_fn(move || { if let Some(pending) = pending_selection.as_mut() { while let Some(next_selection) = selections.peek() { @@ -3084,14 +3087,18 @@ impl Editor { selections.next() } }) + .collect() } - fn pending_selection(&self, cx: &AppContext) -> Option> { - let buffer = self.buffer.read(cx); + fn pending_selection>( + &self, + cx: &AppContext, + ) -> Option> { + let buffer = self.buffer.read(cx).as_snapshot(); self.pending_selection.as_ref().map(|pending| Selection { id: pending.selection.id, - start: pending.selection.start.summary::(buffer), - end: pending.selection.end.summary::(buffer), + start: pending.selection.start.summary::(&buffer), + end: pending.selection.end.summary::(&buffer), reversed: pending.selection.reversed, goal: pending.selection.goal, }) @@ -3105,18 +3112,26 @@ impl Editor { selection_count } - pub fn oldest_selection(&self, cx: &AppContext) -> Selection { - let buffer = self.buffer.read(cx); + pub fn oldest_selection>( + &self, + snapshot: &MultiBufferSnapshot, + cx: &AppContext, + ) -> Selection { self.selection_set(cx) - .oldest_selection(buffer) + .oldest_selection(snapshot) .or_else(|| self.pending_selection(cx)) .unwrap() } - pub fn newest_selection(&self, cx: &AppContext) -> Selection { - let buffer = self.buffer.read(cx); + pub fn newest_selection>( + &self, + cx: &AppContext, + ) -> Selection { self.pending_selection(cx) - .or_else(|| self.selection_set(cx).newest_selection(buffer)) + .or_else(|| { + self.selection_set(cx) + .newest_selection(&self.buffer.read(cx).as_snapshot()) + }) .unwrap() } @@ -3136,7 +3151,7 @@ impl Editor { T: ToOffset + ToPoint + Ord + std::marker::Copy + std::fmt::Debug, { // Merge overlapping selections. 
- let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).snapshot(cx); let mut i = 1; while i < selections.len() { if selections[i - 1].end >= selections[i].start { @@ -3161,9 +3176,9 @@ impl Editor { if selections.len() == autoclose_pair_state.ranges.len() { selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair_state.ranges.ranges::(&buffer)) .all(|(selection, autoclose_range)| { - let head = selection.head().to_point(&*buffer); + let head = selection.head().to_point(&buffer); autoclose_range.start <= head && autoclose_range.end >= head }) } else { @@ -3222,7 +3237,7 @@ impl Editor { pub fn fold(&mut self, _: &Fold, cx: &mut ViewContext) { let mut fold_ranges = Vec::new(); - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); for selection in selections { let range = selection.display_range(&display_map).sorted(); @@ -3245,7 +3260,7 @@ impl Editor { } pub fn unfold(&mut self, _: &Unfold, cx: &mut ViewContext) { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx); let ranges = selections @@ -3306,12 +3321,17 @@ impl Editor { pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext) { let selections = self.selections::(cx); - let ranges = selections.map(|s| s.start..s.end).collect(); + let ranges = selections.into_iter().map(|s| s.start..s.end); self.fold_ranges(ranges, cx); } - fn fold_ranges(&mut self, ranges: Vec>, cx: &mut ViewContext) { - if !ranges.is_empty() { + fn fold_ranges( + &mut self, + ranges: impl IntoIterator>, + cx: &mut ViewContext, + ) { + let mut ranges = ranges.into_iter().peekable(); + if ranges.peek().is_some() { self.display_map.update(cx, |map, cx| map.fold(ranges, cx)); self.request_autoscroll(Autoscroll::Fit, cx); cx.notify(); @@ -3414,14 +3434,14 @@ impl Editor { self.show_local_cursors } - fn on_buffer_changed(&mut self, _: ModelHandle, cx: &mut ViewContext) { + fn on_buffer_changed(&mut self, _: ModelHandle, cx: &mut ViewContext) { self.refresh_active_diagnostics(cx); cx.notify(); } fn on_buffer_event( &mut self, - _: ModelHandle, + _: ModelHandle, event: &language::Event, cx: &mut ViewContext, ) { @@ -3596,10 +3616,36 @@ impl View for Editor { } } -impl SelectionExt for Selection { +impl SelectionExt for Selection { + fn point_range(&self, buffer: &MultiBufferSnapshot) -> Range { + let start = self.start.to_point(buffer); + let end = self.end.to_point(buffer); + if self.reversed { + end..start + } else { + start..end + } + } + + fn offset_range(&self, buffer: &MultiBufferSnapshot) -> Range { + let start = self.start.to_offset(buffer); + let end = self.end.to_offset(buffer); + if self.reversed { + end..start + } else { + start..end + } + } + fn display_range(&self, map: &DisplaySnapshot) -> Range { - let start = self.start.to_display_point(map); - let end = self.end.to_display_point(map); + let start = self + .start + .to_point(&map.buffer_snapshot) + .to_display_point(map); + let end = self + .end + .to_point(&map.buffer_snapshot) + .to_display_point(map); if self.reversed { end..start } else { @@ -3612,8 +3658,14 @@ impl SelectionExt for Selection { include_end_if_at_line_start: bool, map: &DisplaySnapshot, ) -> SpannedRows { - let display_start = self.start.to_display_point(map); - let mut display_end = 
self.end.to_display_point(map); + let display_start = self + .start + .to_point(&map.buffer_snapshot) + .to_display_point(map); + let mut display_end = self + .end + .to_point(&map.buffer_snapshot) + .to_display_point(map); if !include_end_if_at_line_start && display_end.row() != map.max_point().row() && display_start.row() != display_end.row() @@ -3667,13 +3719,14 @@ pub fn diagnostic_style( #[cfg(test)] mod tests { use super::*; + use language::LanguageConfig; use text::Point; use unindent::Unindent; use util::test::sample_text; #[gpui::test] fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx)); + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); let settings = EditorSettings::test(cx); let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); @@ -3740,7 +3793,7 @@ mod tests { #[gpui::test] fn test_canceling_pending_selection(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx)); + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); let settings = EditorSettings::test(cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); @@ -3772,7 +3825,7 @@ mod tests { #[gpui::test] fn test_cancel(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx)); + let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); let settings = EditorSettings::test(cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); @@ -3812,30 +3865,27 @@ mod tests { #[gpui::test] fn test_fold(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| { - Buffer::new( - 0, - " - impl Foo { - // Hello! + let buffer = MultiBuffer::build_simple( + &" + impl Foo { + // Hello! 
- fn a() { - 1 - } + fn a() { + 1 + } - fn b() { - 2 - } + fn b() { + 2 + } - fn c() { - 3 - } + fn c() { + 3 } - " - .unindent(), - cx, - ) - }); + } + " + .unindent(), + cx, + ); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -3903,7 +3953,7 @@ mod tests { #[gpui::test] fn test_move_cursor(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); + let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -3980,7 +4030,7 @@ mod tests { #[gpui::test] fn test_move_cursor_multibyte(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "ⓐⓑⓒⓓⓔ\nabcde\nαβγδε\n", cx)); + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcde\nαβγδε\n", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4038,7 +4088,7 @@ mod tests { #[gpui::test] fn test_move_cursor_different_line_lengths(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx)); + let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4069,7 +4119,7 @@ mod tests { #[gpui::test] fn test_beginning_end_of_line(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\n def", cx)); + let buffer = MultiBuffer::build_simple("abc\n def", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { @@ -4211,8 +4261,7 @@ mod tests { #[gpui::test] fn test_prev_next_word_boundary(cx: &mut gpui::MutableAppContext) { - let buffer = - cx.add_model(|cx| Buffer::new(0, "use std::str::{foo, bar}\n\n {baz.qux()}", cx)); + let buffer = MultiBuffer::build_simple("use std::str::{foo, bar}\n\n {baz.qux()}", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { @@ -4351,8 +4400,7 @@ mod tests { #[gpui::test] fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut gpui::MutableAppContext) { - let buffer = - cx.add_model(|cx| Buffer::new(0, "use one::{\n two::three::four::five\n};", cx)); + let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); @@ -4406,7 +4454,7 @@ mod tests { #[gpui::test] fn test_delete_to_word_boundary(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "one two three four", cx)); + let buffer = MultiBuffer::build_simple("one two three four", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4447,7 +4495,7 @@ mod tests { #[gpui::test] fn test_newline(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "aaaa\n bbbb\n", cx)); + let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx); let settings = 
EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4471,7 +4519,7 @@ mod tests { #[gpui::test] fn test_indent_outdent(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, " one two\nthree\n four", cx)); + let buffer = MultiBuffer::build_simple(" one two\nthree\n four", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4532,13 +4580,8 @@ mod tests { #[gpui::test] fn test_backspace(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| { - Buffer::new( - 0, - "one two three\nfour five six\nseven eight nine\nten\n", - cx, - ) - }); + let buffer = + MultiBuffer::build_simple("one two three\nfour five six\nseven eight nine\nten\n", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4568,13 +4611,8 @@ mod tests { #[gpui::test] fn test_delete(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| { - Buffer::new( - 0, - "one two three\nfour five six\nseven eight nine\nten\n", - cx, - ) - }); + let buffer = + MultiBuffer::build_simple("one two three\nfour five six\nseven eight nine\nten\n", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -4605,7 +4643,7 @@ mod tests { #[gpui::test] fn test_delete_line(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndef\nghi\n", cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges( @@ -4629,7 +4667,7 @@ mod tests { }); let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndef\nghi\n", cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges(&[DisplayPoint::new(2, 0)..DisplayPoint::new(0, 1)], cx) @@ -4646,7 +4684,7 @@ mod tests { #[gpui::test] fn test_duplicate_line(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndef\nghi\n", cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges( @@ -4673,7 +4711,7 @@ mod tests { }); let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndef\nghi\n", cx)); + let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges( @@ -4699,7 +4737,7 @@ mod tests { #[gpui::test] fn test_move_line_up_down(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(10, 5, 'a'), cx)); + let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, 
|view, cx| { view.fold_ranges( @@ -4795,7 +4833,7 @@ mod tests { #[gpui::test] fn test_clipboard(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "one✅ two three four five six ", cx)); + let buffer = MultiBuffer::build_simple("one✅ two three four five six ", cx); let settings = EditorSettings::test(&cx); let view = cx .add_window(Default::default(), |cx| { @@ -4930,7 +4968,7 @@ mod tests { #[gpui::test] fn test_select_all(cx: &mut gpui::MutableAppContext) { - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\nde\nfgh", cx)); + let buffer = MultiBuffer::build_simple("abc\nde\nfgh", cx); let settings = EditorSettings::test(&cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { @@ -4945,7 +4983,7 @@ mod tests { #[gpui::test] fn test_select_line(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 5, 'a'), cx)); + let buffer = MultiBuffer::build_simple(&sample_text(6, 5, 'a'), cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.select_display_ranges( @@ -4991,7 +5029,7 @@ mod tests { #[gpui::test] fn test_split_selection_into_lines(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(9, 5, 'a'), cx)); + let buffer = MultiBuffer::build_simple(&sample_text(9, 5, 'a'), cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { view.fold_ranges( @@ -5059,7 +5097,7 @@ mod tests { #[gpui::test] fn test_add_selection_above_below(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(&cx); - let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefghi\n\njk\nlmno\n", cx)); + let buffer = MultiBuffer::build_simple("abc\ndefghi\n\njk\nlmno\n", cx); let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, settings, cx)); view.update(cx, |view, cx| { @@ -5247,8 +5285,9 @@ mod tests { .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) + view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; view.update(&mut cx, |view, cx| { @@ -5403,8 +5442,9 @@ mod tests { .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) + view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; view.update(&mut cx, |view, cx| { @@ -5503,6 +5543,7 @@ mod tests { .unindent(); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); view.update(&mut cx, |editor, cx| { @@ -5600,8 +5641,9 @@ mod tests { ); let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, None, cx)); + let buffer = cx.add_model(|cx| 
MultiBuffer::singleton(buffer, cx)); let (_, view) = cx.add_window(|cx| build_editor(buffer, settings, cx)); - view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing()) + view.condition(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) .await; view.update(&mut cx, |view, cx| { @@ -5659,7 +5701,7 @@ mod tests { } fn build_editor( - buffer: ModelHandle, + buffer: ModelHandle, settings: EditorSettings, cx: &mut ViewContext, ) -> Editor { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index a53fdd3a8bb1f8f2d1600dba7b8f5757d88ccf9b..a878bc4a17d538ea3f26a66da10dccd53534a38f 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -19,7 +19,7 @@ use gpui::{ MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle, }; use json::json; -use language::{Chunk, ToPoint}; +use language::{multi_buffer::ToPoint, Chunk}; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -738,13 +738,11 @@ impl Element for EditorElement { self.update_view(cx.app, |view, cx| { highlighted_row = view.highlighted_row(); for selection_set_id in view.active_selection_sets(cx).collect::>() { - let replica_selections = view - .intersecting_selections( - selection_set_id, - DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0), - cx, - ) - .collect::>(); + let replica_selections = view.intersecting_selections( + selection_set_id, + DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0), + cx, + ); for selection in &replica_selections { if selection_set_id == view.selection_set_id { let is_empty = selection.start == selection.end; @@ -1165,14 +1163,13 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 { mod tests { use super::*; use crate::{Editor, EditorSettings}; - use language::Buffer; + use language::{MultiBuffer}; use util::test::sample_text; #[gpui::test] fn test_layout_line_numbers(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(cx); - - let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); + let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); let (window_id, editor) = cx.add_window(Default::default(), |cx| { Editor::for_buffer( buffer, diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f4261c30bbf306446a70a97b5b49ec39c5310165..1c7b4a25f5b32fc0e12361545e527f06f7e73047 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -5,12 +5,15 @@ use gpui::{ MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{Buffer, Diagnostic, File as _}; +use language::{ + multi_buffer::{MultiBuffer, ToPoint as _}, + Diagnostic, File as _, +}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; use std::path::Path; -use text::{Point, Selection, ToPoint}; +use text::{Point, Selection}; use workspace::{ settings, EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView, WeakItemHandle, @@ -19,10 +22,10 @@ use workspace::{ pub struct BufferOpener; #[derive(Clone)] -pub struct BufferItemHandle(pub ModelHandle); +pub struct BufferItemHandle(pub ModelHandle); #[derive(Clone)] -struct WeakBufferItemHandle(WeakModelHandle); +struct WeakBufferItemHandle(WeakModelHandle); impl EntryOpener for BufferOpener { fn open( @@ -32,10 +35,10 @@ impl EntryOpener for BufferOpener { cx: &mut ModelContext, ) -> Option>>> { let buffer = worktree.open_buffer(project_path.path, cx); - let task = cx.spawn(|_, _| async move 
{ - buffer - .await - .map(|buffer| Box::new(BufferItemHandle(buffer)) as Box) + let task = cx.spawn(|_, mut cx| async move { + let buffer = buffer.await?; + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); + Ok(Box::new(BufferItemHandle(buffer)) as Box) }); Some(task) } @@ -102,7 +105,7 @@ impl ItemHandle for BufferItemHandle { } fn project_path(&self, cx: &AppContext) -> Option { - self.0.read(cx).file().map(|f| ProjectPath { + self.0.read(cx).file(cx).map(|f| ProjectPath { worktree_id: f.worktree_id(), path: f.path().clone(), }) @@ -137,7 +140,7 @@ impl ItemView for Editor { let filename = self .buffer() .read(cx) - .file() + .file(cx) .and_then(|file| file.file_name()); if let Some(name) = filename { name.to_string_lossy().into() @@ -147,7 +150,7 @@ impl ItemView for Editor { } fn project_path(&self, cx: &AppContext) -> Option { - self.buffer().read(cx).file().map(|file| ProjectPath { + self.buffer().read(cx).file(cx).map(|file| ProjectPath { worktree_id: file.worktree_id(), path: file.path().clone(), }) @@ -174,7 +177,14 @@ impl ItemView for Editor { path: &Path, cx: &mut ViewContext, ) -> Task> { - self.buffer().update(cx, |buffer, cx| { + let buffer = self + .buffer() + .read(cx) + .as_singleton() + .expect("cannot call save_as on an excerpt list") + .clone(); + + buffer.update(cx, |buffer, cx| { let handle = cx.handle(); let text = buffer.as_rope().clone(); let version = buffer.version(); @@ -237,7 +247,7 @@ impl CursorPosition { fn update_position(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); - let buffer = editor.buffer().read(cx); + let buffer = editor.buffer().read(cx).snapshot(cx); self.selected_count = 0; let mut last_selection: Option> = None; @@ -250,7 +260,7 @@ impl CursorPosition { last_selection = Some(selection); } } - self.position = last_selection.map(|s| s.head().to_point(buffer)); + self.position = last_selection.map(|s| s.head().to_point(&buffer)); cx.notify(); } diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 9ba6cbc08dfd4a91e23e6195bad9cecfe5deb585..44cb1ebcf59fb4bf44a6c60ebe06a44f0d0970c4 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -1,7 +1,7 @@ use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; use anyhow::Result; +use language::multi_buffer::ToPoint; use std::{cmp, ops::Range}; -use text::ToPoint; pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result { if point.column() > 0 { @@ -244,7 +244,8 @@ fn char_kind(c: char) -> CharKind { #[cfg(test)] mod tests { use super::*; - use crate::{display_map::DisplayMap, Buffer}; + use crate::display_map::DisplayMap; + use language::MultiBuffer; #[gpui::test] fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) { @@ -256,7 +257,7 @@ mod tests { .unwrap(); let font_size = 14.0; - let buffer = cx.add_model(|cx| Buffer::new(0, "a bcΔ defγ hi—jk", cx)); + let buffer = MultiBuffer::build_simple("a bcΔ defγ hi—jk", cx); let display_map = cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, None, cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -312,7 +313,7 @@ mod tests { .select_font(family_id, &Default::default()) .unwrap(); let font_size = 14.0; - let buffer = cx.add_model(|cx| Buffer::new(0, "lorem ipsum dolor\n sit", cx)); + let buffer = MultiBuffer::build_simple("lorem ipsum dolor\n sit", cx); let display_map = cx.add_model(|cx| DisplayMap::new(buffer, tab_size, font_id, font_size, 
None, cx)); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index dbd36b1139402e535a8ee3c3fcf2257b503903bb..b35a6fe0025c403e408cddaaca295d4701c8cb94 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -67,7 +67,7 @@ impl GoToLine { let (restore_state, cursor_point, max_point) = active_editor.update(cx, |editor, cx| { let restore_state = Some(RestoreState { scroll_position: editor.scroll_position(cx), - selections: editor.selections::(cx).collect(), + selections: editor.selections::(cx), }); ( diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 16dbd1c140e450f1def33665ee3ac9ba0c3af380..3fa611ee0405d02ecf080a9e474bbdd647977bb1 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,6 +1,6 @@ mod buffer; mod highlight_map; -mod multi_buffer; +pub mod multi_buffer; pub mod proto; #[cfg(test)] mod tests; @@ -12,6 +12,7 @@ use gpui::{executor::Background, AppContext}; use highlight_map::HighlightMap; use lazy_static::lazy_static; use lsp::LanguageServer; +pub use multi_buffer::MultiBuffer; use parking_lot::Mutex; use serde::Deserialize; use std::{collections::HashSet, path::Path, str, sync::Arc}; diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 6e545e3fd25f84c3a72f4008e388231f403721cd..12834a53d3b9c614dbb5c65e5517904d5abaae89 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -5,20 +5,25 @@ mod selection; use self::location::*; use crate::{ buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, - BufferSnapshot, + BufferSnapshot, Diagnostic, File, Language, }; +use anyhow::Result; +use clock::ReplicaId; use collections::HashMap; -use gpui::{AppContext, Entity, ModelContext, ModelHandle}; -use parking_lot::Mutex; -use std::{cmp, ops::Range}; +use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; +use parking_lot::{Mutex, MutexGuard}; +use std::{cmp, io, ops::Range, sync::Arc, time::SystemTime}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ rope::TextDimension, subscription::{Subscription, Topic}, - AnchorRangeExt, Edit, Point, PointUtf16, TextSummary, + AnchorRangeExt as _, Edit, Point, PointUtf16, Selection, SelectionSetId, TextSummary, }; use theme::SyntaxTheme; +pub use anchor::{Anchor, AnchorRangeExt, AnchorRangeMap, AnchorRangeSet}; +pub use selection::SelectionSet; + const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; #[derive(Default)] @@ -28,11 +33,11 @@ pub struct MultiBuffer { subscriptions: Topic, } -pub trait ToOffset { +pub trait ToOffset: 'static { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize; } -pub trait ToPoint { +pub trait ToPoint: 'static { fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point; } @@ -69,7 +74,7 @@ struct ExcerptSummary { text: TextSummary, } -pub struct Chunks<'a> { +pub struct MultiBufferChunks<'a> { range: Range, cursor: Cursor<'a, Excerpt, usize>, header_height: u8, @@ -77,20 +82,155 @@ pub struct Chunks<'a> { theme: Option<&'a SyntaxTheme>, } +pub struct MultiBufferBytes<'a> { + chunks: MultiBufferChunks<'a>, +} + impl MultiBuffer { pub fn new() -> Self { Self::default() } + pub fn singleton(buffer: ModelHandle, cx: &mut ModelContext) -> Self { + let mut this = Self::new(); + this.push( + ExcerptProperties { + buffer: &buffer, + range: text::Anchor::min()..text::Anchor::max(), + 
header_height: 0, + }, + cx, + ); + this + } + + pub fn build_simple(text: &str, cx: &mut MutableAppContext) -> ModelHandle { + let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); + cx.add_model(|cx| Self::singleton(buffer, cx)) + } + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); self.snapshot.lock().clone() } + pub fn as_snapshot(&self) -> MutexGuard { + self.snapshot.lock() + } + + pub fn as_singleton(&self) -> Option<&ModelHandle> { + if self.buffers.len() == 1 { + return Some(&self.buffers.values().next().unwrap().buffer); + } else { + None + } + } + pub fn subscribe(&mut self) -> Subscription { self.subscriptions.subscribe() } + pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, false, cx) + } + + pub fn edit_with_autoindent( + &mut self, + ranges_iter: I, + new_text: T, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + self.edit_internal(ranges_iter, new_text, true, cx) + } + + pub fn edit_internal( + &mut self, + ranges_iter: I, + new_text: T, + autoindent: bool, + cx: &mut ModelContext, + ) where + I: IntoIterator>, + S: ToOffset, + T: Into, + { + todo!() + } + + pub fn start_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + ) -> Result<()> { + todo!() + } + + pub fn end_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + cx: &mut ModelContext, + ) -> Result<()> { + todo!() + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + todo!() + } + + pub fn redo(&mut self, cx: &mut ModelContext) { + todo!() + } + + pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { + todo!() + } + + pub fn add_selection_set( + &mut self, + selections: &[Selection], + cx: &mut ModelContext, + ) -> SelectionSetId { + todo!() + } + + pub fn remove_selection_set( + &mut self, + set_id: SelectionSetId, + cx: &mut ModelContext, + ) -> Result<()> { + todo!() + } + + pub fn update_selection_set( + &mut self, + set_id: SelectionSetId, + selections: &[Selection], + cx: &mut ModelContext, + ) -> Result<()> { + todo!() + } + + pub fn set_active_selection_set( + &mut self, + set_id: Option, + cx: &mut ModelContext, + ) -> Result<()> { + todo!() + } + + pub fn selection_sets(&self) -> impl Iterator { + todo!(); + None.into_iter() + } + pub fn push(&mut self, props: ExcerptProperties, cx: &mut ModelContext) -> ExcerptId where O: text::ToOffset, @@ -125,6 +265,30 @@ impl MultiBuffer { id } + pub fn save( + &mut self, + cx: &mut ModelContext, + ) -> Result>> { + todo!() + } + + pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> { + self.as_singleton() + .and_then(|buffer| buffer.read(cx).file()) + } + + pub fn is_dirty(&self) -> bool { + todo!() + } + + pub fn has_conflict(&self) -> bool { + todo!() + } + + pub fn is_parsing(&self, _: &AppContext) -> bool { + todo!() + } + fn sync(&self, cx: &AppContext) { let mut snapshot = self.snapshot.lock(); let mut excerpts_to_edit = Vec::new(); @@ -194,17 +358,141 @@ impl MultiBuffer { } } +// Methods delegating to the snapshot +impl MultiBuffer { + pub fn replica_id(&self) -> ReplicaId { + self.snapshot.lock().replica_id() + } + + pub fn text(&self) -> String { + self.snapshot.lock().text() + } + + pub fn text_for_range<'a, T: ToOffset>( + &'a self, + range: Range, + ) -> impl Iterator { + todo!(); + [].into_iter() + } + + pub fn max_point(&self) -> Point { + self.snapshot.lock().max_point() + } + + 
pub fn len(&self) -> usize { + self.snapshot.lock().len() + } + + pub fn line_len(&self, row: u32) -> u32 { + self.snapshot.lock().line_len(row) + } + + pub fn is_line_blank(&self, row: u32) -> bool { + self.snapshot.lock().is_line_blank(row) + } + + pub fn indent_column_for_line(&self, row: u32) -> u32 { + self.snapshot.lock().indent_column_for_line(row) + } + + pub fn anchor_before(&self, position: T) -> Anchor { + self.snapshot.lock().anchor_before(position) + } + + pub fn anchor_after(&self, position: T) -> Anchor { + self.snapshot.lock().anchor_after(position) + } + + pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { + self.snapshot.lock().anchor_at(position, bias) + } + + pub fn anchor_range_set( + &self, + start_bias: Bias, + end_bias: Bias, + entries: E, + ) -> AnchorRangeSet + where + E: IntoIterator>, + { + todo!() + } + + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + self.snapshot.lock().clip_offset(offset, bias) + } + + pub fn clip_point(&self, point: Point, bias: Bias) -> Point { + self.snapshot.lock().clip_point(point, bias) + } + + pub fn language<'a>(&self) -> Option<&'a Arc> { + todo!() + } + + pub fn parse_count(&self) -> usize { + self.snapshot.lock().parse_count() + } + + pub fn diagnostics_update_count(&self) -> usize { + self.snapshot.lock().diagnostics_update_count() + } + + pub fn diagnostics_in_range<'a, T, O>( + &'a self, + search_range: Range, + ) -> impl Iterator, &Diagnostic)> + 'a + where + T: 'a + ToOffset, + O: 'a, + { + todo!(); + None.into_iter() + } +} + +#[cfg(any(test, feature = "test-support"))] +impl MultiBuffer { + pub fn randomly_edit(&mut self, _: &mut R, _: usize, _: &mut ModelContext) { + todo!() + } + + pub fn randomly_mutate(&mut self, rng: &mut R, cx: &mut ModelContext) { + todo!() + } +} + impl Entity for MultiBuffer { - type Event = (); + type Event = super::Event; } impl MultiBufferSnapshot { + pub fn replica_id(&self) -> ReplicaId { + todo!() + } + pub fn text(&self) -> String { self.chunks(0..self.len(), None) .map(|chunk| chunk.text) .collect() } + pub fn reversed_chars_at<'a, T: ToOffset>( + &'a self, + position: T, + ) -> impl Iterator + 'a { + todo!(); + None.into_iter() + } + + pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator + 'a { + let offset = position.to_offset(self); + self.text_for_range(offset..self.len()) + .flat_map(|chunk| chunk.chars()) + } + pub fn text_for_range<'a, T: ToOffset>( &'a self, range: Range, @@ -212,6 +500,18 @@ impl MultiBufferSnapshot { self.chunks(range, None).map(|chunk| chunk.text) } + pub fn is_line_blank(&self, row: u32) -> bool { + self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row))) + .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none()) + } + + pub fn contains_str_at(&self, _: T, _: &str) -> bool + where + T: ToOffset, + { + todo!() + } + pub fn len(&self) -> usize { self.excerpts.summary().text.bytes } @@ -291,11 +591,15 @@ impl MultiBufferSnapshot { } } + pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range) -> MultiBufferBytes<'a> { + todo!() + } + pub fn chunks<'a, T: ToOffset>( &'a self, range: Range, theme: Option<&'a SyntaxTheme>, - ) -> Chunks<'a> { + ) -> MultiBufferChunks<'a> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut cursor = self.excerpts.cursor::(); cursor.seek(&range.start, Bias::Right, &()); @@ -331,7 +635,7 @@ impl MultiBufferSnapshot { excerpt.buffer.chunks(buffer_start..buffer_end, theme) }); - Chunks { + MultiBufferChunks { range, cursor, 
header_height, @@ -413,6 +717,10 @@ impl MultiBufferSnapshot { } } + pub fn indent_column_for_line(&self, row: u32) -> u32 { + todo!() + } + pub fn line_len(&self, row: u32) -> u32 { let mut cursor = self.excerpts.cursor::(); cursor.seek(&Point::new(row, 0), Bias::Right, &()); @@ -534,18 +842,62 @@ impl MultiBufferSnapshot { summary } - fn resolve_excerpt<'a, D: TextDimension>( + pub fn anchor_before(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Left) + } + + pub fn anchor_after(&self, position: T) -> Anchor { + self.anchor_at(position, Bias::Right) + } + + pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { + todo!() + } + + pub fn parse_count(&self) -> usize { + todo!() + } + + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + todo!() + } + + pub fn diagnostics_update_count(&self) -> usize { + todo!() + } + + pub fn language<'a>(&self) -> Option<&'a Arc> { + todo!() + } + + pub fn diagnostic_group<'a, O>( &'a self, - excerpt_id: &ExcerptId, - ) -> Option<(D, &'a BufferSnapshot)> { - let mut cursor = self.excerpts.cursor::<(ExcerptId, TextSummary)>(); - cursor.seek(excerpt_id, Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - if cursor.start().0 == *excerpt_id { - return Some((D::from_text_summary(&cursor.start().1), &excerpt.buffer)); - } - } - None + group_id: usize, + ) -> impl Iterator, &Diagnostic)> + 'a + where + O: 'a, + { + todo!(); + None.into_iter() + } + + pub fn diagnostics_in_range<'a, T, O>( + &'a self, + search_range: Range, + ) -> impl Iterator, &Diagnostic)> + 'a + where + T: 'a + ToOffset, + O: 'a, + { + todo!(); + None.into_iter() + } + + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + todo!() } fn buffer_snapshot_for_excerpt<'a>( @@ -672,7 +1024,17 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Location { } } -impl<'a> Iterator for Chunks<'a> { +impl<'a> MultiBufferChunks<'a> { + pub fn offset(&self) -> usize { + todo!() + } + + pub fn seek(&mut self, offset: usize) { + todo!() + } +} + +impl<'a> Iterator for MultiBufferChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { @@ -726,6 +1088,20 @@ impl<'a> Iterator for Chunks<'a> { } } +impl<'a> Iterator for MultiBufferBytes<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option { + todo!() + } +} + +impl<'a> io::Read for MultiBufferBytes<'a> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + todo!() + } +} + impl ToOffset for Point { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_to_offset(*self) diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs index e6b78eeefebd868d8774d6e144b3b7f8ce9fa0c5..752308604b284e084674bda7ea0a5b8205bb4f24 100644 --- a/crates/language/src/multi_buffer/anchor.rs +++ b/crates/language/src/multi_buffer/anchor.rs @@ -1,9 +1,12 @@ -use super::{location::*, ExcerptSummary, MultiBufferSnapshot, ToOffset}; +use super::{location::*, ExcerptSummary, MultiBufferSnapshot, ToOffset, ToPoint}; use anyhow::{anyhow, Result}; use smallvec::SmallVec; -use std::{cmp::Ordering, ops::Range}; +use std::{ + cmp::Ordering, + ops::{Range, Sub}, +}; use sum_tree::Bias; -use text::{rope::TextDimension, AnchorRangeExt, ToOffset as _}; +use text::{rope::TextDimension, AnchorRangeExt as _, Point}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { @@ -16,6 +19,9 @@ pub struct AnchorRangeMap { entries: SmallVec<[(ExcerptId, text::AnchorRangeMap); 1]>, } +#[derive(Clone, Debug, PartialEq, 
Eq)] +pub struct AnchorRangeSet(AnchorRangeMap<()>); + impl Anchor { pub fn min() -> Self { Self { @@ -68,6 +74,27 @@ impl Anchor { } self.clone() } + + pub fn summary<'a, D>(&self, snapshot: &'a MultiBufferSnapshot) -> D + where + D: TextDimension + Ord + Sub, + { + let mut cursor = snapshot.excerpts.cursor::(); + cursor.seek(&self.excerpt_id, Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == self.excerpt_id { + let mut excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); + let buffer_point = self.text_anchor.summary::(&excerpt.buffer); + if buffer_point > excerpt_buffer_start { + excerpt_start.add_assign(&(buffer_point - excerpt_buffer_start)); + } + return excerpt_start; + } + } + D::from_text_summary(&cursor.start().text) + } } impl AnchorRangeMap { @@ -263,18 +290,48 @@ impl AnchorRangeMap { } } +impl AnchorRangeSet { + pub fn len(&self) -> usize { + self.0.len() + } + + pub fn ranges<'a, D>( + &'a self, + content: &'a MultiBufferSnapshot, + ) -> impl 'a + Iterator> + where + D: TextDimension, + { + self.0.ranges(content).map(|(range, _)| range) + } +} + impl ToOffset for Anchor { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { - let mut cursor = snapshot.excerpts.cursor::(); - cursor.seek(&self.excerpt_id, Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == self.excerpt_id { - let buffer_offset = self.text_anchor.to_offset(&excerpt.buffer); - return cursor.start().text.bytes - + excerpt.header_height as usize - + buffer_offset.saturating_sub(excerpt.range.start.to_offset(&excerpt.buffer)); - } - } - cursor.start().text.bytes + self.summary(snapshot) + } +} + +impl ToPoint for Anchor { + fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point { + self.summary(snapshot) + } +} + +pub trait AnchorRangeExt { + fn cmp(&self, b: &Range, buffer: &MultiBufferSnapshot) -> Result; + fn to_offset(&self, content: &MultiBufferSnapshot) -> Range; +} + +impl AnchorRangeExt for Range { + fn cmp(&self, other: &Range, buffer: &MultiBufferSnapshot) -> Result { + Ok(match self.start.cmp(&other.start, buffer)? 
{ + Ordering::Equal => other.end.cmp(&self.end, buffer)?, + ord @ _ => ord, + }) + } + + fn to_offset(&self, content: &MultiBufferSnapshot) -> Range { + self.start.to_offset(&content)..self.end.to_offset(&content) } } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 9a52322c22bbdd20d423fb8b24038fb387cd50a4..c7c5670103ca50b6ee556bf6d4e1b8d3b75b3685 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -376,7 +376,7 @@ fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { .selection_set(selection_set_id) .unwrap() .selections::(&buffer) - .map(|selection| selection.point_range(&buffer)) + .map(|selection| selection.start.to_point(&buffer)..selection.end.to_point(&buffer)) .collect::>(); assert_eq!(selection_ranges[0], empty(Point::new(1, 4))); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 96949d05ff57192341f14995d1fd50fdd875cd4a..970a739981b6b11244a86cd74d77eb834c510b9a 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -948,7 +948,7 @@ mod tests { fs::{FakeFs, Fs as _}, language::{ tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry, - LanguageServerConfig, Point, + LanguageServerConfig, MultiBuffer, Point, }, lsp, project::{ProjectPath, Worktree}, @@ -1035,6 +1035,7 @@ mod tests { .update(&mut cx_b, |worktree, cx| worktree.open_buffer("b.txt", cx)) .await .unwrap(); + let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx)); buffer_b.read_with(&cx_b, |buf, _| assert_eq!(buf.text(), "b-contents")); worktree_a.read_with(&cx_a, |tree, cx| assert!(tree.has_open_buffer("b.txt", cx))); let buffer_a = worktree_a diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index f5592cefd00f41c17b320cd7a5d55511dc4ed514..8e68b2545ce2e54833bdac4022c0f25c32593fb9 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -201,6 +201,15 @@ where } } +impl IntoIterator for Patch { + type Item = Edit; + type IntoIter = std::vec::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + impl<'a, T: Clone> IntoIterator for &'a Patch { type Item = Edit; type IntoIter = std::iter::Cloned>>; diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index 6c04da016a1a6bb0782ba3a56ccbb8a1c54d4111..184118b78b93dba5e8cbe0e8905a6c8e357ab946 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,6 +1,4 @@ -use crate::{ - rope::TextDimension, AnchorRangeMap, Buffer, BufferSnapshot, Point, ToOffset, ToPoint, -}; +use crate::{rope::TextDimension, AnchorRangeMap, BufferSnapshot, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; use sum_tree::Bias; @@ -75,26 +73,6 @@ impl Selection { self.end = head; } } - - pub fn point_range(&self, buffer: &Buffer) -> Range { - let start = self.start.to_point(buffer); - let end = self.end.to_point(buffer); - if self.reversed { - end..start - } else { - start..end - } - } - - pub fn offset_range(&self, buffer: &Buffer) -> Range { - let start = self.start.to_offset(buffer); - let end = self.end.to_offset(buffer); - if self.reversed { - end..start - } else { - start..end - } - } } impl SelectionSet { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 5a4ee1ad93393fa9fa3315a92c305729068a7565..597649af49d03f13007b4a84d06a41468f515bcc 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1850,13 +1850,13 @@ impl BufferSnapshot { self.visible_text.clip_point_utf16(point, bias) } - pub fn 
point_for_offset(&self, offset: usize) -> Result { - if offset <= self.len() { - Ok(self.text_summary_for_range(0..offset)) - } else { - Err(anyhow!("offset out of bounds")) - } - } + // pub fn point_for_offset(&self, offset: usize) -> Result { + // if offset <= self.len() { + // Ok(self.text_summary_for_range(0..offset)) + // } else { + // Err(anyhow!("offset out of bounds")) + // } + // } pub fn edits_since<'a, D>( &'a self, From 4ee404a0af9e0146d21bca0f8c9851d506a21947 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 19:30:52 -0700 Subject: [PATCH 025/196] Take a cx in MultiBuffer::start_transaction --- crates/editor/src/editor.rs | 8 +++----- crates/language/src/multi_buffer.rs | 1 + 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a88d962ba93a14ac5d5b573fc519cfa90cd9cc7c..ae1143d0fa342ca39440b7e36ecafc44f4618192 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3211,18 +3211,16 @@ impl Editor { fn start_transaction(&mut self, cx: &mut ViewContext) { self.end_selection(cx); - self.buffer.update(cx, |buffer, _| { + self.buffer.update(cx, |buffer, cx| { buffer - .start_transaction(Some(self.selection_set_id)) + .start_transaction([self.selection_set_id], cx) .unwrap() }); } fn end_transaction(&self, cx: &mut ViewContext) { self.buffer.update(cx, |buffer, cx| { - buffer - .end_transaction(Some(self.selection_set_id), cx) - .unwrap() + buffer.end_transaction([self.selection_set_id], cx).unwrap() }); } diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 12834a53d3b9c614dbb5c65e5517904d5abaae89..300deb4487dac1f82c4dbd7267150423b7456519 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -169,6 +169,7 @@ impl MultiBuffer { pub fn start_transaction( &mut self, selection_set_ids: impl IntoIterator, + cx: &mut ModelContext, ) -> Result<()> { todo!() } From 98f726974eeee365faa94d698ca47f0fedd9f3a2 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 20:04:30 -0700 Subject: [PATCH 026/196] WIP --- crates/language/src/multi_buffer.rs | 24 ++++++++++++-- crates/language/src/multi_buffer/anchor.rs | 14 ++++---- crates/text/src/text.rs | 38 +++++++++++----------- 3 files changed, 48 insertions(+), 28 deletions(-) diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 300deb4487dac1f82c4dbd7267150423b7456519..eb765688561545f3b9062675c13363834959d257 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -12,6 +12,7 @@ use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use parking_lot::{Mutex, MutexGuard}; +use smallvec::SmallVec; use std::{cmp, io, ops::Range, sync::Arc, time::SystemTime}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ @@ -31,6 +32,7 @@ pub struct MultiBuffer { snapshot: Mutex, buffers: HashMap, subscriptions: Topic, + selection_sets: HashMap, } pub trait ToOffset: 'static { @@ -409,12 +411,30 @@ impl MultiBuffer { self.snapshot.lock().anchor_at(position, bias) } - pub fn anchor_range_set( + pub fn anchor_range_map( &self, start_bias: Bias, end_bias: Bias, entries: E, - ) -> AnchorRangeSet + ) -> AnchorRangeMap + where + E: IntoIterator, T)>, + { + let entries = entries.into_iter().peekable(); + let mut child_maps = SmallVec::new(); + if let Some((range, _)) = entries.peek() { + let mut cursor = 
self.snapshot.lock().excerpts.cursor::(); + cursor.seek(&range.start, Bias::Right, &()); + let mut excerpt_end = cursor.end(&()); + + // child_maps.push + + // for entry in entries {} + } + AnchorRangeMap { child_maps } + } + + pub fn anchor_range_set(&self, start_bias: Bias, end_bias: Bias, ranges: E) -> AnchorRangeSet where E: IntoIterator>, { diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs index 752308604b284e084674bda7ea0a5b8205bb4f24..5f5173925f40626ddb473d30e25f9c00c4be270c 100644 --- a/crates/language/src/multi_buffer/anchor.rs +++ b/crates/language/src/multi_buffer/anchor.rs @@ -14,9 +14,9 @@ pub struct Anchor { text_anchor: text::Anchor, } -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct AnchorRangeMap { - entries: SmallVec<[(ExcerptId, text::AnchorRangeMap); 1]>, + pub(crate) child_maps: SmallVec<[(ExcerptId, text::AnchorRangeMap); 1]>, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -99,7 +99,7 @@ impl Anchor { impl AnchorRangeMap { pub fn len(&self) -> usize { - self.entries + self.child_maps .iter() .map(|(_, text_map)| text_map.len()) .sum() @@ -113,7 +113,7 @@ impl AnchorRangeMap { D: TextDimension + Clone, { let mut cursor = snapshot.excerpts.cursor::(); - self.entries + self.child_maps .iter() .filter_map(move |(excerpt_id, text_map)| { cursor.seek_forward(excerpt_id, Bias::Left, &()); @@ -154,14 +154,14 @@ impl AnchorRangeMap { cursor.seek(&start_offset, start_bias, &()); let start_excerpt_id = &cursor.start().excerpt_id; let start_ix = match self - .entries + .child_maps .binary_search_by_key(&start_excerpt_id, |e| &e.0) { Ok(ix) | Err(ix) => ix, }; let mut entry_ranges = None; - let mut entries = self.entries[start_ix..].iter(); + let mut entries = self.child_maps[start_ix..].iter(); std::iter::from_fn(move || loop { match &mut entry_ranges { None => { @@ -265,7 +265,7 @@ impl AnchorRangeMap { { let mut cursor = snapshot.excerpts.cursor::(); let mut max = None; - for (excerpt_id, text_map) in &self.entries { + for (excerpt_id, text_map) in &self.child_maps { cursor.seek(excerpt_id, Bias::Left, &()); if let Some(excerpt) = cursor.item() { if excerpt.id == *excerpt_id { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 597649af49d03f13007b4a84d06a41468f515bcc..3c1a8aac16dbd2da87ba3c17557594f48bb9a844 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -40,7 +40,7 @@ pub struct Buffer { snapshot: BufferSnapshot, last_edit: clock::Local, history: History, - selections: HashMap, + selection_sets: HashMap, deferred_ops: OperationQueue, deferred_replicas: HashSet, replica_id: ReplicaId, @@ -482,7 +482,7 @@ impl Buffer { }, last_edit: clock::Local::default(), history, - selections: Default::default(), + selection_sets: Default::default(), deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), replica_id, @@ -735,10 +735,10 @@ impl Buffer { selections, lamport_timestamp, } => { - if let Some(set) = self.selections.get_mut(&set_id) { + if let Some(set) = self.selection_sets.get_mut(&set_id) { set.selections = selections; } else { - self.selections.insert( + self.selection_sets.insert( set_id, SelectionSet { id: set_id, @@ -753,14 +753,14 @@ impl Buffer { set_id, lamport_timestamp, } => { - self.selections.remove(&set_id); + self.selection_sets.remove(&set_id); self.lamport_clock.observe(lamport_timestamp); } Operation::SetActiveSelections { set_id, lamport_timestamp, } => { - for (id, set) in &mut self.selections { + for 
(id, set) in &mut self.selection_sets { if id.replica_id == lamport_timestamp.replica_id { if Some(*id) == set_id { set.active = true; @@ -1063,7 +1063,7 @@ impl Buffer { } Operation::RemoveSelections { .. } => true, Operation::SetActiveSelections { set_id, .. } => { - set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) + set_id.map_or(true, |set_id| self.selection_sets.contains_key(&set_id)) } #[cfg(test)] Operation::Test(_) => true, @@ -1091,7 +1091,7 @@ impl Buffer { .into_iter() .map(|set_id| { let set = self - .selections + .selection_sets .get(&set_id) .expect("invalid selection set id"); (set_id, set.selections.clone()) @@ -1115,7 +1115,7 @@ impl Buffer { .into_iter() .map(|set_id| { let set = self - .selections + .selection_sets .get(&set_id) .expect("invalid selection set id"); (set_id, set.selections.clone()) @@ -1132,7 +1132,7 @@ impl Buffer { } pub fn remove_peer(&mut self, replica_id: ReplicaId) { - self.selections + self.selection_sets .retain(|set_id, _| set_id.replica_id != replica_id) } @@ -1194,13 +1194,13 @@ impl Buffer { } pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { - self.selections + self.selection_sets .get(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) } pub fn selection_sets(&self) -> impl Iterator { - self.selections.iter() + self.selection_sets.iter() } fn build_selection_anchor_range_map( @@ -1231,7 +1231,7 @@ impl Buffer { ) -> Result { let selections = self.build_selection_anchor_range_map(selections); let set = self - .selections + .selection_sets .get_mut(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; set.selections = selections.clone(); @@ -1248,7 +1248,7 @@ impl Buffer { selections: Arc>, ) -> Result { let set = self - .selections + .selection_sets .get_mut(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; set.selections = selections.clone(); @@ -1262,7 +1262,7 @@ impl Buffer { pub fn add_selection_set(&mut self, selections: &[Selection]) -> Operation { let selections = self.build_selection_anchor_range_map(selections); let set_id = self.lamport_clock.tick(); - self.selections.insert( + self.selection_sets.insert( set_id, SelectionSet { id: set_id, @@ -1278,7 +1278,7 @@ impl Buffer { } pub fn add_raw_selection_set(&mut self, id: SelectionSetId, selections: SelectionSet) { - self.selections.insert(id, selections); + self.selection_sets.insert(id, selections); } pub fn set_active_selection_set( @@ -1289,7 +1289,7 @@ impl Buffer { assert_eq!(set_id.replica_id, self.replica_id()); } - for (id, set) in &mut self.selections { + for (id, set) in &mut self.selection_sets { if id.replica_id == self.local_clock.replica_id { if Some(*id) == set_id { set.active = true; @@ -1306,7 +1306,7 @@ impl Buffer { } pub fn remove_selection_set(&mut self, set_id: SelectionSetId) -> Result { - self.selections + self.selection_sets .remove(&set_id) .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; Ok(Operation::RemoveSelections { @@ -1469,7 +1469,7 @@ impl Buffer { where D: TextDimension, { - self.selections + self.selection_sets .keys() .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap())) } From ec54010e3c69646f7ac32021c5d79b1856e5884d Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 21:04:22 -0700 Subject: [PATCH 027/196] Sketch in type-level changes to track insertion splits --- crates/text/src/locator.rs | 74 ++++++++++++++++++++++++++++++++++++++ crates/text/src/text.rs | 72 
++++++++++++++++++++++++++++++------- 2 files changed, 134 insertions(+), 12 deletions(-) create mode 100644 crates/text/src/locator.rs diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs new file mode 100644 index 0000000000000000000000000000000000000000..487c8c260867536fca3480c1ac0fcd62d946a533 --- /dev/null +++ b/crates/text/src/locator.rs @@ -0,0 +1,74 @@ +use smallvec::{smallvec, SmallVec}; +use std::iter; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Locator(SmallVec<[u32; 4]>); + +impl Locator { + pub fn min() -> Self { + Self(smallvec![u32::MIN]) + } + + pub fn max() -> Self { + Self(smallvec![u32::MAX]) + } + + pub fn between(lhs: &Self, rhs: &Self) -> Self { + let lhs = lhs.0.iter().copied().chain(iter::repeat(u32::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u32::MAX)); + let mut location = SmallVec::new(); + for (lhs, rhs) in lhs.zip(rhs) { + let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + location.push(mid); + if mid > lhs { + break; + } + } + Self(location) + } +} + +impl Default for Locator { + fn default() -> Self { + Self::min() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use rand::prelude::*; + use std::mem; + + #[gpui::test(iterations = 100)] + fn test_locators(mut rng: StdRng) { + let mut lhs = Default::default(); + let mut rhs = Default::default(); + while lhs == rhs { + lhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + rhs = Locator( + (0..rng.gen_range(1..=5)) + .map(|_| rng.gen_range(0..=100)) + .collect(), + ); + } + + if lhs > rhs { + mem::swap(&mut lhs, &mut rhs); + } + + let middle = Locator::between(&lhs, &rhs); + assert!(middle > lhs); + assert!(middle < rhs); + for ix in 0..middle.0.len() - 1 { + assert!( + middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) + || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) + ); + } + } +} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a434e97e2ee66acf05171c500b6c4be5f98fdb33..3d2f95c169f3a9d36528616c8371851645423838 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,4 +1,5 @@ mod anchor; +mod locator; mod operation_queue; mod patch; mod point; @@ -14,6 +15,7 @@ pub use anchor::*; use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{HashMap, HashSet}; +use locator::Locator; use operation_queue::OperationQueue; use parking_lot::Mutex; pub use patch::Patch; @@ -55,6 +57,7 @@ pub struct Snapshot { deleted_text: Rope, undo_map: UndoMap, fragments: SumTree, + insertions: SumTree, pub version: clock::Global, } @@ -381,6 +384,7 @@ impl InsertionTimestamp { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { + id: Locator, timestamp: InsertionTimestamp, len: usize, visible: bool, @@ -391,6 +395,7 @@ struct Fragment { #[derive(Eq, PartialEq, Clone, Debug)] pub struct FragmentSummary { text: FragmentTextSummary, + max_id: Locator, max_version: clock::Global, min_insertion_version: clock::Global, max_insertion_version: clock::Global, @@ -402,11 +407,17 @@ struct FragmentTextSummary { deleted: usize, } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { - self.visible += summary.text.visible; - self.deleted += summary.text.deleted; - } +#[derive(Eq, PartialEq, Clone, Debug)] +struct InsertionFragment { + timestamp: InsertionTimestamp, + split_offset: usize, + fragment_id: Locator, +} + +#[derive(Clone, Debug, Default)] +struct InsertionSummary { + max_timestamp: 
InsertionTimestamp, + max_split_offset: usize, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -452,6 +463,7 @@ pub struct UndoOperation { impl Buffer { pub fn new(replica_id: u16, remote_id: u64, history: History) -> Buffer { let mut fragments = SumTree::new(); + let mut insertions = SumTree::new(); let mut local_clock = clock::Local::new(replica_id); let mut lamport_clock = clock::Lamport::new(replica_id); @@ -466,8 +478,10 @@ impl Buffer { local_clock.observe(timestamp.local()); lamport_clock.observe(timestamp.lamport()); version.observe(timestamp.local()); + let fragment_id = Locator::between(&Locator::min(), &Locator::max()); fragments.push( Fragment { + id: fragment_id, timestamp, len: visible_text.len(), visible: true, @@ -476,6 +490,14 @@ impl Buffer { }, &None, ); + insertions.push( + InsertionFragment { + timestamp, + split_offset: 0, + fragment_id, + }, + &(), + ); } Buffer { @@ -483,6 +505,7 @@ impl Buffer { visible_text, deleted_text: Rope::new(), fragments, + insertions, version, undo_map: Default::default(), }, @@ -504,13 +527,7 @@ impl Buffer { } pub fn snapshot(&self) -> Snapshot { - Snapshot { - visible_text: self.visible_text.clone(), - deleted_text: self.deleted_text.clone(), - undo_map: self.undo_map.clone(), - fragments: self.fragments.clone(), - version: self.version.clone(), - } + self.snapshot.clone() } pub fn replica_id(&self) -> ReplicaId { @@ -569,6 +586,7 @@ impl Buffer { ranges: Vec::with_capacity(ranges.len()), new_text: None, }; + let mut insertions = Vec::new(); let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -2040,6 +2058,7 @@ impl sum_tree::Item for Fragment { let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { visible: self.len, deleted: 0, @@ -2050,6 +2069,7 @@ impl sum_tree::Item for Fragment { } } else { FragmentSummary { + max_id: self.id.clone(), text: FragmentTextSummary { visible: 0, deleted: self.len, @@ -2079,6 +2099,7 @@ impl sum_tree::Summary for FragmentSummary { impl Default for FragmentSummary { fn default() -> Self { FragmentSummary { + max_id: Locator::min(), text: FragmentTextSummary::default(), max_version: clock::Global::new(), min_insertion_version: clock::Global::new(), @@ -2087,6 +2108,33 @@ impl Default for FragmentSummary { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + self.visible += summary.text.visible; + self.deleted += summary.text.deleted; + } +} + +impl sum_tree::Item for InsertionFragment { + type Summary = InsertionSummary; + + fn summary(&self) -> Self::Summary { + InsertionSummary { + max_timestamp: self.timestamp, + max_split_offset: self.split_offset, + } + } +} + +impl sum_tree::Summary for InsertionSummary { + type Context = (); + + fn add_summary(&mut self, summary: &Self, cx: &()) { + self.max_timestamp = summary.max_timestamp; + self.max_split_offset = summary.max_split_offset; + } +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct FullOffset(pub usize); From dd38eb12648575584acf8a9e2840a09f75753b73 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 8 Dec 2021 22:05:13 -0700 Subject: [PATCH 028/196] Start on maintaining an insertions tree I'm correctly assigning fragment ids to all fragments in the fragments tree, but I have a randomized test failure when making sure that the insertions tree matches the state of the 
fragments tree. --- crates/text/src/locator.rs | 10 +- crates/text/src/random_char_iter.rs | 12 +- crates/text/src/tests.rs | 23 +++- crates/text/src/text.rs | 163 ++++++++++++++++++---------- 4 files changed, 140 insertions(+), 68 deletions(-) diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 487c8c260867536fca3480c1ac0fcd62d946a533..0a22ea58f904b4fc28efc2ac785bd67ef5abd2dd 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -2,20 +2,20 @@ use smallvec::{smallvec, SmallVec}; use std::iter; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u32; 4]>); +pub struct Locator(SmallVec<[u8; 4]>); impl Locator { pub fn min() -> Self { - Self(smallvec![u32::MIN]) + Self(smallvec![u8::MIN]) } pub fn max() -> Self { - Self(smallvec![u32::MAX]) + Self(smallvec![u8::MAX]) } pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u32::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u32::MAX)); + let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { let mid = lhs + (rhs.saturating_sub(lhs)) / 2; diff --git a/crates/text/src/random_char_iter.rs b/crates/text/src/random_char_iter.rs index 244665688d6008caa1bbb0c8208aef0df863b8e9..1b0e6cc64d573196bc75b55326f10d1a00c46eab 100644 --- a/crates/text/src/random_char_iter.rs +++ b/crates/text/src/random_char_iter.rs @@ -14,13 +14,13 @@ impl Iterator for RandomCharIter { fn next(&mut self) -> Option { match self.0.gen_range(0..100) { // whitespace - 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(), + 0..=5 => ['\n'].choose(&mut self.0).copied(), // two-byte greek letters - 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), - // three-byte characters - 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), - // four-byte characters - 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), + // 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), + // // three-byte characters + // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), + // // four-byte characters + // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), // ascii letters _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()), } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index a13273b898b9d6febd865add8cfee83d30f81fc1..e55f478c9f5fef57f8316004da309bd5287160e2 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -51,7 +51,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 1); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -78,6 +78,27 @@ fn test_random_edits(mut rng: StdRng) { TextSummary::from(&reference_string[range]) ); + // Ensure every fragment is ordered by locator in the fragment tree and corresponds + // to an insertion fragment in the insertions tree. 
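+        // Concretely: walk the fragments in order, asserting that each fragment's id is
+        // strictly greater than the previous one, then look the fragment up in the
+        // insertions tree by (insertion timestamp, split offset) and assert that the
+        // resulting insertion fragment points back at the same fragment id.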
+ let mut prev_fragment_id = Locator::min(); + for fragment in buffer.snapshot.fragments.items(&None) { + assert!(fragment.id > prev_fragment_id); + prev_fragment_id = fragment.id.clone(); + + let insertion_fragment = buffer + .snapshot + .insertions + .get( + &InsertionFragmentKey { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + }, + &(), + ) + .unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + } + if rng.gen_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 3d2f95c169f3a9d36528616c8371851645423838..c9343eb7a249e6585c98a3a1cebc2dffbcbfc6b4 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -359,7 +359,7 @@ impl Subscription { } } -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord)] pub struct InsertionTimestamp { pub replica_id: ReplicaId, pub local: clock::Seq, @@ -385,7 +385,8 @@ impl InsertionTimestamp { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { id: Locator, - timestamp: InsertionTimestamp, + insertion_timestamp: InsertionTimestamp, + insertion_offset: usize, len: usize, visible: bool, deletions: HashSet, @@ -414,10 +415,10 @@ struct InsertionFragment { fragment_id: Locator, } -#[derive(Clone, Debug, Default)] -struct InsertionSummary { - max_timestamp: InsertionTimestamp, - max_split_offset: usize, +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +struct InsertionFragmentKey { + timestamp: InsertionTimestamp, + split_offset: usize, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -470,34 +471,26 @@ impl Buffer { let mut version = clock::Global::new(); let visible_text = Rope::from(history.base_text.as_ref()); if visible_text.len() > 0 { - let timestamp = InsertionTimestamp { + let insertion_timestamp = InsertionTimestamp { replica_id: 0, local: 1, lamport: 1, }; - local_clock.observe(timestamp.local()); - lamport_clock.observe(timestamp.lamport()); - version.observe(timestamp.local()); + local_clock.observe(insertion_timestamp.local()); + lamport_clock.observe(insertion_timestamp.lamport()); + version.observe(insertion_timestamp.local()); let fragment_id = Locator::between(&Locator::min(), &Locator::max()); - fragments.push( - Fragment { - id: fragment_id, - timestamp, - len: visible_text.len(), - visible: true, - deletions: Default::default(), - max_undos: Default::default(), - }, - &None, - ); - insertions.push( - InsertionFragment { - timestamp, - split_offset: 0, - fragment_id, - }, - &(), - ); + let fragment = Fragment { + id: fragment_id, + insertion_timestamp, + insertion_offset: 0, + len: visible_text.len(), + visible: true, + deletions: Default::default(), + max_undos: Default::default(), + }; + insertions.push(InsertionFragment::new(&fragment), &()); + fragments.push(fragment, &None); } Buffer { @@ -586,7 +579,7 @@ impl Buffer { ranges: Vec::with_capacity(ranges.len()), new_text: None, }; - let mut insertions = Vec::new(); + let mut new_insertions = Vec::new(); let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -612,6 +605,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, 
suffix.visible); new_fragments.push(suffix, &None); } @@ -630,6 +625,15 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; + prefix.insertion_offset += fragment_start - old_fragments.start().visible; + + // log::info!( + // "pushing prefix between {:?} and {:?}", + // new_fragments.summary().max_id, + // prefix.id + // ); + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); fragment_start = range.start; @@ -642,17 +646,32 @@ impl Buffer { old: fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); + + // log::info!( + // "pushing new fragment between {:?} and {:?}", + // new_fragments.summary().max_id, + // old_fragments + // .item() + // .map_or(&Locator::max(), |old_fragment| &old_fragment.id) + // ); + + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset: 0, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); } // Advance through every fragment that intersects this range, marking the intersecting @@ -664,6 +683,8 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; + intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -675,6 +696,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -695,6 +717,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; + suffix.insertion_offset += fragment_start - old_fragments.start().visible; + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -708,6 +732,7 @@ impl Buffer { drop(old_fragments); self.snapshot.fragments = new_fragments; + self.snapshot.insertions.edit(new_insertions, &()); self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.update_subscriptions(edits); @@ -865,7 +890,7 @@ impl Buffer { // timestamp. 
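             // Fragments at this position are ordered by descending Lamport timestamp, so any
             // fragment whose timestamp is newer than this edit's stays to the left of the new
             // insertion; applying the same rule on every replica keeps concurrent insertions
             // in a consistent order.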
while let Some(fragment) = old_fragments.item() { if fragment_start == range.start - && fragment.timestamp.lamport() > timestamp.lamport() + && fragment.insertion_timestamp.lamport() > timestamp.lamport() { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); @@ -900,7 +925,9 @@ impl Buffer { new_ropes.push_str(new_text); new_fragments.push( Fragment { - timestamp, + id: todo!(), + insertion_timestamp: timestamp, + insertion_offset: todo!(), len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -1008,7 +1035,9 @@ impl Buffer { let fragment_was_visible = fragment.visible; if fragment.was_visible(&undo.version, &self.undo_map) - || undo.counts.contains_key(&fragment.timestamp.local()) + || undo + .counts + .contains_key(&fragment.insertion_timestamp.local()) { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); @@ -2028,13 +2057,13 @@ impl<'a, D: TextDimension<'a> + Ord, F: FnMut(&FragmentSummary) -> bool> Iterato impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { - !undos.is_undone(self.timestamp.local()) + !undos.is_undone(self.insertion_timestamp.local()) && self.deletions.iter().all(|d| undos.is_undone(*d)) } fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool { - (version.observed(self.timestamp.local()) - && !undos.was_undone(self.timestamp.local(), version)) + (version.observed(self.insertion_timestamp.local()) + && !undos.was_undone(self.insertion_timestamp.local(), version)) && self .deletions .iter() @@ -2047,14 +2076,14 @@ impl sum_tree::Item for Fragment { fn summary(&self) -> Self::Summary { let mut max_version = clock::Global::new(); - max_version.observe(self.timestamp.local()); + max_version.observe(self.insertion_timestamp.local()); for deletion in &self.deletions { max_version.observe(*deletion); } max_version.join(&self.max_undos); let mut min_insertion_version = clock::Global::new(); - min_insertion_version.observe(self.timestamp.local()); + min_insertion_version.observe(self.insertion_timestamp.local()); let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { @@ -2086,6 +2115,7 @@ impl sum_tree::Summary for FragmentSummary { type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { + self.max_id = other.max_id.clone(); self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; self.max_version.join(&other.max_version); @@ -2116,22 +2146,43 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { } impl sum_tree::Item for InsertionFragment { - type Summary = InsertionSummary; + type Summary = InsertionFragmentKey; fn summary(&self) -> Self::Summary { - InsertionSummary { - max_timestamp: self.timestamp, - max_split_offset: self.split_offset, + InsertionFragmentKey { + timestamp: self.timestamp, + split_offset: self.split_offset, } } } -impl sum_tree::Summary for InsertionSummary { +impl sum_tree::KeyedItem for InsertionFragment { + type Key = InsertionFragmentKey; + + fn key(&self) -> Self::Key { + sum_tree::Item::summary(self) + } +} + +impl InsertionFragment { + fn new(fragment: &Fragment) -> Self { + Self { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + fragment_id: fragment.id.clone(), + } + } + + fn insert_new(fragment: &Fragment) -> sum_tree::Edit { + sum_tree::Edit::Insert(Self::new(fragment)) + } +} + +impl sum_tree::Summary for InsertionFragmentKey { type Context = 
(); - fn add_summary(&mut self, summary: &Self, cx: &()) { - self.max_timestamp = summary.max_timestamp; - self.max_split_offset = summary.max_split_offset; + fn add_summary(&mut self, summary: &Self, _: &()) { + *self = *summary; } } From b4ebe179f938292ede0a068501b23afed104e9da Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 08:44:59 +0100 Subject: [PATCH 029/196] Make local edit randomized tests pass with locators --- crates/text/src/tests.rs | 54 ++++++++++++++++++++++++---------------- crates/text/src/text.rs | 20 +++------------ 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index e55f478c9f5fef57f8316004da309bd5287160e2..e14baf47c1855de11a13d72431366f30cce8f9e5 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -51,7 +51,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 1); + let (old_ranges, new_text, _) = buffer.randomly_edit(&mut rng, 5); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } @@ -78,26 +78,7 @@ fn test_random_edits(mut rng: StdRng) { TextSummary::from(&reference_string[range]) ); - // Ensure every fragment is ordered by locator in the fragment tree and corresponds - // to an insertion fragment in the insertions tree. - let mut prev_fragment_id = Locator::min(); - for fragment in buffer.snapshot.fragments.items(&None) { - assert!(fragment.id > prev_fragment_id); - prev_fragment_id = fragment.id.clone(); - - let insertion_fragment = buffer - .snapshot - .insertions - .get( - &InsertionFragmentKey { - timestamp: fragment.insertion_timestamp, - split_offset: fragment.insertion_offset, - }, - &(), - ) - .unwrap(); - assert_eq!(insertion_fragment.fragment_id, fragment.id); - } + buffer.check_invariants(); if rng.gen_bool(0.3) { buffer_versions.push((buffer.clone(), buffer.subscribe())); @@ -639,6 +620,37 @@ struct Network { rng: R, } +impl Buffer { + fn check_invariants(&self) { + // Ensure every fragment is ordered by locator in the fragment tree and corresponds + // to an insertion fragment in the insertions tree. 
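+        // The insertions tree is also checked for duplicate fragment ids: every fragment
+        // must be recorded by exactly one insertion fragment. The randomized tests call
+        // this helper after each mutation.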
+ let mut prev_fragment_id = Locator::min(); + for fragment in self.snapshot.fragments.items(&None) { + assert!(fragment.id > prev_fragment_id); + prev_fragment_id = fragment.id.clone(); + + let insertion_fragment = self + .snapshot + .insertions + .get( + &InsertionFragmentKey { + timestamp: fragment.insertion_timestamp, + split_offset: fragment.insertion_offset, + }, + &(), + ) + .unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + } + + let insertions = self.snapshot.insertions.items(&()); + assert_eq!( + HashSet::from_iter(insertions.iter().map(|i| &i.fragment_id)).len(), + insertions.len() + ); + } +} + impl Network { fn new(rng: R) -> Self { Network { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index c9343eb7a249e6585c98a3a1cebc2dffbcbfc6b4..fb00e4bba19c747d4c2c1f98bca184658e0665ab 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -580,6 +580,7 @@ impl Buffer { new_text: None, }; let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut ranges = ranges .map(|range| range.start.to_offset(&*self)..range.end.to_offset(&*self)) @@ -626,12 +627,6 @@ impl Buffer { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; prefix.insertion_offset += fragment_start - old_fragments.start().visible; - - // log::info!( - // "pushing prefix between {:?} and {:?}", - // new_fragments.summary().max_id, - // prefix.id - // ); prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); @@ -646,15 +641,6 @@ impl Buffer { old: fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); - - // log::info!( - // "pushing new fragment between {:?} and {:?}", - // new_fragments.summary().max_id, - // old_fragments - // .item() - // .map_or(&Locator::max(), |old_fragment| &old_fragment.id) - // ); - let fragment = Fragment { id: Locator::between( &new_fragments.summary().max_id, @@ -663,7 +649,7 @@ impl Buffer { .map_or(&Locator::max(), |old_fragment| &old_fragment.id), ), insertion_timestamp: timestamp, - insertion_offset: 0, + insertion_offset, len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -672,6 +658,7 @@ impl Buffer { new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -683,6 +670,7 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; + intersection.insertion_offset += fragment_start - old_fragments.start().visible; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); From dc81b5f57a227275ebd004200c31594124e261a9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 09:15:19 +0100 Subject: [PATCH 030/196] Make remote edit randomized tests pass with locators --- crates/text/src/tests.rs | 2 ++ crates/text/src/text.rs | 47 ++++++++++++++++++++++++++++++---------- 2 files changed, 37 insertions(+), 12 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index e14baf47c1855de11a13d72431366f30cce8f9e5..38390a210af6f4d8dff815bfa5e8fe68e73f4eca 100644 --- a/crates/text/src/tests.rs +++ 
b/crates/text/src/tests.rs @@ -579,6 +579,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { } _ => {} } + buffer.check_invariants(); if mutation_count == 0 && network.is_idle() { break; @@ -605,6 +606,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { .all_selection_ranges::() .collect::>() ); + buffer.check_invariants(); } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index fb00e4bba19c747d4c2c1f98bca184658e0665ab..a609f8ec8db7bffca20ce9d45a0631f04808842b 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -827,6 +827,8 @@ impl Buffer { let mut edits = Patch::default(); let cx = Some(version.clone()); + let mut new_insertions = Vec::new(); + let mut insertion_offset = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>(); @@ -850,6 +852,9 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -868,6 +873,8 @@ impl Buffer { if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); fragment.len = fragment_end.0 - fragment_start.0; + fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); old_fragments.next(&cx); @@ -894,6 +901,9 @@ impl Buffer { if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start.0 - fragment_start.0; + prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); + new_insertions.push(InsertionFragment::insert_new(&prefix)); fragment_start = range.start; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); @@ -910,19 +920,24 @@ impl Buffer { old: old_start..old_start, new: new_start..new_start + new_text.len(), }); + let fragment = Fragment { + id: Locator::between( + &new_fragments.summary().max_id, + old_fragments + .item() + .map_or(&Locator::max(), |old_fragment| &old_fragment.id), + ), + insertion_timestamp: timestamp, + insertion_offset, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_str(new_text); - new_fragments.push( - Fragment { - id: todo!(), - insertion_timestamp: timestamp, - insertion_offset: todo!(), - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }, - &None, - ); + new_fragments.push(fragment, &None); + insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -934,6 +949,10 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { intersection.len = intersection_end.0 - fragment_start.0; + intersection.insertion_offset += + fragment_start - old_fragments.start().0.full_offset(); + 
intersection.id = + Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.insert(timestamp.local()); intersection.visible = false; } @@ -947,6 +966,7 @@ impl Buffer { new: new_start..new_start, }); } + new_insertions.push(InsertionFragment::insert_new(&intersection)); new_ropes.push_fragment(&intersection, fragment.visible); new_fragments.push(intersection, &None); fragment_start = intersection_end; @@ -964,6 +984,8 @@ impl Buffer { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end.0 - fragment_start.0; + suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); } @@ -979,6 +1001,7 @@ impl Buffer { self.snapshot.fragments = new_fragments; self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; + self.snapshot.insertions.edit(new_insertions, &()); self.local_clock.observe(timestamp.local()); self.lamport_clock.observe(timestamp.lamport()); self.update_subscriptions(edits); From b7535dfba4df92ef86194a178df56f781f2b6b4e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 09:21:52 +0100 Subject: [PATCH 031/196] Store only `clock::Local` in `InsertionFragment` --- crates/text/src/tests.rs | 2 +- crates/text/src/text.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 38390a210af6f4d8dff815bfa5e8fe68e73f4eca..dafbd9604c2413fa56c247d494ffe25a1c29b473 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -636,7 +636,7 @@ impl Buffer { .insertions .get( &InsertionFragmentKey { - timestamp: fragment.insertion_timestamp, + timestamp: fragment.insertion_timestamp.local(), split_offset: fragment.insertion_offset, }, &(), diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a609f8ec8db7bffca20ce9d45a0631f04808842b..0137a25bbee800ec5b4aefe248b5273959a6bf15 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -410,14 +410,14 @@ struct FragmentTextSummary { #[derive(Eq, PartialEq, Clone, Debug)] struct InsertionFragment { - timestamp: InsertionTimestamp, + timestamp: clock::Local, split_offset: usize, fragment_id: Locator, } #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] struct InsertionFragmentKey { - timestamp: InsertionTimestamp, + timestamp: clock::Local, split_offset: usize, } @@ -2178,7 +2178,7 @@ impl sum_tree::KeyedItem for InsertionFragment { impl InsertionFragment { fn new(fragment: &Fragment) -> Self { Self { - timestamp: fragment.insertion_timestamp, + timestamp: fragment.insertion_timestamp.local(), split_offset: fragment.insertion_offset, fragment_id: fragment.id.clone(), } From cbe136c0cb4ef310dd2ab8a89198e8e25887eeee Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 11:18:01 +0100 Subject: [PATCH 032/196] Implement anchor resolution using locators --- crates/text/src/anchor.rs | 589 ++++------------------------------- crates/text/src/selection.rs | 87 +++--- crates/text/src/tests.rs | 12 +- crates/text/src/text.rs | 370 ++++++++-------------- 4 files changed, 244 insertions(+), 814 deletions(-) diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 846c57274b3724e5a2f3680e8d5bf43fe16b4fda..1123bd21042710bdf5138445270552fdc4800609 100644 --- a/crates/text/src/anchor.rs +++ 
b/crates/text/src/anchor.rs @@ -1,561 +1,96 @@ use crate::{rope::TextDimension, Snapshot}; -use super::{Buffer, FromAnchor, FullOffset, Point, ToOffset}; +use super::{Buffer, ToOffset}; use anyhow::Result; -use std::{ - cmp::Ordering, - fmt::{Debug, Formatter}, - ops::Range, -}; -use sum_tree::{Bias, SumTree}; +use std::{cmp::Ordering, fmt::Debug, ops::Range}; +use sum_tree::Bias; #[derive(Clone, Eq, PartialEq, Debug, Hash)] -pub struct Anchor { - pub full_offset: FullOffset, - pub bias: Bias, - pub version: clock::Global, -} - -#[derive(Clone)] -pub struct AnchorMap { - pub(crate) version: clock::Global, - pub(crate) bias: Bias, - pub(crate) entries: Vec<(FullOffset, T)>, -} - -#[derive(Clone)] -pub struct AnchorSet(pub(crate) AnchorMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMap { - pub(crate) version: clock::Global, - pub(crate) entries: Vec<(Range, T)>, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub struct AnchorRangeSet(pub(crate) AnchorRangeMap<()>); - -#[derive(Clone)] -pub struct AnchorRangeMultimap { - pub(crate) entries: SumTree>, - pub(crate) version: clock::Global, - pub(crate) start_bias: Bias, - pub(crate) end_bias: Bias, -} - -#[derive(Clone)] -pub(crate) struct AnchorRangeMultimapEntry { - pub(crate) range: FullOffsetRange, - pub(crate) value: T, -} - -#[derive(Clone, Debug)] -pub(crate) struct FullOffsetRange { - pub(crate) start: FullOffset, - pub(crate) end: FullOffset, -} - -#[derive(Clone, Debug)] -pub(crate) struct AnchorRangeMultimapSummary { - start: FullOffset, - end: FullOffset, - min_start: FullOffset, - max_end: FullOffset, - count: usize, +pub enum Anchor { + Min, + Insertion { + timestamp: clock::Local, + offset: usize, + bias: Bias, + }, + Max, } impl Anchor { pub fn min() -> Self { - Self { - full_offset: FullOffset(0), - bias: Bias::Left, - version: Default::default(), - } + Self::Min } pub fn max() -> Self { - Self { - full_offset: FullOffset::MAX, - bias: Bias::Right, - version: Default::default(), - } + Self::Max } pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { - if self == other { - return Ok(Ordering::Equal); - } - - let offset_comparison = if self.version == other.version { - self.full_offset.cmp(&other.full_offset) - } else { - buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) - }; - - Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) - } - - pub fn bias_left(&self, buffer: &Buffer) -> Anchor { - if self.bias == Bias::Left { - self.clone() - } else { - buffer.anchor_before(self) - } - } - - pub fn bias_right(&self, buffer: &Buffer) -> Anchor { - if self.bias == Bias::Right { - self.clone() - } else { - buffer.anchor_after(self) - } - } - - pub fn summary<'a, D>(&self, content: &'a Snapshot) -> D - where - D: TextDimension<'a>, - { - content.summary_for_anchor(self) - } -} - -impl AnchorMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn iter<'a, D>(&'a self, snapshot: &'a Snapshot) -> impl Iterator + 'a - where - D: 'a + TextDimension<'a>, - { - snapshot - .summaries_for_anchors( - self.version.clone(), - self.bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } -} - -impl AnchorSet { - pub fn version(&self) -> &clock::Global { - &self.0.version - } - - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn iter<'a, D>(&'a self, content: &'a Snapshot) -> impl Iterator + 'a - where - D: 'a + 
TextDimension<'a>, - { - self.0.iter(content).map(|(position, _)| position) - } -} - -impl AnchorRangeMap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn len(&self) -> usize { - self.entries.len() - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: Vec<(Range, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn ranges<'a, D>( - &'a self, - content: &'a Snapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: 'a + TextDimension<'a>, - { - content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries.iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)) - } - - pub fn intersecting_ranges<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - content: &'a Snapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: 'a + TextDimension<'a>, - I: ToOffset, - { - let range = content.anchor_at(range.start.0, range.start.1) - ..content.anchor_at(range.end.0, range.end.1); - - let mut probe_anchor = Anchor { - full_offset: Default::default(), - bias: self.start_bias, - version: self.version.clone(), - }; - let start_ix = self.entries.binary_search_by(|probe| { - probe_anchor.full_offset = probe.0.end; - probe_anchor.cmp(&range.start, &content).unwrap() - }); - - match start_ix { - Ok(start_ix) | Err(start_ix) => content - .summaries_for_anchor_ranges( - self.version.clone(), - self.start_bias, - self.end_bias, - self.entries[start_ix..].iter().map(|e| &e.0), - ) - .zip(self.entries.iter().map(|e| &e.1)), - } - } - - pub fn full_offset_ranges(&self) -> impl Iterator, T)> { - self.entries.iter() - } - - pub fn min_by_key<'a, D, F, K>( - &self, - content: &'a Snapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: 'a + TextDimension<'a>, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .min_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - pub fn max_by_key<'a, D, F, K>( - &self, - content: &'a Snapshot, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: 'a + TextDimension<'a>, - F: FnMut(&T) -> K, - K: Ord, - { - self.entries - .iter() - .max_by_key(|(_, value)| extract_key(value)) - .map(|(range, value)| (self.resolve_range(range, &content), value)) - } - - fn resolve_range<'a, D>(&self, range: &Range, content: &'a Snapshot) -> Range - where - D: 'a + TextDimension<'a>, - { - let mut anchor = Anchor { - full_offset: range.start, - bias: self.start_bias, - version: self.version.clone(), - }; - let start = content.summary_for_anchor(&anchor); - - anchor.full_offset = range.end; - anchor.bias = self.end_bias; - let end = content.summary_for_anchor(&anchor); - - start..end - } -} - -impl PartialEq for AnchorRangeMap { - fn eq(&self, other: &Self) -> bool { - self.version == other.version && self.entries == other.entries - } -} - -impl Eq for AnchorRangeMap {} - -impl Debug for AnchorRangeMap { - fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { - let mut f = f.debug_map(); - for (range, value) in &self.entries { - f.key(range); - f.value(value); - } - f.finish() - } -} - -impl Debug for AnchorRangeSet { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let mut f = f.debug_set(); - for (range, _) in &self.0.entries { - f.entry(range); - } - f.finish() - } -} - -impl AnchorRangeSet { - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn version(&self) -> 
&clock::Global { - self.0.version() - } - - pub fn ranges<'a, D>(&'a self, content: &'a Snapshot) -> impl 'a + Iterator> - where - D: 'a + TextDimension<'a>, - { - self.0.ranges(content).map(|(range, _)| range) - } -} + match (self, other) { + (Self::Min, Self::Min) => Ok(Ordering::Equal), + (Self::Min, _) => Ok(Ordering::Less), + (_, Self::Min) => Ok(Ordering::Greater), + (Self::Max, Self::Max) => Ok(Ordering::Equal), + (Self::Max, _) => Ok(Ordering::Greater), + (_, Self::Max) => Ok(Ordering::Less), + ( + Self::Insertion { + timestamp: lhs_id, + bias: lhs_bias, + offset: lhs_offset, + }, + Self::Insertion { + timestamp: rhs_id, + bias: rhs_bias, + offset: rhs_offset, + }, + ) => { + let offset_comparison = if lhs_id == rhs_id { + lhs_offset.cmp(&rhs_offset) + } else { + buffer + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) + }; -impl Default for AnchorRangeMultimap { - fn default() -> Self { - Self { - entries: Default::default(), - version: Default::default(), - start_bias: Bias::Left, - end_bias: Bias::Left, + Ok(offset_comparison.then_with(|| lhs_bias.cmp(&rhs_bias))) + } } } -} - -impl AnchorRangeMultimap { - pub fn version(&self) -> &clock::Global { - &self.version - } - - pub fn intersecting_ranges<'a, I, O>( - &'a self, - range: Range, - content: &'a Snapshot, - inclusive: bool, - ) -> impl Iterator, &T)> + 'a - where - I: ToOffset, - O: FromAnchor, - { - let end_bias = if inclusive { Bias::Right } else { Bias::Left }; - let range = range.start.to_full_offset(&content, Bias::Left) - ..range.end.to_full_offset(&content, end_bias); - let mut cursor = self.entries.filter::<_, usize>( - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Right, - version: self.version.clone(), - }; - move |summary: &AnchorRangeMultimapSummary| { - endpoint.full_offset = summary.max_end; - endpoint.bias = self.end_bias; - let max_end = endpoint.to_full_offset(&content, self.end_bias); - let start_cmp = range.start.cmp(&max_end); - - endpoint.full_offset = summary.min_start; - endpoint.bias = self.start_bias; - let min_start = endpoint.to_full_offset(&content, self.start_bias); - let end_cmp = range.end.cmp(&min_start); - - if inclusive { - start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal - } else { - start_cmp == Ordering::Less && end_cmp == Ordering::Greater - } - } - }, - &(), - ); - std::iter::from_fn({ - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - move || { - if let Some(item) = cursor.item() { - let ix = *cursor.start(); - endpoint.full_offset = item.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = item.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - let value = &item.value; - cursor.next(&()); - Some((ix, start..end, value)) + pub fn bias_left(&self, buffer: &Buffer) -> Anchor { + match self { + Anchor::Min => Anchor::Min, + Anchor::Insertion { bias, .. 
} => { + if *bias == Bias::Left { + self.clone() } else { - None + buffer.anchor_before(self) } } - }) - } - - pub fn from_full_offset_ranges( - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - entries: impl Iterator, T)>, - ) -> Self { - Self { - version, - start_bias, - end_bias, - entries: SumTree::from_iter( - entries.map(|(range, value)| AnchorRangeMultimapEntry { - range: FullOffsetRange { - start: range.start, - end: range.end, - }, - value, - }), - &(), - ), + Anchor::Max => buffer.anchor_before(self), } } - pub fn full_offset_ranges(&self) -> impl Iterator, &T)> { - self.entries - .cursor::<()>() - .map(|entry| (entry.range.start..entry.range.end, &entry.value)) - } - - pub fn filter<'a, O, F>( - &'a self, - content: &'a Snapshot, - mut f: F, - ) -> impl 'a + Iterator, &T)> - where - O: FromAnchor, - F: 'a + FnMut(&'a T) -> bool, - { - let mut endpoint = Anchor { - full_offset: FullOffset(0), - bias: Bias::Left, - version: self.version.clone(), - }; - self.entries - .cursor::<()>() - .enumerate() - .filter_map(move |(ix, entry)| { - if f(&entry.value) { - endpoint.full_offset = entry.range.start; - endpoint.bias = self.start_bias; - let start = O::from_anchor(&endpoint, &content); - endpoint.full_offset = entry.range.end; - endpoint.bias = self.end_bias; - let end = O::from_anchor(&endpoint, &content); - Some((ix, start..end, &entry.value)) + pub fn bias_right(&self, buffer: &Buffer) -> Anchor { + match self { + Anchor::Min => buffer.anchor_after(self), + Anchor::Insertion { bias, .. } => { + if *bias == Bias::Right { + self.clone() } else { - None + buffer.anchor_after(self) } - }) - } -} - -impl sum_tree::Item for AnchorRangeMultimapEntry { - type Summary = AnchorRangeMultimapSummary; - - fn summary(&self) -> Self::Summary { - AnchorRangeMultimapSummary { - start: self.range.start, - end: self.range.end, - min_start: self.range.start, - max_end: self.range.end, - count: 1, - } - } -} - -impl Default for AnchorRangeMultimapSummary { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - min_start: FullOffset::MAX, - max_end: FullOffset(0), - count: 0, - } - } -} - -impl sum_tree::Summary for AnchorRangeMultimapSummary { - type Context = (); - - fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.min_start = self.min_start.min(other.min_start); - self.max_end = self.max_end.max(other.max_end); - - #[cfg(debug_assertions)] - { - let start_comparison = self.start.cmp(&other.start); - assert!(start_comparison <= Ordering::Equal); - if start_comparison == Ordering::Equal { - assert!(self.end.cmp(&other.end) >= Ordering::Equal); } + Anchor::Max => Anchor::Max, } - - self.start = other.start; - self.end = other.end; - self.count += other.count; - } -} - -impl Default for FullOffsetRange { - fn default() -> Self { - Self { - start: FullOffset(0), - end: FullOffset::MAX, - } - } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for usize { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - *self += summary.count; } -} - -impl<'a> sum_tree::Dimension<'a, AnchorRangeMultimapSummary> for FullOffsetRange { - fn add_summary(&mut self, summary: &'a AnchorRangeMultimapSummary, _: &()) { - self.start = summary.start; - self.end = summary.end; - } -} -impl<'a> sum_tree::SeekTarget<'a, AnchorRangeMultimapSummary, FullOffsetRange> for FullOffsetRange { - fn cmp(&self, cursor_location: &FullOffsetRange, _: &()) -> Ordering { - Ord::cmp(&self.start, &cursor_location.start) - .then_with(|| 
Ord::cmp(&cursor_location.end, &self.end)) + pub fn summary<'a, D>(&self, content: &'a Snapshot) -> D + where + D: TextDimension<'a>, + { + content.summary_for_anchor(self) } } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index eaa2409772744806c0f2018946781b4b4117f8f3..e9e7dd1f22f443c54507eb640760e4d4d964d827 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,8 +1,8 @@ use sum_tree::Bias; -use crate::{rope::TextDimension, Snapshot}; +use crate::{rope::TextDimension, Anchor, Snapshot}; -use super::{AnchorRangeMap, Buffer, Point, ToOffset, ToPoint}; +use super::{Buffer, Point, ToOffset, ToPoint}; use std::{cmp::Ordering, ops::Range, sync::Arc}; pub type SelectionSetId = clock::Lamport; @@ -28,7 +28,7 @@ pub struct Selection { pub struct SelectionSet { pub id: SelectionSetId, pub active: bool, - pub selections: Arc>, + pub selections: Arc<[Selection]>, } #[derive(Debug, Eq, PartialEq)] @@ -98,6 +98,21 @@ impl Selection { } } +impl Selection { + pub fn resolve<'a, D: 'a + TextDimension<'a>>( + &'a self, + snapshot: &'a Snapshot, + ) -> Selection { + Selection { + id: self.id, + start: snapshot.summary_for_anchor(&self.start), + end: snapshot.summary_for_anchor(&self.end), + reversed: self.reversed, + goal: self.goal, + } + } +} + impl SelectionSet { pub fn len(&self) -> usize { self.selections.len() @@ -105,69 +120,59 @@ impl SelectionSet { pub fn selections<'a, D>( &'a self, - content: &'a Snapshot, + snapshot: &'a Snapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, { - self.selections - .ranges(content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + self.selections.iter().map(|s| s.resolve(snapshot)) } pub fn intersecting_selections<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a Snapshot, + snapshot: &'a Snapshot, ) -> impl 'a + Iterator> where D: 'a + TextDimension<'a>, I: 'a + ToOffset, { - self.selections - .intersecting_ranges(range, content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + let start = snapshot.anchor_at(range.start.0, range.start.1); + let end = snapshot.anchor_at(range.end.0, range.end.1); + let start_ix = match self + .selections + .binary_search_by(|probe| probe.start.cmp(&start, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .selections + .binary_search_by(|probe| probe.end.cmp(&end, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + self.selections[start_ix..end_ix] + .iter() + .map(|s| s.resolve(snapshot)) } - pub fn oldest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a Snapshot) -> Option> where D: 'a + TextDimension<'a>, { self.selections - .min_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .min_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } - pub fn newest_selection<'a, D>(&'a self, content: &'a Snapshot) -> Option> + pub fn newest_selection<'a, D>(&'a self, snapshot: &'a Snapshot) -> Option> where D: 'a + TextDimension<'a>, { self.selections - .max_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - 
goal: state.goal, - }) + .iter() + .max_by_key(|s| s.id) + .map(|s| s.resolve(snapshot)) } } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index dafbd9604c2413fa56c247d494ffe25a1c29b473..f7f307049cbd4e089e59ee5a34141f3d116d9bf5 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -645,11 +645,13 @@ impl Buffer { assert_eq!(insertion_fragment.fragment_id, fragment.id); } - let insertions = self.snapshot.insertions.items(&()); - assert_eq!( - HashSet::from_iter(insertions.iter().map(|i| &i.fragment_id)).len(), - insertions.len() - ); + let mut cursor = self.snapshot.fragments.cursor::(); + for insertion_fragment in self.snapshot.insertions.cursor::<()>() { + cursor.seek(&insertion_fragment.fragment_id, Bias::Left, &None); + let fragment = cursor.item().unwrap(); + assert_eq!(insertion_fragment.fragment_id, fragment.id); + assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); + } } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 0137a25bbee800ec5b4aefe248b5273959a6bf15..5f54c4b8b997c77eaeab57c9bfd1d454ec364db7 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -27,7 +27,7 @@ use rope::TextDimension; pub use rope::{Chunks, Rope, TextSummary}; pub use selection::*; use std::{ - cmp::{self, Reverse}, + cmp::{self, Ordering}, iter::Iterator, ops::{self, Deref, Range, Sub}, str, @@ -67,8 +67,8 @@ pub struct Transaction { end: clock::Global, edits: Vec, ranges: Vec>, - selections_before: HashMap>>, - selections_after: HashMap>>, + selections_before: HashMap]>>, + selections_after: HashMap]>>, first_edit_at: Instant, last_edit_at: Instant, } @@ -155,7 +155,7 @@ impl History { fn start_transaction( &mut self, start: clock::Global, - selections_before: HashMap>>, + selections_before: HashMap]>>, now: Instant, ) { self.transaction_depth += 1; @@ -175,7 +175,7 @@ impl History { fn end_transaction( &mut self, - selections_after: HashMap>>, + selections_after: HashMap]>>, now: Instant, ) -> Option<&Transaction> { assert_ne!(self.transaction_depth, 0); @@ -430,7 +430,7 @@ pub enum Operation { }, UpdateSelections { set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, lamport_timestamp: clock::Lamport, }, RemoveSelections { @@ -1122,9 +1122,9 @@ impl Buffer { match op { Operation::Edit(edit) => self.version.ge(&edit.version), Operation::Undo { undo, .. } => self.version.ge(&undo.version), - Operation::UpdateSelections { selections, .. } => { - self.version.ge(selections.version()) - } + Operation::UpdateSelections { selections, .. } => selections + .iter() + .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), Operation::RemoveSelections { .. } => true, Operation::SetActiveSelections { set_id, .. } => { set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) @@ -1135,6 +1135,14 @@ impl Buffer { } } + fn can_resolve(&self, anchor: &Anchor) -> bool { + match anchor { + Anchor::Min => true, + Anchor::Insertion { timestamp, .. 
} => self.version.observed(*timestamp), + Anchor::Max => true, + } + } + pub fn peek_undo_stack(&self) -> Option<&Transaction> { self.history.undo_stack.last() } @@ -1280,25 +1288,22 @@ impl Buffer { self.selections.iter() } - fn build_selection_anchor_range_map( + fn build_anchor_selection_set( &self, selections: &[Selection], - ) -> Arc> { - Arc::new(self.anchor_range_map( - Bias::Left, - Bias::Left, - selections.iter().map(|selection| { - let start = selection.start.to_offset(self); - let end = selection.end.to_offset(self); - let range = start..end; - let state = SelectionState { + ) -> Arc<[Selection]> { + Arc::from( + selections + .iter() + .map(|selection| Selection { id: selection.id, + start: self.anchor_before(&selection.start), + end: self.anchor_before(&selection.end), reversed: selection.reversed, goal: selection.goal, - }; - (range, state) - }), - )) + }) + .collect::>(), + ) } pub fn update_selection_set( @@ -1306,7 +1311,7 @@ impl Buffer { set_id: SelectionSetId, selections: &[Selection], ) -> Result { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set = self .selections .get_mut(&set_id) @@ -1322,7 +1327,7 @@ impl Buffer { pub fn restore_selection_set( &mut self, set_id: SelectionSetId, - selections: Arc>, + selections: Arc<[Selection]>, ) -> Result { let set = self .selections @@ -1337,7 +1342,7 @@ impl Buffer { } pub fn add_selection_set(&mut self, selections: &[Selection]) -> Operation { - let selections = self.build_selection_anchor_range_map(selections); + let selections = self.build_anchor_selection_set(selections); let set_id = self.lamport_clock.tick(); self.selections.insert( set_id, @@ -1675,19 +1680,81 @@ impl Snapshot { where D: TextDimension<'a>, { - let cx = Some(anchor.version.clone()); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - anchor.full_offset - cursor.start().0.full_offset() - } else { - 0 - }; - self.text_summary_for_range(0..cursor.start().1 + overshoot) + match anchor { + Anchor::Min => D::default(), + Anchor::Insertion { + timestamp, + offset, + bias, + } => { + let anchor_key = InsertionFragmentKey { + timestamp: *timestamp, + split_offset: *offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, *bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += *offset - insertion.split_offset; + } + self.text_summary_for_range(0..fragment_offset) + } + Anchor::Max => D::from_text_summary(&self.visible_text.summary()), + } + } + + fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { + match anchor { 
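            // Resolving an `Anchor::Insertion` takes two seeks: the `insertions`
            // tree is keyed by (insertion timestamp, split offset) and yields the
            // insertion fragment that contains the anchored offset; that fragment's
            // `Locator` is then used to seek the `fragments` tree and recover the
            // fragment's position in the buffer. `Min` and `Max` short-circuit to
            // the start and to the combined visible + deleted extent, respectively.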
+ Anchor::Min => Default::default(), + Anchor::Insertion { + timestamp, + offset, + bias, + } => { + let anchor_key = InsertionFragmentKey { + timestamp: *timestamp, + split_offset: *offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, *bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + fragment_cursor.start().1 + (*offset - insertion.split_offset) + } + Anchor::Max => { + let text = self.fragments.summary().text; + FullOffset(text.visible + text.deleted) + } + } } pub fn text_summary_for_range<'a, D, O: ToOffset>(&'a self, range: Range) -> D @@ -1699,70 +1766,6 @@ impl Snapshot { .summary(range.end.to_offset(self)) } - fn summaries_for_anchors<'a, D, I>( - &'a self, - version: clock::Global, - bias: Bias, - ranges: I, - ) -> impl 'a + Iterator - where - D: 'a + TextDimension<'a>, - I: 'a + IntoIterator, - { - let cx = Some(version.clone()); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |offset| { - cursor.seek_forward(&VersionedFullOffset::Offset(*offset), bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - *offset - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary(cursor.start().1 + overshoot)); - summary.clone() - }) - } - - fn summaries_for_anchor_ranges<'a, D, I>( - &'a self, - version: clock::Global, - start_bias: Bias, - end_bias: Bias, - ranges: I, - ) -> impl 'a + Iterator> - where - D: 'a + TextDimension<'a>, - I: 'a + IntoIterator>, - { - let cx = Some(version); - let mut summary = D::default(); - let mut rope_cursor = self.visible_text.cursor(0); - let mut cursor = self.fragments.cursor::<(VersionedFullOffset, usize)>(); - ranges.into_iter().map(move |range| { - cursor.seek_forward(&VersionedFullOffset::Offset(range.start), start_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.start - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let start_summary = summary.clone(); - - cursor.seek_forward(&VersionedFullOffset::Offset(range.end), end_bias, &cx); - let overshoot = if cursor.item().map_or(false, |fragment| fragment.visible) { - range.end - cursor.start().0.full_offset() - } else { - 0 - }; - summary.add_assign(&rope_cursor.summary::(cursor.start().1 + overshoot)); - let end_summary = summary.clone(); - - start_summary..end_summary - }) - } - pub fn anchor_before(&self, position: T) -> Anchor { self.anchor_at(position, Bias::Left) } @@ -1772,139 +1775,22 @@ impl Snapshot { } pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - Anchor { - full_offset: position.to_full_offset(self, bias), - bias, - version: self.version.clone(), - } - } - - pub fn anchor_map(&self, bias: Bias, entries: E) -> 
AnchorMap - where - E: IntoIterator, - { - let version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(offset, value)| { - cursor.seek_forward(&offset, bias, &None); - let full_offset = FullOffset(cursor.start().deleted + offset); - (full_offset, value) - }) - .collect(); - - AnchorMap { - version, - bias, - entries, - } - } - - pub fn anchor_range_map( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMap - where - E: IntoIterator, T)>, - { - let version = self.version.clone(); - let mut cursor = self.fragments.cursor::(); - let entries = entries - .into_iter() - .map(|(range, value)| { - let Range { - start: start_offset, - end: end_offset, - } = range; - cursor.seek_forward(&start_offset, start_bias, &None); - let full_start_offset = FullOffset(cursor.start().deleted + start_offset); - cursor.seek_forward(&end_offset, end_bias, &None); - let full_end_offset = FullOffset(cursor.start().deleted + end_offset); - (full_start_offset..full_end_offset, value) - }) - .collect(); - - AnchorRangeMap { - version, - start_bias, - end_bias, - entries, - } - } - - pub fn anchor_set(&self, bias: Bias, entries: E) -> AnchorSet - where - E: IntoIterator, - { - AnchorSet(self.anchor_map(bias, entries.into_iter().map(|range| (range, ())))) - } - - pub fn anchor_range_set( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeSet - where - E: IntoIterator>, - { - AnchorRangeSet(self.anchor_range_map( - start_bias, - end_bias, - entries.into_iter().map(|range| (range, ())), - )) - } - - pub fn anchor_range_multimap( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMultimap - where - T: Clone, - E: IntoIterator, T)>, - O: ToOffset, - { - let mut entries = entries - .into_iter() - .map(|(range, value)| AnchorRangeMultimapEntry { - range: FullOffsetRange { - start: range.start.to_full_offset(self, start_bias), - end: range.end.to_full_offset(self, end_bias), - }, - value, - }) - .collect::>(); - entries.sort_unstable_by_key(|i| (i.range.start, Reverse(i.range.end))); - AnchorRangeMultimap { - entries: SumTree::from_iter(entries, &()), - version: self.version.clone(), - start_bias, - end_bias, - } - } - - fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { - let cx = Some(anchor.version.clone()); - let mut cursor = self - .fragments - .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); - cursor.seek( - &VersionedFullOffset::Offset(anchor.full_offset), - anchor.bias, - &cx, - ); - let overshoot = if cursor.item().is_some() { - anchor.full_offset - cursor.start().0.full_offset() + let offset = position.to_offset(self); + if bias == Bias::Left && offset == 0 { + Anchor::Min + } else if bias == Bias::Right && offset == self.len() { + Anchor::Max } else { - 0 - }; - let summary = cursor.start().1; - FullOffset(summary.visible + summary.deleted + overshoot) + let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); + fragment_cursor.seek(&offset, bias, &None); + let fragment = fragment_cursor.item().unwrap(); + let overshoot = offset - fragment_cursor.start().0; + Anchor::Insertion { + timestamp: fragment.insertion_timestamp.local(), + offset: fragment.insertion_offset + overshoot, + bias, + } + } } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2200,10 +2086,6 @@ impl sum_tree::Summary for InsertionFragmentKey { #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct 
FullOffset(pub usize); -impl FullOffset { - const MAX: Self = FullOffset(usize::MAX); -} - impl ops::AddAssign for FullOffset { fn add_assign(&mut self, rhs: usize) { self.0 += rhs; @@ -2239,6 +2121,12 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Locator { + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { + *self = summary.max_id.clone(); + } +} + impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize { fn cmp( &self, @@ -2363,9 +2251,9 @@ impl ToOffset for Anchor { } } -impl<'a> ToOffset for &'a Anchor { +impl<'a, T: ToOffset> ToOffset for &'a T { fn to_offset(&self, content: &Snapshot) -> usize { - content.summary_for_anchor(self) + (*self).to_offset(content) } } From 67686dd1c2013092255e554ef83d9b9a66b1f8f0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 12:00:51 +0100 Subject: [PATCH 033/196] Don't use an enum for anchors and model min/max more implicitly This will make it easier to serialize an anchor. --- crates/clock/src/clock.rs | 9 +++ crates/text/src/anchor.rs | 98 ++++++++++------------------ crates/text/src/text.rs | 134 ++++++++++++++++++-------------------- 3 files changed, 108 insertions(+), 133 deletions(-) diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index 6e8b460861efa59fbf38046dd514cdec00c2d0f5..2632aecce598fe1f26e2515c254b4c9771c10a36 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -21,6 +21,15 @@ pub struct Lamport { } impl Local { + pub const MIN: Self = Self { + replica_id: ReplicaId::MIN, + value: Seq::MIN, + }; + pub const MAX: Self = Self { + replica_id: ReplicaId::MAX, + value: Seq::MAX, + }; + pub fn new(replica_id: ReplicaId) -> Self { Self { replica_id, diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 1123bd21042710bdf5138445270552fdc4800609..a781c9f8876e78d50f2f05b66d1f34d2ab6b7b0e 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -1,88 +1,57 @@ -use crate::{rope::TextDimension, Snapshot}; - -use super::{Buffer, ToOffset}; +use super::{rope::TextDimension, Buffer, Point, Snapshot, ToOffset}; use anyhow::Result; use std::{cmp::Ordering, fmt::Debug, ops::Range}; use sum_tree::Bias; #[derive(Clone, Eq, PartialEq, Debug, Hash)] -pub enum Anchor { - Min, - Insertion { - timestamp: clock::Local, - offset: usize, - bias: Bias, - }, - Max, +pub struct Anchor { + pub timestamp: clock::Local, + pub offset: usize, + pub bias: Bias, } impl Anchor { pub fn min() -> Self { - Self::Min + Self { + timestamp: clock::Local::MIN, + offset: usize::MIN, + bias: Bias::Left, + } } pub fn max() -> Self { - Self::Max + Self { + timestamp: clock::Local::MAX, + offset: usize::MAX, + bias: Bias::Right, + } } pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { - match (self, other) { - (Self::Min, Self::Min) => Ok(Ordering::Equal), - (Self::Min, _) => Ok(Ordering::Less), - (_, Self::Min) => Ok(Ordering::Greater), - (Self::Max, Self::Max) => Ok(Ordering::Equal), - (Self::Max, _) => Ok(Ordering::Greater), - (_, Self::Max) => Ok(Ordering::Less), - ( - Self::Insertion { - timestamp: lhs_id, - bias: lhs_bias, - offset: lhs_offset, - }, - Self::Insertion { - timestamp: rhs_id, - bias: rhs_bias, - offset: rhs_offset, - }, - ) => { - let offset_comparison = if lhs_id == rhs_id { - lhs_offset.cmp(&rhs_offset) - } else { - buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) - }; + let offset_comparison = 
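        // Anchors that were created inside the same insertion can be ordered by
        // their offsets alone; otherwise both anchors are resolved to full offsets
        // (visible plus deleted text) before comparing. Ties are broken by bias,
        // with left-biased anchors ordered before right-biased ones.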
if self.timestamp == other.timestamp { + self.offset.cmp(&other.offset) + } else { + buffer + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) + }; - Ok(offset_comparison.then_with(|| lhs_bias.cmp(&rhs_bias))) - } - } + Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) } pub fn bias_left(&self, buffer: &Buffer) -> Anchor { - match self { - Anchor::Min => Anchor::Min, - Anchor::Insertion { bias, .. } => { - if *bias == Bias::Left { - self.clone() - } else { - buffer.anchor_before(self) - } - } - Anchor::Max => buffer.anchor_before(self), + if self.bias == Bias::Left { + self.clone() + } else { + buffer.anchor_before(self) } } pub fn bias_right(&self, buffer: &Buffer) -> Anchor { - match self { - Anchor::Min => buffer.anchor_after(self), - Anchor::Insertion { bias, .. } => { - if *bias == Bias::Right { - self.clone() - } else { - buffer.anchor_after(self) - } - } - Anchor::Max => Anchor::Max, + if self.bias == Bias::Right { + self.clone() + } else { + buffer.anchor_after(self) } } @@ -97,6 +66,7 @@ impl Anchor { pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &Snapshot) -> Result; fn to_offset(&self, content: &Snapshot) -> Range; + fn to_point(&self, content: &Snapshot) -> Range; } impl AnchorRangeExt for Range { @@ -110,4 +80,8 @@ impl AnchorRangeExt for Range { fn to_offset(&self, content: &Snapshot) -> Range { self.start.to_offset(&content)..self.end.to_offset(&content) } + + fn to_point(&self, content: &Snapshot) -> Range { + self.start.summary::(&content)..self.end.summary::(&content) + } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 5f54c4b8b997c77eaeab57c9bfd1d454ec364db7..b896aa687eb5fb4f92efc7d0b24c7c7bcd5086a6 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1136,11 +1136,9 @@ impl Buffer { } fn can_resolve(&self, anchor: &Anchor) -> bool { - match anchor { - Anchor::Min => true, - Anchor::Insertion { timestamp, .. 
} => self.version.observed(*timestamp), - Anchor::Max => true, - } + *anchor == Anchor::min() + || *anchor == Anchor::max() + || self.version.observed(anchor.timestamp) } pub fn peek_undo_stack(&self) -> Option<&Transaction> { @@ -1680,80 +1678,74 @@ impl Snapshot { where D: TextDimension<'a>, { - match anchor { - Anchor::Min => D::default(), - Anchor::Insertion { - timestamp, - offset, - bias, - } => { - let anchor_key = InsertionFragmentKey { - timestamp: *timestamp, - split_offset: *offset, - }; - let mut insertion_cursor = self.insertions.cursor::(); - insertion_cursor.seek(&anchor_key, *bias, &()); - if let Some(insertion) = insertion_cursor.item() { - let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); - if comparison == Ordering::Greater - || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) - { - insertion_cursor.prev(&()); - } - } else { + if *anchor == Anchor::min() { + D::default() + } else if *anchor == Anchor::max() { + D::from_text_summary(&self.visible_text.summary()) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { insertion_cursor.prev(&()); } - let insertion = insertion_cursor.item().expect("invalid insertion"); - debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); - let fragment = fragment_cursor.item().unwrap(); - let mut fragment_offset = fragment_cursor.start().1; - if fragment.visible { - fragment_offset += *offset - insertion.split_offset; - } - self.text_summary_for_range(0..fragment_offset) + let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; } - Anchor::Max => D::from_text_summary(&self.visible_text.summary()), + self.text_summary_for_range(0..fragment_offset) } } fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { - match anchor { - Anchor::Min => Default::default(), - Anchor::Insertion { - timestamp, - offset, - bias, - } => { - let anchor_key = InsertionFragmentKey { - timestamp: *timestamp, - split_offset: *offset, - }; - let mut insertion_cursor = self.insertions.cursor::(); - insertion_cursor.seek(&anchor_key, *bias, &()); - if let Some(insertion) = insertion_cursor.item() { - let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); - if comparison == Ordering::Greater - || (*bias == Bias::Left && comparison == Ordering::Equal && *offset > 0) - { - insertion_cursor.prev(&()); - } - } else { + if *anchor == Anchor::min() { + Default::default() + } else if *anchor == Anchor::max() { + let text = self.fragments.summary().text; + 
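            // A maximal anchor resolves to the total extent of the fragment tree,
            // counting both the visible text and text that has been deleted but is
            // still tracked so that older anchors can be resolved.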
FullOffset(text.visible + text.deleted) + } else { + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + let mut insertion_cursor = self.insertions.cursor::(); + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { insertion_cursor.prev(&()); } - let insertion = insertion_cursor.item().expect("invalid insertion"); - debug_assert_eq!(insertion.timestamp, *timestamp, "invalid insertion"); - - let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); - fragment_cursor.start().1 + (*offset - insertion.split_offset) - } - Anchor::Max => { - let text = self.fragments.summary().text; - FullOffset(text.visible + text.deleted) + } else { + insertion_cursor.prev(&()); } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); + fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) } } @@ -1777,15 +1769,15 @@ impl Snapshot { pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { let offset = position.to_offset(self); if bias == Bias::Left && offset == 0 { - Anchor::Min + Anchor::min() } else if bias == Bias::Right && offset == self.len() { - Anchor::Max + Anchor::max() } else { let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); let overshoot = offset - fragment_cursor.start().0; - Anchor::Insertion { + Anchor { timestamp: fragment.insertion_timestamp.local(), offset: fragment.insertion_offset + overshoot, bias, From 65711b2256cfd535e30fe9bd3e5f7b4b31888d65 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 16:38:46 +0100 Subject: [PATCH 034/196] Remove anchor collections Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 + crates/editor/src/editor.rs | 64 ++--- crates/editor/src/items.rs | 12 +- crates/language/Cargo.toml | 5 +- crates/language/src/buffer.rs | 321 +++++++++++++++----------- crates/language/src/diagnostic_set.rs | 141 +++++++++++ crates/language/src/language.rs | 1 + crates/language/src/proto.rs | 184 ++++++++------- crates/language/src/tests.rs | 24 +- crates/project/src/worktree.rs | 3 +- crates/rpc/proto/zed.proto | 39 ++-- crates/rpc/src/peer.rs | 8 +- crates/server/src/rpc.rs | 5 +- crates/sum_tree/src/cursor.rs | 70 ++++++ crates/sum_tree/src/sum_tree.rs | 11 +- crates/text/src/operation_queue.rs | 50 ++-- crates/text/src/selection.rs | 4 +- crates/text/src/text.rs | 38 ++- 18 files changed, 659 insertions(+), 322 deletions(-) create mode 100644 crates/language/src/diagnostic_set.rs diff --git a/Cargo.lock b/Cargo.lock index 0aadd18f662e46f0702390405eb815a324f4caa8..a1188259ac23e9b18f57fd22c68da69da81b012d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2587,6 +2587,7 @@ dependencies = [ "serde", "similar", "smol", + "sum_tree", "text", "theme", "tree-sitter", diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 
539736aca254e803fe9ac2ab29c51c63e0f014cb..497fbb2e83284130caaf9dd4c0ce07cecfed470e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -398,7 +398,7 @@ struct SelectNextState { #[derive(Debug)] struct BracketPairState { - ranges: AnchorRangeSet, + ranges: Vec>, pair: BracketPair, } @@ -1285,7 +1285,7 @@ impl Editor { fn autoclose_pairs(&mut self, cx: &mut ViewContext) { let selections = self.selections::(cx).collect::>(); - let new_autoclose_pair_state = self.buffer.update(cx, |buffer, cx| { + let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| { let autoclose_pair = buffer.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start; let pair = language.brackets().iter().find(|pair| { @@ -1324,15 +1324,14 @@ impl Editor { if pair.end.len() == 1 { let mut delta = 0; Some(BracketPairState { - ranges: buffer.anchor_range_set( - Bias::Left, - Bias::Right, - selections.iter().map(move |selection| { + ranges: selections + .iter() + .map(move |selection| { let offset = selection.start + delta; delta += 1; - offset..offset - }), - ), + buffer.anchor_before(offset)..buffer.anchor_after(offset) + }) + .collect(), pair, }) } else { @@ -1340,26 +1339,26 @@ impl Editor { } }) }); - self.autoclose_stack.extend(new_autoclose_pair_state); + self.autoclose_stack.extend(new_autoclose_pair); } fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext) -> bool { let old_selections = self.selections::(cx).collect::>(); - let autoclose_pair_state = if let Some(autoclose_pair_state) = self.autoclose_stack.last() { - autoclose_pair_state + let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { + autoclose_pair } else { return false; }; - if text != autoclose_pair_state.pair.end { + if text != autoclose_pair.pair.end { return false; } - debug_assert_eq!(old_selections.len(), autoclose_pair_state.ranges.len()); + debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len()); let buffer = self.buffer.read(cx); if old_selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(buffer))) .all(|(selection, autoclose_range)| { let autoclose_range_end = autoclose_range.end.to_offset(buffer); selection.is_empty() && selection.start == autoclose_range_end @@ -2826,13 +2825,14 @@ impl Editor { loop { let next_group = buffer - .diagnostics_in_range::<_, usize>(search_start..buffer.len()) - .find_map(|(range, diagnostic)| { - if diagnostic.is_primary + .diagnostics_in_range(search_start..buffer.len()) + .find_map(|entry| { + let range = entry.range.to_offset(buffer); + if entry.diagnostic.is_primary && !range.is_empty() && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end()) { - Some((range, diagnostic.group_id)) + Some((range, entry.diagnostic.group_id)) } else { None } @@ -2866,12 +2866,13 @@ impl Editor { let buffer = self.buffer.read(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer); let is_valid = buffer - .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) - .any(|(range, diagnostic)| { - diagnostic.is_primary + .diagnostics_in_range(active_diagnostics.primary_range.clone()) + .any(|entry| { + let range = entry.range.to_offset(buffer); + entry.diagnostic.is_primary && !range.is_empty() && range.start == primary_range_start - && diagnostic.message == active_diagnostics.primary_message + && entry.diagnostic.message == active_diagnostics.primary_message }); if is_valid != 
active_diagnostics.is_valid { @@ -2901,16 +2902,17 @@ impl Editor { let mut primary_message = None; let mut group_end = Point::zero(); let diagnostic_group = buffer - .diagnostic_group::(group_id) - .map(|(range, diagnostic)| { + .diagnostic_group(group_id) + .map(|entry| { + let range = entry.range.to_point(buffer); if range.end > group_end { group_end = range.end; } - if diagnostic.is_primary { + if entry.diagnostic.is_primary { primary_range = Some(range.clone()); - primary_message = Some(diagnostic.message.clone()); + primary_message = Some(entry.diagnostic.message.clone()); } - (range, diagnostic.clone()) + (range, entry.diagnostic.clone()) }) .collect::>(); let primary_range = primary_range.unwrap(); @@ -3165,12 +3167,12 @@ impl Editor { self.add_selections_state = None; self.select_next_state = None; self.select_larger_syntax_node_stack.clear(); - while let Some(autoclose_pair_state) = self.autoclose_stack.last() { + while let Some(autoclose_pair) = self.autoclose_stack.last() { let all_selections_inside_autoclose_ranges = - if selections.len() == autoclose_pair_state.ranges.len() { + if selections.len() == autoclose_pair.ranges.len() { selections .iter() - .zip(autoclose_pair_state.ranges.ranges::(buffer)) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer))) .all(|(selection, autoclose_range)| { let head = selection.head().to_point(&*buffer); autoclose_range.start <= head && autoclose_range.end >= head diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f4261c30bbf306446a70a97b5b49ec39c5310165..061aece652104e199ea2f9317abf0657d24ad226 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -5,7 +5,7 @@ use gpui::{ MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{Buffer, Diagnostic, File as _}; +use language::{AnchorRangeExt, Buffer, Diagnostic, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; @@ -314,11 +314,11 @@ impl DiagnosticMessage { fn update(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); - let cursor_position = editor.newest_selection(cx).head(); - let new_diagnostic = editor - .buffer() - .read(cx) - .diagnostics_in_range::(cursor_position..cursor_position) + let cursor_position = editor.newest_selection::(cx).head(); + let buffer = editor.buffer().read(cx); + let new_diagnostic = buffer + .diagnostics_in_range(cursor_position..cursor_position) + .map(|entry| (entry.range.to_offset(buffer), &entry.diagnostic)) .filter(|(range, _)| !range.is_empty()) .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len())) .map(|(_, diagnostic)| diagnostic.clone()); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index f4037ee70a1e49167afd4e73228a6ddeb55a2341..d5e40456c8d85d4ff8737ff2848ee3120d65ff81 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "language" version = "0.1.0" -edition = "2018" +edition = "2021" [lib] path = "src/language.rs" @@ -15,11 +15,12 @@ test-support = [ ] [dependencies] -text = { path = "../text" } clock = { path = "../clock" } gpui = { path = "../gpui" } lsp = { path = "../lsp" } rpc = { path = "../rpc" } +sum_tree = { path = "../sum_tree" } +text = { path = "../text" } theme = { path = "../theme" } util = { path = "../util" } anyhow = "1.0.38" diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 
55346fc9dd410c5617c2c1186ca839cc9c0ca32e..99239a3089f3c6929dbd4e62e71288f02ab30103 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,6 @@ +use crate::diagnostic_set::DiagnosticEntry; pub use crate::{ + diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig, PLAIN_TEXT, @@ -28,6 +30,7 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; +use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -61,9 +64,10 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, language_server: Option, + deferred_ops: OperationQueue, #[cfg(test)] pub(crate) operations: Vec, } @@ -71,7 +75,7 @@ pub struct Buffer { pub struct Snapshot { text: text::Snapshot, tree: Option, - diagnostics: AnchorRangeMultimap, + diagnostics: DiagnosticSet, diagnostics_update_count: usize, is_parsing: bool, language: Option>, @@ -101,10 +105,13 @@ struct LanguageServerSnapshot { path: Arc, } -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum Operation { Buffer(text::Operation), - UpdateDiagnostics(AnchorRangeMultimap), + UpdateDiagnostics { + diagnostics: Arc<[DiagnosticEntry]>, + lamport_timestamp: clock::Lamport, + }, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -173,8 +180,8 @@ struct SyntaxTree { struct AutoindentRequest { selection_set_ids: HashSet, before_edit: Snapshot, - edited: AnchorSet, - inserted: Option, + edited: Vec, + inserted: Option>>, } #[derive(Debug)] @@ -275,9 +282,11 @@ impl Buffer { buffer.add_raw_selection_set(set.id, set); } let mut this = Self::build(buffer, file); - if let Some(diagnostics) = message.diagnostics { - this.apply_diagnostic_update(proto::deserialize_diagnostics(diagnostics), cx); - } + this.apply_diagnostic_update( + Arc::from(proto::deserialize_diagnostics(message.diagnostics)), + cx, + ); + Ok(this) } @@ -294,7 +303,7 @@ impl Buffer { .selection_sets() .map(|(_, set)| proto::serialize_selection_set(set)) .collect(), - diagnostics: Some(proto::serialize_diagnostics(&self.diagnostics)), + diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), } } @@ -331,6 +340,7 @@ impl Buffer { diagnostics: Default::default(), diagnostics_update_count: 0, language_server: None, + deferred_ops: OperationQueue::new(), #[cfg(test)] operations: Default::default(), } @@ -690,6 +700,8 @@ impl Buffer { mut diagnostics: Vec, cx: &mut ModelContext, ) -> Result { + diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); + let version = version.map(|version| version as usize); let content = if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -710,81 +722,79 @@ impl Buffer { .and_then(|language| language.disk_based_diagnostic_sources()) .unwrap_or(&empty_set); - diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); - self.diagnostics = { - let mut edits_since_save = content - .edits_since::(&self.saved_version) - .peekable(); - let mut last_edit_old_end = PointUtf16::zero(); - let mut last_edit_new_end = PointUtf16::zero(); - let mut group_ids_by_diagnostic_range = HashMap::new(); - let mut diagnostics_by_group_id = HashMap::new(); - let mut next_group_id = 0; - 'outer: for diagnostic in &diagnostics { 
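                // Every LSP diagnostic is assigned to a group: diagnostics that share
                // a source, a code, and one of the ranges mentioned in their related
                // information collapse into the same `group_id`. Positions reported by
                // disk-based sources are translated through the edits made since the
                // buffer was last saved (diagnostics overlapping such an edit are
                // skipped), then clipped to valid UTF-16 positions, widening empty
                // ranges to cover at least one character where possible.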
- let mut start = diagnostic.range.start.to_point_utf16(); - let mut end = diagnostic.range.end.to_point_utf16(); - let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref(); - let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) - .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) - .copied() - .unwrap_or_else(|| { - let group_id = post_inc(&mut next_group_id); - for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { - group_ids_by_diagnostic_range.insert((source, code, range), group_id); - } - group_id - }); - - if diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)) - { - while let Some(edit) = edits_since_save.peek() { - if edit.old.end <= start { - last_edit_old_end = edit.old.end; - last_edit_new_end = edit.new.end; - edits_since_save.next(); - } else if edit.old.start <= end && edit.old.end >= start { - continue 'outer; - } else { - break; - } + let mut edits_since_save = content + .edits_since::(&self.saved_version) + .peekable(); + let mut last_edit_old_end = PointUtf16::zero(); + let mut last_edit_new_end = PointUtf16::zero(); + let mut group_ids_by_diagnostic_range = HashMap::new(); + let mut diagnostics_by_group_id = HashMap::new(); + let mut next_group_id = 0; + 'outer: for diagnostic in &diagnostics { + let mut start = diagnostic.range.start.to_point_utf16(); + let mut end = diagnostic.range.end.to_point_utf16(); + let source = diagnostic.source.as_ref(); + let code = diagnostic.code.as_ref(); + let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) + .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) + .copied() + .unwrap_or_else(|| { + let group_id = post_inc(&mut next_group_id); + for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { + group_ids_by_diagnostic_range.insert((source, code, range), group_id); } + group_id + }); - start = last_edit_new_end + (start - last_edit_old_end); - end = last_edit_new_end + (end - last_edit_old_end); - } - - let mut range = content.clip_point_utf16(start, Bias::Left) - ..content.clip_point_utf16(end, Bias::Right); - if range.start == range.end { - range.end.column += 1; - range.end = content.clip_point_utf16(range.end, Bias::Right); - if range.start == range.end && range.end.column > 0 { - range.start.column -= 1; - range.start = content.clip_point_utf16(range.start, Bias::Left); + if diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)) + { + while let Some(edit) = edits_since_save.peek() { + if edit.old.end <= start { + last_edit_old_end = edit.old.end; + last_edit_new_end = edit.new.end; + edits_since_save.next(); + } else if edit.old.start <= end && edit.old.end >= start { + continue 'outer; + } else { + break; } } - diagnostics_by_group_id - .entry(group_id) - .or_insert(Vec::new()) - .push(( - range, - Diagnostic { - severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: diagnostic.message.clone(), - group_id, - is_primary: false, - }, - )); + start = last_edit_new_end + (start - last_edit_old_end); + end = last_edit_new_end + (end - last_edit_old_end); + } + + let mut range = content.clip_point_utf16(start, Bias::Left) + ..content.clip_point_utf16(end, Bias::Right); + if range.start == range.end { + range.end.column += 1; + range.end = content.clip_point_utf16(range.end, Bias::Right); + if range.start == range.end && range.end.column > 0 { + range.start.column -= 1; + range.start = 
content.clip_point_utf16(range.start, Bias::Left); + } } - content.anchor_range_multimap( - Bias::Left, - Bias::Right, + diagnostics_by_group_id + .entry(group_id) + .or_insert(Vec::new()) + .push(( + range, + Diagnostic { + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: diagnostic.message.clone(), + group_id, + is_primary: false, + }, + )); + } + + drop(edits_since_save); + self.diagnostics + .reset( diagnostics_by_group_id .into_values() .flat_map(|mut diagnostics| { @@ -793,8 +803,7 @@ impl Buffer { primary_diagnostic.1.is_primary = true; diagnostics }), - ) - }; + ); if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -811,32 +820,24 @@ impl Buffer { self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); - Ok(Operation::UpdateDiagnostics(self.diagnostics.clone())) + Ok(Operation::UpdateDiagnostics { + diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), + lamport_timestamp: self.lamport_timestamp(), + }) } - pub fn diagnostics_in_range<'a, T, O>( + pub fn diagnostics_in_range<'a, T>( &'a self, search_range: Range, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl Iterator where T: 'a + ToOffset, - O: 'a + FromAnchor, { - self.diagnostics - .intersecting_ranges(search_range, self, true) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + self.diagnostics.range(search_range, self, true) } - pub fn diagnostic_group<'a, O>( - &'a self, - group_id: usize, - ) -> impl Iterator, &Diagnostic)> + 'a - where - O: 'a + FromAnchor, - { - self.diagnostics - .filter(self, move |diagnostic| diagnostic.group_id == group_id) - .map(move |(_, range, diagnostic)| (range, diagnostic)) + pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator { + self.diagnostics.group(group_id) } pub fn diagnostics_update_count(&self) -> usize { @@ -879,13 +880,13 @@ impl Buffer { for request in autoindent_requests { let old_to_new_rows = request .edited - .iter::(&request.before_edit) - .map(|point| point.row) + .iter() + .map(|anchor| anchor.summary::(&request.before_edit).row) .zip( request .edited - .iter::(&snapshot) - .map(|point| point.row), + .iter() + .map(|anchor| anchor.summary::(&snapshot).row), ) .collect::>(); @@ -947,7 +948,8 @@ impl Buffer { if let Some(inserted) = request.inserted.as_ref() { let inserted_row_ranges = contiguous_ranges( inserted - .ranges::(&snapshot) + .iter() + .map(|range| range.to_point(&snapshot)) .flat_map(|range| range.start.row..range.end.row + 1), max_rows_between_yields, ); @@ -1264,17 +1266,17 @@ impl Buffer { self.pending_autoindent.take(); let autoindent_request = if autoindent && self.language.is_some() { let before_edit = self.snapshot(); - let edited = self.anchor_set( - Bias::Left, - ranges.iter().filter_map(|range| { + let edited = ranges + .iter() + .filter_map(|range| { let start = range.start.to_point(self); if new_text.starts_with('\n') && start.column == self.line_len(start.row) { None } else { - Some(range.start) + Some(self.anchor_before(range.start)) } - }), - ); + }) + .collect(); Some((before_edit, edited)) } else { None @@ -1289,17 +1291,19 @@ impl Buffer { let mut inserted = None; if let Some(first_newline_ix) = first_newline_ix { let mut delta = 0isize; - inserted = Some(self.anchor_range_set( - Bias::Left, - Bias::Right, - ranges.iter().map(|range| { - let start = (delta + range.start as isize) as usize + first_newline_ix + 1; - let end = (delta + range.start as isize) as usize + new_text_len; - delta += - 
(range.end as isize - range.start as isize) + new_text_len as isize; - start..end - }), - )); + inserted = Some( + ranges + .iter() + .map(|range| { + let start = + (delta + range.start as isize) as usize + first_newline_ix + 1; + let end = (delta + range.start as isize) as usize + new_text_len; + delta += + (range.end as isize - range.start as isize) + new_text_len as isize; + self.anchor_before(start)..self.anchor_after(end) + }) + .collect(), + ); } let selection_set_ids = self @@ -1401,17 +1405,23 @@ impl Buffer { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); + let mut deferred_ops = Vec::new(); let buffer_ops = ops .into_iter() .filter_map(|op| match op { Operation::Buffer(op) => Some(op), - Operation::UpdateDiagnostics(diagnostics) => { - self.apply_diagnostic_update(diagnostics, cx); + _ => { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } None } }) .collect::>(); self.text.apply_ops(buffer_ops)?; + self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. @@ -1419,12 +1429,49 @@ impl Buffer { Ok(()) } + fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { + let mut deferred_ops = Vec::new(); + for op in self.deferred_ops.drain().iter().cloned() { + if self.can_apply_op(&op) { + self.apply_op(op, cx); + } else { + deferred_ops.push(op); + } + } + self.deferred_ops.insert(deferred_ops); + } + + fn can_apply_op(&self, operation: &Operation) -> bool { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. } => { + diagnostics.iter().all(|diagnostic| { + self.text.can_resolve(&diagnostic.range.start) + && self.text.can_resolve(&diagnostic.range.end) + }) + } + } + } + + fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext) { + match operation { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be applied at this layer") + } + Operation::UpdateDiagnostics { diagnostics, .. 
} => { + self.apply_diagnostic_update(diagnostics, cx); + } + } + } + fn apply_diagnostic_update( &mut self, - diagnostics: AnchorRangeMultimap, + diagnostics: Arc<[DiagnosticEntry]>, cx: &mut ModelContext, ) { - self.diagnostics = diagnostics; + self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); self.diagnostics_update_count += 1; cx.notify(); } @@ -1632,19 +1679,16 @@ impl Snapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for (_, range, diagnostic) in - self.diagnostics - .intersecting_ranges(range.clone(), self, true) - { + for entry in self.diagnostics.range(range.clone(), self, true) { diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.start, + offset: entry.range.start.to_offset(self), is_start: true, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { - offset: range.end, + offset: entry.range.end.to_offset(self), is_start: false, - severity: diagnostic.severity, + severity: entry.diagnostic.severity, }); } diagnostic_endpoints @@ -1939,6 +1983,19 @@ impl ToPointUtf16 for lsp::Position { } } +impl operation_queue::Operation for Operation { + fn lamport_timestamp(&self) -> clock::Lamport { + match self { + Operation::Buffer(_) => { + unreachable!("buffer operations should never be deferred at this layer") + } + Operation::UpdateDiagnostics { + lamport_timestamp, .. + } => *lamport_timestamp, + } + } +} + fn diagnostic_ranges<'a>( diagnostic: &'a lsp::Diagnostic, abs_path: Option<&'a Path>, @@ -1968,7 +2025,7 @@ fn diagnostic_ranges<'a>( } pub fn contiguous_ranges( - values: impl IntoIterator, + values: impl Iterator, max_len: usize, ) -> impl Iterator> { let mut values = values.into_iter(); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs new file mode 100644 index 0000000000000000000000000000000000000000..9640ded372f84de50149de9042059f9c28c68695 --- /dev/null +++ b/crates/language/src/diagnostic_set.rs @@ -0,0 +1,141 @@ +use crate::Diagnostic; +use std::{ + cmp::{Ordering, Reverse}, + iter, + ops::Range, +}; +use sum_tree::{self, Bias, SumTree}; +use text::{Anchor, PointUtf16, ToOffset}; + +#[derive(Clone, Default)] +pub struct DiagnosticSet { + diagnostics: SumTree, +} + +#[derive(Clone, Debug)] +pub struct DiagnosticEntry { + pub range: Range, + pub diagnostic: Diagnostic, +} + +#[derive(Clone, Debug)] +pub struct Summary { + start: Anchor, + end: Anchor, + min_start: Anchor, + max_end: Anchor, + count: usize, +} + +impl DiagnosticSet { + pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self + where + I: IntoIterator, + { + Self { + diagnostics: SumTree::from_iter(iter, buffer), + } + } + + pub fn reset(&mut self, iter: I) + where + I: IntoIterator, Diagnostic)>, + { + let mut entries = iter.into_iter().collect::>(); + entries.sort_unstable_by_key(|(range, _)| (range.start, Reverse(range.end))); + } + + pub fn iter(&self) -> impl Iterator { + self.diagnostics.iter() + } + + pub fn range<'a, T>( + &'a self, + range: Range, + buffer: &'a text::Snapshot, + inclusive: bool, + ) -> impl Iterator + where + T: 'a + ToOffset, + { + let end_bias = if inclusive { Bias::Right } else { Bias::Left }; + let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); + let mut cursor = self.diagnostics.filter::<_, ()>( + { + move |summary: &Summary| { + let start_cmp = range.start.cmp(&summary.max_end, buffer).unwrap(); + let end_cmp = 
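                    // Each subtree summary tracks the minimum start and maximum end
                    // anchors of the entries beneath it, so the filter can skip a
                    // subtree entirely unless the query range starts before its
                    // `max_end` and ends after its `min_start` (or touches them,
                    // in the `inclusive` case).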
range.end.cmp(&summary.min_start, buffer).unwrap(); + if inclusive { + start_cmp <= Ordering::Equal && end_cmp >= Ordering::Equal + } else { + start_cmp == Ordering::Less && end_cmp == Ordering::Greater + } + } + }, + buffer, + ); + + iter::from_fn({ + move || { + if let Some(diagnostic) = cursor.item() { + cursor.next(buffer); + Some(diagnostic) + } else { + None + } + } + }) + } + + pub fn group(&self, group_id: usize) -> impl Iterator { + self.iter() + .filter(move |entry| entry.diagnostic.group_id == group_id) + } +} + +impl sum_tree::Item for DiagnosticEntry { + type Summary = Summary; + + fn summary(&self) -> Self::Summary { + Summary { + start: self.range.start.clone(), + end: self.range.end.clone(), + min_start: self.range.start.clone(), + max_end: self.range.end.clone(), + count: 1, + } + } +} + +impl Default for Summary { + fn default() -> Self { + Self { + start: Anchor::min(), + end: Anchor::max(), + min_start: Anchor::max(), + max_end: Anchor::min(), + count: 0, + } + } +} + +impl sum_tree::Summary for Summary { + type Context = text::Snapshot; + + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + if other + .min_start + .cmp(&self.min_start, buffer) + .unwrap() + .is_lt() + { + self.min_start = other.min_start.clone(); + } + if other.max_end.cmp(&self.max_end, buffer).unwrap().is_gt() { + self.max_end = other.max_end.clone(); + } + self.start = other.start.clone(); + self.end = other.end.clone(); + self.count += other.count; + } +} diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 77d01c7ecf85b4b231d4cc03856a13e0c6f48dc8..619ce19689e84d719aa75d2ee3945a4f123f8286 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,4 +1,5 @@ mod buffer; +mod diagnostic_set; mod highlight_map; pub mod proto; #[cfg(test)] diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 3e3455c6714c16acb512cbf32681a37348dd2e2f..851ab76bcaef2cbf0e2a55e2de9cf019974b9972 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::{Diagnostic, Operation}; +use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; @@ -49,14 +49,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { replica_id: set_id.replica_id as u32, local_timestamp: set_id.value, lamport_timestamp: lamport_timestamp.value, - version: selections.version().into(), selections: selections - .full_offset_ranges() - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), }), @@ -78,9 +77,14 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, }, ), - Operation::UpdateDiagnostics(diagnostic_set) => { - proto::operation::Variant::UpdateDiagnostics(serialize_diagnostics(diagnostic_set)) - } + Operation::UpdateDiagnostics { + diagnostics, + lamport_timestamp, + } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { + replica_id: lamport_timestamp.replica_id as u32, + lamport_timestamp: lamport_timestamp.value, + diagnostics: serialize_diagnostics(diagnostics.iter()), + 
}), }), } } @@ -105,44 +109,54 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation:: } pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { - let version = set.selections.version(); - let entries = set.selections.full_offset_ranges(); proto::SelectionSet { replica_id: set.id.replica_id as u32, lamport_timestamp: set.id.value as u32, is_active: set.active, - version: version.into(), - selections: entries - .map(|(range, state)| proto::Selection { - id: state.id as u64, - start: range.start.0 as u64, - end: range.end.0 as u64, - reversed: state.reversed, + selections: set + .selections + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, }) .collect(), } } -pub fn serialize_diagnostics(map: &AnchorRangeMultimap) -> proto::DiagnosticSet { - proto::DiagnosticSet { - version: map.version().into(), - diagnostics: map - .full_offset_ranges() - .map(|(range, diagnostic)| proto::Diagnostic { - start: range.start.0 as u64, - end: range.end.0 as u64, - message: diagnostic.message.clone(), - severity: match diagnostic.severity { - DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, - DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, - DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, - DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, - _ => proto::diagnostic::Severity::None, - } as i32, - group_id: diagnostic.group_id as u64, - is_primary: diagnostic.is_primary, - }) - .collect(), +pub fn serialize_diagnostics<'a>( + diagnostics: impl IntoIterator, +) -> Vec { + diagnostics + .into_iter() + .map(|entry| proto::Diagnostic { + start: Some(serialize_anchor(&entry.range.start)), + end: Some(serialize_anchor(&entry.range.end)), + message: entry.diagnostic.message.clone(), + severity: match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, + DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, + DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, + DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, + _ => proto::diagnostic::Severity::None, + } as i32, + group_id: entry.diagnostic.group_id as u64, + is_primary: entry.diagnostic.is_primary, + }) + .collect() +} + +fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { + proto::Anchor { + replica_id: anchor.timestamp.replica_id as u32, + local_timestamp: anchor.timestamp.value, + offset: anchor.offset as u64, + bias: match anchor.bias { + Bias::Left => proto::Bias::Left as i32, + Bias::Right => proto::Bias::Right as i32, + }, } } @@ -187,27 +201,19 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }), proto::operation::Variant::UpdateSelections(message) => { - let version = message.version.into(); - let entries = message + let selections = message .selections - .iter() - .map(|selection| { - let range = FullOffset(selection.start as usize) - ..FullOffset(selection.end as usize); - let state = SelectionState { + .into_iter() + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(); - let selections = AnchorRangeMap::from_full_offset_ranges( - version, - 
Bias::Left, - Bias::Left, - entries, - ); + .collect::>(); Operation::Buffer(text::Operation::UpdateSelections { set_id: clock::Lamport { @@ -245,9 +251,13 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }) } - proto::operation::Variant::UpdateDiagnostics(message) => { - Operation::UpdateDiagnostics(deserialize_diagnostics(message)) - } + proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics { + diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)), + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }, }, ) } @@ -277,36 +287,30 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { value: set.lamport_timestamp, }, active: set.is_active, - selections: Arc::new(AnchorRangeMap::from_full_offset_ranges( - set.version.into(), - Bias::Left, - Bias::Left, + selections: Arc::from( set.selections .into_iter() - .map(|selection| { - let range = - FullOffset(selection.start as usize)..FullOffset(selection.end as usize); - let state = SelectionState { + .filter_map(|selection| { + Some(Selection { id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, reversed: selection.reversed, goal: SelectionGoal::None, - }; - (range, state) + }) }) - .collect(), - )), + .collect::>(), + ), } } -pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMultimap { - AnchorRangeMultimap::from_full_offset_ranges( - message.version.into(), - Bias::Left, - Bias::Right, - message.diagnostics.into_iter().filter_map(|diagnostic| { - Some(( - FullOffset(diagnostic.start as usize)..FullOffset(diagnostic.end as usize), - Diagnostic { +pub fn deserialize_diagnostics(diagnostics: Vec) -> Vec { + diagnostics + .into_iter() + .filter_map(|diagnostic| { + Some(DiagnosticEntry { + range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?, + diagnostic: Diagnostic { severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? { proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, @@ -318,7 +322,21 @@ pub fn deserialize_diagnostics(message: proto::DiagnosticSet) -> AnchorRangeMult group_id: diagnostic.group_id as usize, is_primary: diagnostic.is_primary, }, - )) - }), - ) + }) + }) + .collect() +} + +fn deserialize_anchor(anchor: proto::Anchor) -> Option { + Some(Anchor { + timestamp: clock::Local { + replica_id: anchor.replica_id as ReplicaId, + value: anchor.local_timestamp, + }, + offset: anchor.offset as usize, + bias: match proto::Bias::from_i32(anchor.bias)? 
{ + proto::Bias::Left => Bias::Left, + proto::Bias::Right => Bias::Right, + }, + }) } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index cff74af1e3bee4b9c65416ecdd5986fbb787f4ac..d1f48245db62eccc939ea0eaad4cdad79febd436 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -533,6 +533,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -600,6 +601,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -679,6 +681,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { assert_eq!( buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -863,7 +866,8 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { buffer.update_diagnostics(None, diagnostics, cx).unwrap(); assert_eq!( buffer - .diagnostics_in_range::<_, Point>(0..buffer.len()) + .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) .collect::>(), &[ ( @@ -915,7 +919,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ); assert_eq!( - buffer.diagnostic_group(0).collect::>(), + buffer + .diagnostic_group(0) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .collect::>(), &[ ( Point::new(1, 8)..Point::new(1, 9), @@ -938,7 +945,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ] ); assert_eq!( - buffer.diagnostic_group(1).collect::>(), + buffer + .diagnostic_group(1) + .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .collect::>(), &[ ( Point::new(1, 13)..Point::new(1, 15), @@ -995,13 +1005,17 @@ fn chunks_with_diagnostics( #[test] fn test_contiguous_ranges() { assert_eq!( - contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12], 100).collect::>(), + contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter(), 100).collect::>(), &[1..4, 5..7, 9..13] ); // Respects the `max_len` parameter assert_eq!( - contiguous_ranges([2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31], 3).collect::>(), + contiguous_ranges( + [2, 3, 4, 5, 6, 7, 8, 9, 23, 24, 25, 26, 30, 31].into_iter(), + 3 + ) + .collect::>(), &[2..5, 5..8, 8..10, 23..26, 26..27, 30..32], ); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 29bc230b97c3c42d79eee264cd47816d3a413ffb..393e92dfb98aed443db8a9f86e90f31583373bd8 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3005,7 +3005,7 @@ mod tests { use anyhow::Result; use client::test::{FakeHttpClient, FakeServer}; use fs::RealFs; - use language::{tree_sitter_rust, LanguageServerConfig}; + use language::{tree_sitter_rust, AnchorRangeExt, LanguageServerConfig}; use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; @@ -3722,6 +3722,7 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(); assert_eq!( diagnostics, diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 775f94d595f8d3aa6f8425fc6ce2b334d46503f8..7e7a180cd2acb5b64492f534f89b626bd29bffe5 100644 --- 
a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -229,32 +229,44 @@ message Buffer { string content = 2; repeated Operation.Edit history = 3; repeated SelectionSet selections = 4; - DiagnosticSet diagnostics = 5; + repeated Diagnostic diagnostics = 5; } message SelectionSet { uint32 replica_id = 1; uint32 lamport_timestamp = 2; bool is_active = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message Selection { uint64 id = 1; - uint64 start = 2; - uint64 end = 3; + Anchor start = 2; + Anchor end = 3; bool reversed = 4; } -message DiagnosticSet { - repeated VectorClockEntry version = 1; - repeated Diagnostic diagnostics = 2; +message Anchor { + uint32 replica_id = 1; + uint32 local_timestamp = 2; + uint64 offset = 3; + Bias bias = 4; +} + +enum Bias { + Left = 0; + Right = 1; +} + +message UpdateDiagnostics { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Diagnostic diagnostics = 3; } message Diagnostic { - uint64 start = 1; - uint64 end = 2; + Anchor start = 1; + Anchor end = 2; Severity severity = 3; string message = 4; uint64 group_id = 5; @@ -268,8 +280,6 @@ message Diagnostic { } } - - message Operation { oneof variant { Edit edit = 1; @@ -277,7 +287,7 @@ message Operation { UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; SetActiveSelections set_active_selections = 5; - DiagnosticSet update_diagnostics = 6; + UpdateDiagnostics update_diagnostics = 6; } message Edit { @@ -308,8 +318,7 @@ message Operation { uint32 replica_id = 1; uint32 local_timestamp = 2; uint32 lamport_timestamp = 3; - repeated VectorClockEntry version = 4; - repeated Selection selections = 5; + repeated Selection selections = 4; } message RemoveSelections { diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index 454881fece48654b5c4a994158eaf80fe5bc15b1..d2f2cb2c418e863aa0015e4ef3486959cef4db11 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -400,7 +400,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -422,7 +422,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } ); @@ -453,7 +453,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } @@ -465,7 +465,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: None, + diagnostics: vec![], }), } } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 96949d05ff57192341f14995d1fd50fdd875cd4a..6d1b1238c65ad1971d41f2d4d7394387658ba81e 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -947,8 +947,8 @@ mod tests { editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, language::{ - tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry, - LanguageServerConfig, Point, + tree_sitter_rust, AnchorRangeExt, Diagnostic, Language, LanguageConfig, + LanguageRegistry, LanguageServerConfig, Point, }, lsp, project::{ProjectPath, Worktree}, @@ -1705,6 +1705,7 @@ mod tests { assert_eq!( buffer .diagnostics_in_range(0..buffer.len()) + .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) .collect::>(), &[ ( diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 
7799bb2ff004f65c168a56505fdaac5b40492221..cbb6f7f6f5270931d5cba49e754fd79e5f0defe2 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -18,6 +18,11 @@ pub struct Cursor<'a, T: Item, D> { at_end: bool, } +pub struct Iter<'a, T: Item> { + tree: &'a SumTree<T>, + stack: ArrayVec<StackEntry<'a, T, ()>, 16>, +} + impl<'a, T, D> Cursor<'a, T, D> where T: Item, @@ -487,6 +492,71 @@ where } } +impl<'a, T: Item> Iter<'a, T> { + pub(crate) fn new(tree: &'a SumTree<T>) -> Self { + Self { + tree, + stack: Default::default(), + } + } +} + +impl<'a, T: Item> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<Self::Item> { + let mut descend = false; + + if self.stack.is_empty() { + self.stack.push(StackEntry { + tree: self.tree, + index: 0, + position: (), + }); + descend = true; + } + + while self.stack.len() > 0 { + let new_subtree = { + let entry = self.stack.last_mut().unwrap(); + match entry.tree.0.as_ref() { + Node::Internal { child_trees, .. } => { + if !descend { + entry.index += 1; + } + child_trees.get(entry.index) + } + Node::Leaf { items, .. } => { + if !descend { + entry.index += 1; + } + + if let Some(next_item) = items.get(entry.index) { + return Some(next_item); + } else { + None + } + } + } + }; + + if let Some(subtree) = new_subtree { + descend = true; + self.stack.push(StackEntry { + tree: subtree, + index: 0, + position: (), + }); + } else { + descend = false; + self.stack.pop(); + } + } + + None + } +} + impl<'a, T, S, D> Iterator for Cursor<'a, T, D> where T: Item<Summary = S>, diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8b4a45519f007fb70154f1576d34c1774bbe1af0..63fb379d537785220a36cc8217d8fd28924a0a76 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,8 +1,7 @@ mod cursor; use arrayvec::ArrayVec; -pub use cursor::Cursor; -pub use cursor::FilterCursor; +pub use cursor::{Cursor, FilterCursor, Iter}; use std::marker::PhantomData; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; @@ -156,6 +155,10 @@ impl<T: Item> SumTree<T> { items } + pub fn iter(&self) -> Iter<T> { + Iter::new(self) + } + pub fn cursor<'a, S>(&'a self) -> Cursor<T, S> where S: Dimension<'a, T::Summary>, @@ -722,6 +725,10 @@ mod tests { }; assert_eq!(tree.items(&()), reference_items); + assert_eq!( + tree.iter().collect::<Vec<_>>(), + tree.cursor::<()>().collect::<Vec<_>>() + ); let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even, &()); diff --git a/crates/text/src/operation_queue.rs b/crates/text/src/operation_queue.rs index 3c3a64402400623ab14d7a5c0aecaf1c96caab11..ef99faf3e2314c66084ff6b4dc77821445a78318 100644 --- a/crates/text/src/operation_queue.rs +++ b/crates/text/src/operation_queue.rs @@ -1,9 +1,15 @@ -use super::Operation; use std::{fmt::Debug, ops::Add}; -use sum_tree::{Cursor, Dimension, Edit, Item, KeyedItem, SumTree, Summary}; +use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary}; + +pub trait Operation: Clone + Debug { + fn lamport_timestamp(&self) -> clock::Lamport; +} + +#[derive(Clone, Debug)] +struct OperationItem<T>(T); #[derive(Clone, Debug)] -pub struct OperationQueue(SumTree<Operation>); +pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>); #[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)] pub struct OperationKey(clock::Lamport); @@ -20,7 +26,7 @@ impl OperationKey { } } -impl OperationQueue { +impl<T: Operation> OperationQueue<T> { pub fn new() -> Self { OperationQueue(SumTree::new()) } @@ -29,11 +35,15 @@ impl OperationQueue { self.0.summary().len } - pub fn insert(&mut
self, mut ops: Vec) { ops.sort_by_key(|op| op.lamport_timestamp()); ops.dedup_by_key(|op| op.lamport_timestamp()); - self.0 - .edit(ops.into_iter().map(Edit::Insert).collect(), &()); + self.0.edit( + ops.into_iter() + .map(|op| Edit::Insert(OperationItem(op))) + .collect(), + &(), + ); } pub fn drain(&mut self) -> Self { @@ -42,8 +52,8 @@ impl OperationQueue { clone } - pub fn cursor(&self) -> Cursor { - self.0.cursor() + pub fn iter(&self) -> impl Iterator { + self.0.cursor::<()>().map(|i| &i.0) } } @@ -76,22 +86,22 @@ impl<'a> Dimension<'a, OperationSummary> for OperationKey { } } -impl Item for Operation { +impl Item for OperationItem { type Summary = OperationSummary; fn summary(&self) -> Self::Summary { OperationSummary { - key: OperationKey::new(self.lamport_timestamp()), + key: OperationKey::new(self.0.lamport_timestamp()), len: 1, } } } -impl KeyedItem for Operation { +impl KeyedItem for OperationItem { type Key = OperationKey; fn key(&self) -> Self::Key { - OperationKey::new(self.lamport_timestamp()) + OperationKey::new(self.0.lamport_timestamp()) } } @@ -107,21 +117,27 @@ mod tests { assert_eq!(queue.len(), 0); queue.insert(vec![ - Operation::Test(clock.tick()), - Operation::Test(clock.tick()), + TestOperation(clock.tick()), + TestOperation(clock.tick()), ]); assert_eq!(queue.len(), 2); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 3); drop(queue.drain()); assert_eq!(queue.len(), 0); - queue.insert(vec![Operation::Test(clock.tick())]); + queue.insert(vec![TestOperation(clock.tick())]); assert_eq!(queue.len(), 1); } #[derive(Clone, Debug, Eq, PartialEq)] struct TestOperation(clock::Lamport); + + impl Operation for TestOperation { + fn lamport_timestamp(&self) -> clock::Lamport { + self.0 + } + } } diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index e9e7dd1f22f443c54507eb640760e4d4d964d827..ae96e93e51c3b74ad4a7a35fd02af24b6889c99c 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -141,13 +141,13 @@ impl SelectionSet { let end = snapshot.anchor_at(range.end.0, range.end.1); let start_ix = match self .selections - .binary_search_by(|probe| probe.start.cmp(&start, snapshot).unwrap()) + .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) { Ok(ix) | Err(ix) => ix, }; let end_ix = match self .selections - .binary_search_by(|probe| probe.end.cmp(&end, snapshot).unwrap()) + .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) { Ok(ix) | Err(ix) => ix, }; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b896aa687eb5fb4f92efc7d0b24c7c7bcd5086a6..c2e0d8e4ef0b7b4f7bb327ae250620c19732d7fe 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,6 +1,6 @@ mod anchor; mod locator; -mod operation_queue; +pub mod operation_queue; mod patch; mod point; mod point_utf16; @@ -42,7 +42,7 @@ pub struct Buffer { last_edit: clock::Local, history: History, selections: HashMap, - deferred_ops: OperationQueue, + deferred_ops: OperationQueue, deferred_replicas: HashSet, replica_id: ReplicaId, remote_id: u64, @@ -441,8 +441,6 @@ pub enum Operation { set_id: Option, lamport_timestamp: clock::Lamport, }, - #[cfg(test)] - Test(clock::Lamport), } #[derive(Clone, Debug, Eq, PartialEq)] @@ -527,6 +525,10 @@ impl Buffer { self.local_clock.replica_id } + pub fn lamport_timestamp(&self) -> clock::Lamport { + self.lamport_clock + } + pub fn remote_id(&self) -> u64 { self.remote_id } @@ -808,8 +810,6 @@ impl Buffer { } 
self.lamport_clock.observe(lamport_timestamp); } - #[cfg(test)] - Operation::Test(_) => {} } Ok(()) } @@ -1103,7 +1103,7 @@ impl Buffer { fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); - for op in self.deferred_ops.drain().cursor().cloned() { + for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { self.apply_op(op)?; } else { @@ -1129,13 +1129,11 @@ impl Buffer { Operation::SetActiveSelections { set_id, .. } => { set_id.map_or(true, |set_id| self.selections.contains_key(&set_id)) } - #[cfg(test)] - Operation::Test(_) => true, } } } - fn can_resolve(&self, anchor: &Anchor) -> bool { + pub fn can_resolve(&self, anchor: &Anchor) -> bool { *anchor == Anchor::min() || *anchor == Anchor::max() || self.version.observed(anchor.timestamp) @@ -2176,9 +2174,18 @@ impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset impl Operation { fn replica_id(&self) -> ReplicaId { - self.lamport_timestamp().replica_id + operation_queue::Operation::lamport_timestamp(self).replica_id + } + + pub fn is_edit(&self) -> bool { + match self { + Operation::Edit { .. } => true, + _ => false, + } } +} +impl operation_queue::Operation for Operation { fn lamport_timestamp(&self) -> clock::Lamport { match self { Operation::Edit(edit) => edit.timestamp.lamport(), @@ -2194,15 +2201,6 @@ impl Operation { Operation::SetActiveSelections { lamport_timestamp, .. } => *lamport_timestamp, - #[cfg(test)] - Operation::Test(lamport_timestamp) => *lamport_timestamp, - } - } - - pub fn is_edit(&self) -> bool { - match self { - Operation::Edit { .. } => true, - _ => false, } } } From 91a7bbbba2e4229641edba0175e8494bad35028b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 17:53:08 +0100 Subject: [PATCH 035/196] Fix some of the diagnostic tests and make DiagnosticEntry generic Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 39 +++---- crates/editor/src/items.rs | 11 +- crates/language/src/buffer.rs | 61 +++++++---- crates/language/src/diagnostic_set.rs | 54 +++++++--- crates/language/src/language.rs | 1 + crates/language/src/proto.rs | 6 +- crates/language/src/tests.rs | 150 ++++++++++++-------------- crates/project/src/worktree.rs | 13 ++- crates/server/src/rpc.rs | 21 ++-- 9 files changed, 189 insertions(+), 167 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 497fbb2e83284130caaf9dd4c0ce07cecfed470e..e913174199f4a5df41706f803967d50b7a66ffcc 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2825,14 +2825,13 @@ impl Editor { loop { let next_group = buffer - .diagnostics_in_range(search_start..buffer.len()) + .diagnostics_in_range::<_, usize>(search_start..buffer.len()) .find_map(|entry| { - let range = entry.range.to_offset(buffer); if entry.diagnostic.is_primary - && !range.is_empty() - && Some(range.end) != active_primary_range.as_ref().map(|r| *r.end()) + && !entry.range.is_empty() + && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end()) { - Some((range, entry.diagnostic.group_id)) + Some((entry.range, entry.diagnostic.group_id)) } else { None } @@ -2866,12 +2865,11 @@ impl Editor { let buffer = self.buffer.read(cx); let primary_range_start = active_diagnostics.primary_range.start.to_offset(buffer); let is_valid = buffer - .diagnostics_in_range(active_diagnostics.primary_range.clone()) + .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) .any(|entry| { - let range = 
entry.range.to_offset(buffer); entry.diagnostic.is_primary - && !range.is_empty() - && range.start == primary_range_start + && !entry.range.is_empty() + && entry.range.start == primary_range_start && entry.diagnostic.message == active_diagnostics.primary_message }); @@ -2902,17 +2900,16 @@ impl Editor { let mut primary_message = None; let mut group_end = Point::zero(); let diagnostic_group = buffer - .diagnostic_group(group_id) + .diagnostic_group::(group_id) .map(|entry| { - let range = entry.range.to_point(buffer); - if range.end > group_end { - group_end = range.end; + if entry.range.end > group_end { + group_end = entry.range.end; } if entry.diagnostic.is_primary { - primary_range = Some(range.clone()); + primary_range = Some(entry.range.clone()); primary_message = Some(entry.diagnostic.message.clone()); } - (range, entry.diagnostic.clone()) + entry }) .collect::>(); let primary_range = primary_range.unwrap(); @@ -2922,13 +2919,13 @@ impl Editor { let blocks = display_map .insert_blocks( - diagnostic_group.iter().map(|(range, diagnostic)| { + diagnostic_group.iter().map(|entry| { let build_settings = self.build_settings.clone(); - let diagnostic = diagnostic.clone(); + let diagnostic = entry.diagnostic.clone(); let message_height = diagnostic.message.lines().count() as u8; BlockProperties { - position: range.start, + position: entry.range.start, height: message_height, render: Arc::new(move |cx| { let settings = build_settings.borrow()(cx.cx); @@ -2941,11 +2938,7 @@ impl Editor { cx, ) .into_iter() - .zip( - diagnostic_group - .into_iter() - .map(|(_, diagnostic)| diagnostic), - ) + .zip(diagnostic_group.into_iter().map(|entry| entry.diagnostic)) .collect(); Some(ActiveDiagnosticGroup { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 061aece652104e199ea2f9317abf0657d24ad226..7fa25eb884386c4f5dcf10aabcabe1a220bf1030 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -5,7 +5,7 @@ use gpui::{ MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{AnchorRangeExt, Buffer, Diagnostic, File as _}; +use language::{Buffer, Diagnostic, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; @@ -317,11 +317,10 @@ impl DiagnosticMessage { let cursor_position = editor.newest_selection::(cx).head(); let buffer = editor.buffer().read(cx); let new_diagnostic = buffer - .diagnostics_in_range(cursor_position..cursor_position) - .map(|entry| (entry.range.to_offset(buffer), &entry.diagnostic)) - .filter(|(range, _)| !range.is_empty()) - .min_by_key(|(range, diagnostic)| (diagnostic.severity, range.len())) - .map(|(_, diagnostic)| diagnostic.clone()); + .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) + .filter(|entry| !entry.range.is_empty()) + .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) + .map(|entry| entry.diagnostic); if new_diagnostic != self.diagnostic { self.diagnostic = new_diagnostic; cx.notify(); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 99239a3089f3c6929dbd4e62e71288f02ab30103..d34528c78409df2a63c652f5d294b7759445ff70 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -23,6 +23,7 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, + mem, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -109,7 +110,7 @@ struct LanguageServerSnapshot { pub enum Operation { Buffer(text::Operation), UpdateDiagnostics { - 
diagnostics: Arc<[DiagnosticEntry]>, + diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, } @@ -781,29 +782,33 @@ impl Buffer { diagnostics_by_group_id .entry(group_id) .or_insert(Vec::new()) - .push(( + .push(DiagnosticEntry { range, - Diagnostic { + diagnostic: Diagnostic { severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), message: diagnostic.message.clone(), group_id, is_primary: false, }, - )); + }); } drop(edits_since_save); - self.diagnostics - .reset( - diagnostics_by_group_id - .into_values() - .flat_map(|mut diagnostics| { - let primary_diagnostic = - diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap(); - primary_diagnostic.1.is_primary = true; - diagnostics - }), - ); + let mut diagnostics = mem::take(&mut self.diagnostics); + diagnostics.reset( + diagnostics_by_group_id + .into_values() + .flat_map(|mut diagnostics| { + let primary = diagnostics + .iter_mut() + .min_by_key(|entry| entry.diagnostic.severity) + .unwrap(); + primary.diagnostic.is_primary = true; + diagnostics + }), + self, + ); + self.diagnostics = diagnostics; if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -826,18 +831,25 @@ impl Buffer { }) } - pub fn diagnostics_in_range<'a, T>( + pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, - ) -> impl Iterator + ) -> impl 'a + Iterator> where T: 'a + ToOffset, + O: 'a + FromAnchor, { self.diagnostics.range(search_range, self, true) } - pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator { - self.diagnostics.group(group_id) + pub fn diagnostic_group<'a, O>( + &'a self, + group_id: usize, + ) -> impl 'a + Iterator> + where + O: 'a + FromAnchor, + { + self.diagnostics.group(group_id, self) } pub fn diagnostics_update_count(&self) -> usize { @@ -1468,7 +1480,7 @@ impl Buffer { fn apply_diagnostic_update( &mut self, - diagnostics: Arc<[DiagnosticEntry]>, + diagnostics: Arc<[DiagnosticEntry]>, cx: &mut ModelContext, ) { self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); @@ -1679,14 +1691,17 @@ impl Snapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for entry in self.diagnostics.range(range.clone(), self, true) { + for entry in self + .diagnostics + .range::<_, usize>(range.clone(), self, true) + { diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.start.to_offset(self), + offset: entry.range.start, is_start: true, severity: entry.diagnostic.severity, }); diagnostic_endpoints.push(DiagnosticEndpoint { - offset: entry.range.end.to_offset(self), + offset: entry.range.end, is_start: false, severity: entry.diagnostic.severity, }); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 9640ded372f84de50149de9042059f9c28c68695..0a04ef17e88cab5d74548b4ca3af5c1e3310de58 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -5,16 +5,16 @@ use std::{ ops::Range, }; use sum_tree::{self, Bias, SumTree}; -use text::{Anchor, PointUtf16, ToOffset}; +use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; #[derive(Clone, Default)] pub struct DiagnosticSet { - diagnostics: SumTree, + diagnostics: SumTree>, } -#[derive(Clone, Debug)] -pub struct DiagnosticEntry { - pub range: Range, +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct DiagnosticEntry { + pub range: Range, pub diagnostic: Diagnostic, } @@ -30,33 +30,42 @@ pub struct Summary { impl DiagnosticSet { 
pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self where - I: IntoIterator, + I: IntoIterator>, { Self { diagnostics: SumTree::from_iter(iter, buffer), } } - pub fn reset(&mut self, iter: I) + pub fn reset(&mut self, iter: I, buffer: &text::Snapshot) where - I: IntoIterator, Diagnostic)>, + I: IntoIterator>, { let mut entries = iter.into_iter().collect::>(); - entries.sort_unstable_by_key(|(range, _)| (range.start, Reverse(range.end))); + entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); + self.diagnostics = SumTree::from_iter( + entries.into_iter().map(|entry| DiagnosticEntry { + range: buffer.anchor_before(entry.range.start) + ..buffer.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }), + buffer, + ); } - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator> { self.diagnostics.iter() } - pub fn range<'a, T>( + pub fn range<'a, T, O>( &'a self, range: Range, buffer: &'a text::Snapshot, inclusive: bool, - ) -> impl Iterator + ) -> impl 'a + Iterator> where T: 'a + ToOffset, + O: FromAnchor, { let end_bias = if inclusive { Bias::Right } else { Bias::Left }; let range = buffer.anchor_before(range.start)..buffer.anchor_at(range.end, end_bias); @@ -79,7 +88,7 @@ impl DiagnosticSet { move || { if let Some(diagnostic) = cursor.item() { cursor.next(buffer); - Some(diagnostic) + Some(diagnostic.resolve(buffer)) } else { None } @@ -87,13 +96,18 @@ impl DiagnosticSet { }) } - pub fn group(&self, group_id: usize) -> impl Iterator { + pub fn group<'a, O: FromAnchor>( + &'a self, + group_id: usize, + buffer: &'a text::Snapshot, + ) -> impl 'a + Iterator> { self.iter() .filter(move |entry| entry.diagnostic.group_id == group_id) + .map(|entry| entry.resolve(buffer)) } } -impl sum_tree::Item for DiagnosticEntry { +impl sum_tree::Item for DiagnosticEntry { type Summary = Summary; fn summary(&self) -> Self::Summary { @@ -107,6 +121,16 @@ impl sum_tree::Item for DiagnosticEntry { } } +impl DiagnosticEntry { + pub fn resolve(&self, buffer: &text::Snapshot) -> DiagnosticEntry { + DiagnosticEntry { + range: O::from_anchor(&self.range.start, buffer) + ..O::from_anchor(&self.range.end, buffer), + diagnostic: self.diagnostic.clone(), + } + } +} + impl Default for Summary { fn default() -> Self { Self { diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 619ce19689e84d719aa75d2ee3945a4f123f8286..99161d1f5c778464e8c5d6f98367801a8381ccf1 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -8,6 +8,7 @@ mod tests; use anyhow::{anyhow, Result}; pub use buffer::Operation; pub use buffer::*; +pub use diagnostic_set::DiagnosticEntry; use gpui::{executor::Background, AppContext}; use highlight_map::HighlightMap; use lazy_static::lazy_static; diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 851ab76bcaef2cbf0e2a55e2de9cf019974b9972..6f36c7dc0bccdf6afaa60ba05bf7cb898c86f8ba 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -127,7 +127,7 @@ pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { } pub fn serialize_diagnostics<'a>( - diagnostics: impl IntoIterator, + diagnostics: impl IntoIterator>, ) -> Vec { diagnostics .into_iter() @@ -304,7 +304,9 @@ pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { } } -pub fn deserialize_diagnostics(diagnostics: Vec) -> Vec { +pub fn deserialize_diagnostics( + diagnostics: Vec, +) -> Vec> { diagnostics .into_iter() 
.filter_map(|diagnostic| { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index d1f48245db62eccc939ea0eaad4cdad79febd436..cc873f253ad380e7f99d1b958887dd89fbe2f02c 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -532,28 +532,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // The diagnostics have moved down since they were created. assert_eq!( buffer - .diagnostics_in_range(Point::new(3, 0)..Point::new(5, 0)) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ), - ( - Point::new(4, 9)..Point::new(4, 12), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(4, 9)..Point::new(4, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, } - ) + } ] ); assert_eq!( @@ -600,28 +599,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(Point::new(2, 0)..Point::new(3, 0)) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ - ( - Point::new(2, 9)..Point::new(2, 12), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 12), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), group_id: 1, is_primary: true, } - ), - ( - Point::new(2, 9)..Point::new(2, 10), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 10), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, }, - ) + } ] ); assert_eq!( @@ -680,28 +678,27 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(2, 21)..Point::new(2, 22), - &Diagnostic { + DiagnosticEntry { + range: Point::new(2, 21)..Point::new(2, 22), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(3, 9)..Point::new(3, 11), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, }, - ) + } ] ); }); @@ -866,117 +863,110 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { buffer.update_diagnostics(None, diagnostics, cx).unwrap(); assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 
0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); assert_eq!( - buffer - .diagnostic_group(0) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) - .collect::>(), + buffer.diagnostic_group::(0).collect::>(), &[ - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "error 1".to_string(), group_id: 0, is_primary: true, } - ), - ( - Point::new(1, 8)..Point::new(1, 9), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 1 hint 1".to_string(), group_id: 0, is_primary: false, } - ), + }, ] ); assert_eq!( - buffer - .diagnostic_group(1) - .map(|entry| (entry.range.to_point(&buffer), &entry.diagnostic)) - .collect::>(), + buffer.diagnostic_group::(1).collect::>(), &[ - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 1".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(1, 13)..Point::new(1, 15), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { severity: DiagnosticSeverity::HINT, message: "error 2 hint 2".to_string(), group_id: 1, is_primary: false, } - ), - ( - Point::new(2, 8)..Point::new(2, 17), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "error 2".to_string(), group_id: 1, is_primary: true, } - ) + } ] ); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 393e92dfb98aed443db8a9f86e90f31583373bd8..943ab6dbd00dcd12522a978bb20803998f2f4d00 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3005,7 +3005,7 @@ mod tests { use anyhow::Result; use client::test::{FakeHttpClient, FakeServer}; use fs::RealFs; - use language::{tree_sitter_rust, AnchorRangeExt, LanguageServerConfig}; + use language::{tree_sitter_rust, DiagnosticEntry, LanguageServerConfig}; use language::{Diagnostic, LanguageConfig}; use lsp::Url; use rand::prelude::*; @@ -3721,20 +3721,19 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer - .diagnostics_in_range(0..buffer.len()) - 
.map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(); assert_eq!( diagnostics, - &[( - Point::new(0, 9)..Point::new(0, 10), - &Diagnostic { + &[DiagnosticEntry { + range: Point::new(0, 9)..Point::new(0, 10), + diagnostic: Diagnostic { severity: lsp::DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true } - )] + }] ) }); } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 6d1b1238c65ad1971d41f2d4d7394387658ba81e..a37dc56532e19722dc2ff0abd9faefcf30848677 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -947,7 +947,7 @@ mod tests { editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, language::{ - tree_sitter_rust, AnchorRangeExt, Diagnostic, Language, LanguageConfig, + tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig, Point, }, lsp, @@ -1704,28 +1704,27 @@ mod tests { buffer_b.read_with(&cx_b, |buffer, _| { assert_eq!( buffer - .diagnostics_in_range(0..buffer.len()) - .map(|entry| (entry.range.to_point(buffer), &entry.diagnostic)) + .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - ( - Point::new(0, 4)..Point::new(0, 7), - &Diagnostic { + DiagnosticEntry { + range: Point::new(0, 4)..Point::new(0, 7), + diagnostic: Diagnostic { group_id: 0, message: "message 1".to_string(), severity: lsp::DiagnosticSeverity::ERROR, is_primary: true } - ), - ( - Point::new(0, 10)..Point::new(0, 13), - &Diagnostic { + }, + DiagnosticEntry { + range: Point::new(0, 10)..Point::new(0, 13), + diagnostic: Diagnostic { group_id: 1, severity: lsp::DiagnosticSeverity::WARNING, message: "message 2".to_string(), is_primary: true } - ) + } ] ); }); From e9c385e7a6706b791e811d0b82e2b49dd0788c97 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 9 Dec 2021 18:27:54 +0100 Subject: [PATCH 036/196] WIP --- crates/text/src/locator.rs | 21 +++++++++++++++------ crates/text/src/text.rs | 4 ++-- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 0a22ea58f904b4fc28efc2ac785bd67ef5abd2dd..249e79b6fd866a149bc54187651e17235c97b7ad 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -2,23 +2,28 @@ use smallvec::{smallvec, SmallVec}; use std::iter; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u8; 4]>); +pub struct Locator(SmallVec<[u64; 4]>); impl Locator { pub fn min() -> Self { - Self(smallvec![u8::MIN]) + Self(smallvec![u64::MIN]) } pub fn max() -> Self { - Self(smallvec![u8::MAX]) + Self(smallvec![u64::MAX]) + } + + pub fn assign(&mut self, other: &Self) { + self.0.resize(other.0.len(), 0); + self.0.copy_from_slice(&other.0); } pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); + let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN)); + let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { - let mid = lhs + (rhs.saturating_sub(lhs)) / 2; + let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48); location.push(mid); if mid > lhs { break; @@ -26,6 +31,10 @@ impl Locator { } Self(location) } + + pub fn len(&self) -> usize { + self.0.len() + } } impl Default for Locator { diff --git a/crates/text/src/text.rs 
b/crates/text/src/text.rs index c2e0d8e4ef0b7b4f7bb327ae250620c19732d7fe..398550165953dee820a72b4ce12bd840a3cb77c6 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2002,7 +2002,7 @@ impl sum_tree::Summary for FragmentSummary { type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { - self.max_id = other.max_id.clone(); + self.max_id.assign(&other.max_id); self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; self.max_version.join(&other.max_version); @@ -2113,7 +2113,7 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Locator { fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { - *self = summary.max_id.clone(); + self.assign(&summary.max_id); } } From 1ed1ec21ddb3ec44e0111c2e87827cae87db76ab Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 9 Dec 2021 10:42:44 -0800 Subject: [PATCH 037/196] Batch anchor resolution, avoid cloning fragment ids when seeking --- crates/text/src/selection.rs | 13 ++++++- crates/text/src/tests.rs | 4 +-- crates/text/src/text.rs | 68 +++++++++++++++++++++++++++++++----- 3 files changed, 73 insertions(+), 12 deletions(-) diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index ae96e93e51c3b74ad4a7a35fd02af24b6889c99c..5142baf7f55a4f2370d9258473cb2a36e1e7e855 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -125,7 +125,18 @@ impl SelectionSet { where D: 'a + TextDimension<'a>, { - self.selections.iter().map(|s| s.resolve(snapshot)) + let anchors = self + .selections + .iter() + .flat_map(|selection| [&selection.start, &selection.end].into_iter()); + let mut positions = snapshot.summaries_for_anchors::(anchors); + self.selections.iter().map(move |selection| Selection { + start: positions.next().unwrap(), + end: positions.next().unwrap(), + goal: selection.goal, + reversed: selection.reversed, + id: selection.id, + }) } pub fn intersecting_selections<'a, D, I>( diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index f7f307049cbd4e089e59ee5a34141f3d116d9bf5..5439e71af73ac8d5f19006a404b6eb4fe6170f62 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -645,9 +645,9 @@ impl Buffer { assert_eq!(insertion_fragment.fragment_id, fragment.id); } - let mut cursor = self.snapshot.fragments.cursor::(); + let mut cursor = self.snapshot.fragments.cursor::>(); for insertion_fragment in self.snapshot.insertions.cursor::<()>() { - cursor.seek(&insertion_fragment.fragment_id, Bias::Left, &None); + cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None); let fragment = cursor.item().unwrap(); assert_eq!(insertion_fragment.fragment_id, fragment.id); assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 398550165953dee820a72b4ce12bd840a3cb77c6..d8c9c43d5fc8556ee3495041c0f78aa05edd8101 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1672,6 +1672,56 @@ impl Snapshot { result } + pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator + where + D: 'a + TextDimension<'a>, + A: 'a + IntoIterator, + { + let anchors = anchors.into_iter(); + let mut insertion_cursor = self.insertions.cursor::(); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + let mut text_cursor = self.visible_text.cursor(0); + let mut position = D::default(); + + anchors.map(move 
|anchor| { + if *anchor == Anchor::min() { + return D::default(); + } else if *anchor == Anchor::max() { + return D::from_text_summary(&self.visible_text.summary()); + } + + let anchor_key = InsertionFragmentKey { + timestamp: anchor.timestamp, + split_offset: anchor.offset, + }; + insertion_cursor.seek(&anchor_key, anchor.bias, &()); + if let Some(insertion) = insertion_cursor.item() { + let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key); + if comparison == Ordering::Greater + || (anchor.bias == Bias::Left + && comparison == Ordering::Equal + && anchor.offset > 0) + { + insertion_cursor.prev(&()); + } + } else { + insertion_cursor.prev(&()); + } + let insertion = insertion_cursor.item().expect("invalid insertion"); + debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); + + fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None); + let fragment = fragment_cursor.item().unwrap(); + let mut fragment_offset = fragment_cursor.start().1; + if fragment.visible { + fragment_offset += anchor.offset - insertion.split_offset; + } + + position.add_assign(&text_cursor.summary(fragment_offset)); + position.clone() + }) + } + fn summary_for_anchor<'a, D>(&'a self, anchor: &Anchor) -> D where D: TextDimension<'a>, @@ -1702,8 +1752,8 @@ impl Snapshot { let insertion = insertion_cursor.item().expect("invalid insertion"); debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, usize)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { @@ -1741,8 +1791,8 @@ impl Snapshot { let insertion = insertion_cursor.item().expect("invalid insertion"); debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - let mut fragment_cursor = self.fragments.cursor::<(Locator, FullOffset)>(); - fragment_cursor.seek(&insertion.fragment_id, Bias::Left, &None); + let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, FullOffset)>(); + fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) } } @@ -1771,10 +1821,10 @@ impl Snapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::max() } else { - let mut fragment_cursor = self.fragments.cursor::<(usize, Locator)>(); + let mut fragment_cursor = self.fragments.cursor::(); fragment_cursor.seek(&offset, bias, &None); let fragment = fragment_cursor.item().unwrap(); - let overshoot = offset - fragment_cursor.start().0; + let overshoot = offset - *fragment_cursor.start(); Anchor { timestamp: fragment.insertion_timestamp.local(), offset: fragment.insertion_offset + overshoot, @@ -2111,9 +2161,9 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { } } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Locator { - fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { - self.assign(&summary.max_id); +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + *self = Some(&summary.max_id); } } From eeba0993aa936219e0e0b1a3f2a15fe6acce7488 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: 
Fri, 10 Dec 2021 09:16:58 +0100 Subject: [PATCH 038/196] Optimize anchor comparison and take full advantage of fragment IDs --- Cargo.lock | 1 + crates/text/Cargo.toml | 1 + crates/text/src/anchor.rs | 12 +++++++----- crates/text/src/locator.rs | 6 ++++++ crates/text/src/text.rs | 12 ++++-------- 5 files changed, 19 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1188259ac23e9b18f57fd22c68da69da81b012d..3c89563e3a403aef5a379287dc45407f570e5d26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4853,6 +4853,7 @@ dependencies = [ "ctor", "env_logger", "gpui", + "lazy_static", "log", "parking_lot", "rand 0.8.3", diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index 2b68a71a080ea412ee534d662b85e656fa51edc3..f4b7d7453f0d0902ca855da31f549ee1a48bef40 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -15,6 +15,7 @@ collections = { path = "../collections" } sum_tree = { path = "../sum_tree" } anyhow = "1.0.38" arrayvec = "0.7.1" +lazy_static = "1.4" log = "0.4" parking_lot = "0.11" rand = { version = "0.8.3", optional = true } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index a781c9f8876e78d50f2f05b66d1f34d2ab6b7b0e..85be47ad90dc040df9b4db849e78b6f1bb7ded04 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -28,15 +28,17 @@ impl Anchor { } pub fn cmp<'a>(&self, other: &Anchor, buffer: &Snapshot) -> Result { - let offset_comparison = if self.timestamp == other.timestamp { - self.offset.cmp(&other.offset) + let fragment_id_comparison = if self.timestamp == other.timestamp { + Ordering::Equal } else { buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) + .fragment_id_for_anchor(self) + .cmp(&buffer.fragment_id_for_anchor(other)) }; - Ok(offset_comparison.then_with(|| self.bias.cmp(&other.bias))) + Ok(fragment_id_comparison + .then_with(|| self.offset.cmp(&other.offset)) + .then_with(|| self.bias.cmp(&other.bias))) } pub fn bias_left(&self, buffer: &Buffer) -> Anchor { diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index 249e79b6fd866a149bc54187651e17235c97b7ad..e4feaf99ac2e4252e8c002387faf7abce52c9467 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -1,6 +1,12 @@ +use lazy_static::lazy_static; use smallvec::{smallvec, SmallVec}; use std::iter; +lazy_static! 
{ + pub static ref MIN: Locator = Locator::min(); + pub static ref MAX: Locator = Locator::max(); +} + #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Locator(SmallVec<[u64; 4]>); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index d8c9c43d5fc8556ee3495041c0f78aa05edd8101..6df4a67881ff021245ff9eda08ae714dd0feb677 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1763,12 +1763,11 @@ impl Snapshot { } } - fn full_offset_for_anchor(&self, anchor: &Anchor) -> FullOffset { + fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator { if *anchor == Anchor::min() { - Default::default() + &locator::MIN } else if *anchor == Anchor::max() { - let text = self.fragments.summary().text; - FullOffset(text.visible + text.deleted) + &locator::MAX } else { let anchor_key = InsertionFragmentKey { timestamp: anchor.timestamp, @@ -1790,10 +1789,7 @@ impl Snapshot { } let insertion = insertion_cursor.item().expect("invalid insertion"); debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion"); - - let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, FullOffset)>(); - fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None); - fragment_cursor.start().1 + (anchor.offset - insertion.split_offset) + &insertion.fragment_id } } From cb97b7cd1d766fafb1775ebaae73511f358b420a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 09:43:21 +0100 Subject: [PATCH 039/196] Fix diagnostic unit test --- crates/language/src/buffer.rs | 8 +++----- crates/language/src/diagnostic_set.rs | 20 +++++++++++--------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d34528c78409df2a63c652f5d294b7759445ff70..d1beff9dd5072c94f2f5026646abf3d7f8472df7 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -23,7 +23,6 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, - mem, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -794,8 +793,7 @@ impl Buffer { } drop(edits_since_save); - let mut diagnostics = mem::take(&mut self.diagnostics); - diagnostics.reset( + let new_diagnostics = DiagnosticSet::new( diagnostics_by_group_id .into_values() .flat_map(|mut diagnostics| { @@ -806,9 +804,9 @@ impl Buffer { primary.diagnostic.is_primary = true; diagnostics }), - self, + content, ); - self.diagnostics = diagnostics; + self.diagnostics = new_diagnostics; if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 0a04ef17e88cab5d74548b4ca3af5c1e3310de58..b67a2fe835597e543c99409943439ae34f07194f 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -37,20 +37,22 @@ impl DiagnosticSet { } } - pub fn reset(&mut self, iter: I, buffer: &text::Snapshot) + pub fn new(iter: I, buffer: &text::Snapshot) -> Self where I: IntoIterator>, { let mut entries = iter.into_iter().collect::>(); entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); - self.diagnostics = SumTree::from_iter( - entries.into_iter().map(|entry| DiagnosticEntry { - range: buffer.anchor_before(entry.range.start) - ..buffer.anchor_after(entry.range.end), - diagnostic: entry.diagnostic, - }), - buffer, - ); + Self { + diagnostics: SumTree::from_iter( + entries.into_iter().map(|entry| DiagnosticEntry { + range: 
buffer.anchor_before(entry.range.start) + ..buffer.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }), + buffer, + ), + } } pub fn iter(&self) -> impl Iterator> { From d9da8effd4b52ed357f0e5f836c4a4ab6b028db8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 14:22:43 +0100 Subject: [PATCH 040/196] Re-implement `edits_since_in_range` in terms of `Locator` Co-Authored-By: Nathan Sobo --- crates/text/src/text.rs | 59 +++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 35 deletions(-) diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 69439c7aa39f6d758154dc537ae7a73587838e3a..6a27d41ca200c858d8ea6568006ac6f06f491b0f 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -306,7 +306,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> { since: &'a clock::Global, old_end: D, new_end: D, - range: Range, + range: Range<(&'a Locator, usize)>, } #[derive(Clone, Debug, Default, Eq, PartialEq)] @@ -396,12 +396,6 @@ struct FragmentTextSummary { deleted: usize, } -impl FragmentTextSummary { - pub fn full_offset(&self) -> FullOffset { - FullOffset(self.visible + self.deleted) - } -} - impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { self.visible += summary.text.visible; @@ -1871,28 +1865,24 @@ impl BufferSnapshot { .filter(move |summary| !since.ge(&summary.max_version), &None), ) }; - let mut cursor = self .fragments - .cursor::<(VersionedFullOffset, FragmentTextSummary)>(); - cursor.seek( - &VersionedFullOffset::Offset(range.start.full_offset), - range.start.bias, - &Some(range.start.version), - ); + .cursor::<(Option<&Locator>, FragmentTextSummary)>(); + + let start_fragment_id = self.fragment_id_for_anchor(&range.start); + cursor.seek(&Some(start_fragment_id), Bias::Left, &None); let mut visible_start = cursor.start().1.visible; let mut deleted_start = cursor.start().1.deleted; if let Some(fragment) = cursor.item() { - let overshoot = range.start.full_offset.0 - cursor.start().0.full_offset().0; + let overshoot = range.start.offset - fragment.insertion_offset; if fragment.visible { visible_start += overshoot; } else { deleted_start += overshoot; } } + let end_fragment_id = self.fragment_id_for_anchor(&range.end); - let full_offset_start = FullOffset(visible_start + deleted_start); - let full_offset_end = range.end.to_full_offset(self, range.end.bias); Edits { visible_cursor: self.visible_text.cursor(visible_start), deleted_cursor: self.deleted_text.cursor(deleted_start), @@ -1901,7 +1891,7 @@ impl BufferSnapshot { since, old_end: Default::default(), new_end: Default::default(), - range: full_offset_start..full_offset_end, + range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset), } } } @@ -1967,10 +1957,10 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo let cursor = self.fragments_cursor.as_mut()?; while let Some(fragment) = cursor.item() { - if cursor.end(&None).full_offset() < self.range.start { + if fragment.id < *self.range.start.0 { cursor.next(&None); continue; - } else if cursor.start().full_offset() >= self.range.end { + } else if fragment.id > *self.range.end.0 { break; } @@ -1988,10 +1978,13 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo } if !fragment.was_visible(&self.since, &self.undos) && fragment.visible { - let visible_end = cmp::min( - cursor.end(&None).visible, - 
cursor.start().visible + (self.range.end - cursor.start().full_offset()), - ); + let mut visible_end = cursor.end(&None).visible; + if fragment.id == *self.range.end.0 { + visible_end = cmp::min( + visible_end, + cursor.start().visible + (self.range.end.1 - fragment.insertion_offset), + ); + } let fragment_summary = self.visible_cursor.summary(visible_end); let mut new_end = self.new_end.clone(); @@ -2007,10 +2000,13 @@ impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator fo self.new_end = new_end; } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible { - let deleted_end = cmp::min( - cursor.end(&None).deleted, - cursor.start().deleted + (self.range.end - cursor.start().full_offset()), - ); + let mut deleted_end = cursor.end(&None).deleted; + if fragment.id == *self.range.end.0 { + deleted_end = cmp::min( + deleted_end, + cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset), + ); + } if cursor.start().deleted > self.deleted_cursor.offset() { self.deleted_cursor.seek_forward(cursor.start().deleted); @@ -2295,13 +2291,6 @@ impl operation_queue::Operation for Operation { pub trait ToOffset { fn to_offset<'a>(&self, snapshot: &BufferSnapshot) -> usize; - - fn to_full_offset<'a>(&self, snapshot: &BufferSnapshot, bias: Bias) -> FullOffset { - let offset = self.to_offset(&snapshot); - let mut cursor = snapshot.fragments.cursor::(); - cursor.seek(&offset, bias, &None); - FullOffset(offset + cursor.start().deleted) - } } impl ToOffset for Point { From 9c74deb9ecd2ec6f1a1e478db34e4240d85ce27a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 15:58:37 +0100 Subject: [PATCH 041/196] Finish removing anchor collections from MultiBuffer Co-Authored-By: Nathan Sobo --- crates/language/src/diagnostic_set.rs | 12 +- crates/language/src/multi_buffer.rs | 140 ++++++---- crates/language/src/multi_buffer/anchor.rs | 244 +----------------- crates/language/src/multi_buffer/location.rs | 76 ------ crates/language/src/multi_buffer/selection.rs | 122 +++++---- crates/text/src/text.rs | 2 +- 6 files changed, 181 insertions(+), 415 deletions(-) delete mode 100644 crates/language/src/multi_buffer/location.rs diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index b67a2fe835597e543c99409943439ae34f07194f..caef7569c53cf1315530c5534747cfaf8b329548 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -28,7 +28,7 @@ pub struct Summary { } impl DiagnosticSet { - pub fn from_sorted_entries(iter: I, buffer: &text::Snapshot) -> Self + pub fn from_sorted_entries(iter: I, buffer: &text::BufferSnapshot) -> Self where I: IntoIterator>, { @@ -37,7 +37,7 @@ impl DiagnosticSet { } } - pub fn new(iter: I, buffer: &text::Snapshot) -> Self + pub fn new(iter: I, buffer: &text::BufferSnapshot) -> Self where I: IntoIterator>, { @@ -62,7 +62,7 @@ impl DiagnosticSet { pub fn range<'a, T, O>( &'a self, range: Range, - buffer: &'a text::Snapshot, + buffer: &'a text::BufferSnapshot, inclusive: bool, ) -> impl 'a + Iterator> where @@ -101,7 +101,7 @@ impl DiagnosticSet { pub fn group<'a, O: FromAnchor>( &'a self, group_id: usize, - buffer: &'a text::Snapshot, + buffer: &'a text::BufferSnapshot, ) -> impl 'a + Iterator> { self.iter() .filter(move |entry| entry.diagnostic.group_id == group_id) @@ -124,7 +124,7 @@ impl sum_tree::Item for DiagnosticEntry { } impl DiagnosticEntry { - pub fn resolve(&self, buffer: &text::Snapshot) -> DiagnosticEntry { + pub fn resolve(&self, 
buffer: &text::BufferSnapshot) -> DiagnosticEntry { DiagnosticEntry { range: O::from_anchor(&self.range.start, buffer) ..O::from_anchor(&self.range.end, buffer), @@ -146,7 +146,7 @@ impl Default for Summary { } impl sum_tree::Summary for Summary { - type Context = text::Snapshot; + type Context = text::BufferSnapshot; fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { if other diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index eb765688561545f3b9062675c13363834959d257..081f4ff8e172ac03e422461b9e04d9736f346f5a 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -1,32 +1,36 @@ mod anchor; -mod location; mod selection; -use self::location::*; use crate::{ buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, BufferSnapshot, Diagnostic, File, Language, }; +pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use parking_lot::{Mutex, MutexGuard}; -use smallvec::SmallVec; -use std::{cmp, io, ops::Range, sync::Arc, time::SystemTime}; +pub use selection::SelectionSet; +use std::{ + cmp, io, + ops::{Range, Sub}, + sync::Arc, + time::SystemTime, +}; use sum_tree::{Bias, Cursor, SumTree}; use text::{ + locator::Locator, rope::TextDimension, subscription::{Subscription, Topic}, AnchorRangeExt as _, Edit, Point, PointUtf16, Selection, SelectionSetId, TextSummary, }; use theme::SyntaxTheme; -pub use anchor::{Anchor, AnchorRangeExt, AnchorRangeMap, AnchorRangeSet}; -pub use selection::SelectionSet; - const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; +pub type ExcerptId = Locator; + #[derive(Default)] pub struct MultiBuffer { snapshot: Mutex, @@ -314,10 +318,10 @@ impl MultiBuffer { let mut edits = Vec::new(); let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(ExcerptId, usize)>(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); for (id, buffer_state) in excerpts_to_edit { - new_excerpts.push_tree(cursor.slice(id, Bias::Left, &()), &()); + new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer_state.buffer.read(cx); @@ -411,36 +415,6 @@ impl MultiBuffer { self.snapshot.lock().anchor_at(position, bias) } - pub fn anchor_range_map( - &self, - start_bias: Bias, - end_bias: Bias, - entries: E, - ) -> AnchorRangeMap - where - E: IntoIterator, T)>, - { - let entries = entries.into_iter().peekable(); - let mut child_maps = SmallVec::new(); - if let Some((range, _)) = entries.peek() { - let mut cursor = self.snapshot.lock().excerpts.cursor::(); - cursor.seek(&range.start, Bias::Right, &()); - let mut excerpt_end = cursor.end(&()); - - // child_maps.push - - // for entry in entries {} - } - AnchorRangeMap { child_maps } - } - - pub fn anchor_range_set(&self, start_bias: Bias, end_bias: Bias, ranges: E) -> AnchorRangeSet - where - E: IntoIterator>, - { - todo!() - } - pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { self.snapshot.lock().clip_offset(offset, bias) } @@ -863,6 +837,77 @@ impl MultiBufferSnapshot { summary } + fn summary_for_anchor(&self, anchor: &Anchor) -> D + where + D: TextDimension + Ord + Sub, + { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&Some(&anchor.excerpt_id), Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == anchor.excerpt_id { + let mut 
excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); + let buffer_point = anchor.text_anchor.summary::(&excerpt.buffer); + if buffer_point > excerpt_buffer_start { + excerpt_start.add_assign(&(buffer_point - excerpt_buffer_start)); + } + return excerpt_start; + } + } + D::from_text_summary(&cursor.start().text) + } + + fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec + where + D: TextDimension + Ord + Sub, + I: 'a + IntoIterator, + { + let mut anchors = anchors.into_iter().peekable(); + let mut cursor = self.excerpts.cursor::(); + let mut summaries = Vec::new(); + while let Some(anchor) = anchors.peek() { + let excerpt_id = &anchor.excerpt_id; + cursor.seek(&Some(excerpt_id), Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + let excerpt_exists = excerpt.id == *excerpt_id; + let excerpt_anchors = std::iter::from_fn(|| { + let anchor = anchors.peek()?; + if anchor.excerpt_id == *excerpt_id { + Some(&anchors.next().unwrap().text_anchor) + } else { + None + } + }); + + if excerpt_exists { + let mut excerpt_start = D::from_text_summary(&cursor.start().text); + excerpt_start.add_summary(&excerpt.header_summary(), &()); + let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); + summaries.extend( + excerpt + .buffer + .summaries_for_anchors::(excerpt_anchors) + .map(move |summary| { + let mut excerpt_start = excerpt_start.clone(); + let excerpt_buffer_start = excerpt_buffer_start.clone(); + if summary > excerpt_buffer_start { + excerpt_start.add_assign(&(summary - excerpt_buffer_start)); + } + excerpt_start + }), + ); + } else { + excerpt_anchors.for_each(drop); + } + } else { + break; + } + } + + summaries + } + pub fn anchor_before(&self, position: T) -> Anchor { self.anchor_at(position, Bias::Left) } @@ -923,12 +968,12 @@ impl MultiBufferSnapshot { fn buffer_snapshot_for_excerpt<'a>( &'a self, - excerpt_id: &ExcerptId, + excerpt_id: &'a ExcerptId, ) -> Option<&'a BufferSnapshot> { - let mut cursor = self.excerpts.cursor::(); - cursor.seek(excerpt_id, Bias::Left, &()); + let mut cursor = self.excerpts.cursor::>(); + cursor.seek(&Some(excerpt_id), Bias::Left, &()); if let Some(excerpt) = cursor.item() { - if cursor.start() == excerpt_id { + if *cursor.start() == Some(excerpt_id) { return Some(&excerpt.buffer); } } @@ -1020,9 +1065,9 @@ impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for usize { } } -impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Location { +impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Option<&'a ExcerptId> { fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering { - Ord::cmp(self, &cursor_location.excerpt_id) + Ord::cmp(self, &Some(&cursor_location.excerpt_id)) } } @@ -1038,10 +1083,9 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 { } } -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Location { +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { - debug_assert!(summary.excerpt_id > *self); - *self = summary.excerpt_id.clone(); + *self = Some(&summary.excerpt_id); } } diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs index 5f5173925f40626ddb473d30e25f9c00c4be270c..033ed009b0be435e34fced2963bdfa59f86c5ce3 100644 --- 
a/crates/language/src/multi_buffer/anchor.rs +++ b/crates/language/src/multi_buffer/anchor.rs @@ -1,27 +1,18 @@ -use super::{location::*, ExcerptSummary, MultiBufferSnapshot, ToOffset, ToPoint}; +use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint}; use anyhow::{anyhow, Result}; -use smallvec::SmallVec; use std::{ cmp::Ordering, ops::{Range, Sub}, }; use sum_tree::Bias; -use text::{rope::TextDimension, AnchorRangeExt as _, Point}; +use text::{rope::TextDimension, Point}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub struct Anchor { - excerpt_id: ExcerptId, - text_anchor: text::Anchor, + pub(crate) excerpt_id: ExcerptId, + pub(crate) text_anchor: text::Anchor, } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct AnchorRangeMap { - pub(crate) child_maps: SmallVec<[(ExcerptId, text::AnchorRangeMap); 1]>, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct AnchorRangeSet(AnchorRangeMap<()>); - impl Anchor { pub fn min() -> Self { Self { @@ -75,234 +66,11 @@ impl Anchor { self.clone() } - pub fn summary<'a, D>(&self, snapshot: &'a MultiBufferSnapshot) -> D + pub fn summary(&self, snapshot: &MultiBufferSnapshot) -> D where D: TextDimension + Ord + Sub, { - let mut cursor = snapshot.excerpts.cursor::(); - cursor.seek(&self.excerpt_id, Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == self.excerpt_id { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); - let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); - let buffer_point = self.text_anchor.summary::(&excerpt.buffer); - if buffer_point > excerpt_buffer_start { - excerpt_start.add_assign(&(buffer_point - excerpt_buffer_start)); - } - return excerpt_start; - } - } - D::from_text_summary(&cursor.start().text) - } -} - -impl AnchorRangeMap { - pub fn len(&self) -> usize { - self.child_maps - .iter() - .map(|(_, text_map)| text_map.len()) - .sum() - } - - pub fn ranges<'a, D>( - &'a self, - snapshot: &'a MultiBufferSnapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: TextDimension + Clone, - { - let mut cursor = snapshot.excerpts.cursor::(); - self.child_maps - .iter() - .filter_map(move |(excerpt_id, text_map)| { - cursor.seek_forward(excerpt_id, Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); - return Some(text_map.ranges::(&excerpt.buffer).map( - move |(range, value)| { - let mut full_range = excerpt_start.clone()..excerpt_start.clone(); - full_range.start.add_assign(&range.start); - full_range.end.add_assign(&range.end); - (full_range, value) - }, - )); - } - } - None - }) - .flatten() - } - - pub fn intersecting_ranges<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - snapshot: &'a MultiBufferSnapshot, - ) -> impl Iterator, &'a T)> + 'a - where - D: TextDimension, - I: ToOffset, - { - let start_bias = range.start.1; - let end_bias = range.end.1; - let start_offset = range.start.0.to_offset(snapshot); - let end_offset = range.end.0.to_offset(snapshot); - - let mut cursor = snapshot.excerpts.cursor::(); - cursor.seek(&start_offset, start_bias, &()); - let start_excerpt_id = &cursor.start().excerpt_id; - let start_ix = match self - .child_maps - .binary_search_by_key(&start_excerpt_id, |e| &e.0) - { - Ok(ix) | Err(ix) => ix, - }; - - let mut entry_ranges = None; - let mut entries = 
self.child_maps[start_ix..].iter(); - std::iter::from_fn(move || loop { - match &mut entry_ranges { - None => { - let (excerpt_id, text_map) = entries.next()?; - cursor.seek(excerpt_id, Bias::Left, &()); - if cursor.start().text.bytes >= end_offset { - return None; - } - - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); - - let excerpt_start_offset = cursor.start().text.bytes; - let excerpt_end_offset = cursor.end(&()).text.bytes; - let excerpt_buffer_range = excerpt.range.to_offset(&excerpt.buffer); - - let start; - if start_offset >= excerpt_start_offset { - start = ( - excerpt_buffer_range.start + start_offset - - excerpt_start_offset, - start_bias, - ); - } else { - start = (excerpt_buffer_range.start, Bias::Left); - } - - let end; - if end_offset <= excerpt_end_offset { - end = ( - excerpt_buffer_range.start + end_offset - excerpt_start_offset, - end_bias, - ); - } else { - end = (excerpt_buffer_range.end, Bias::Right); - } - - entry_ranges = Some( - text_map - .intersecting_ranges(start..end, &excerpt.buffer) - .map(move |(range, value)| { - let mut full_range = - excerpt_start.clone()..excerpt_start.clone(); - full_range.start.add_assign(&range.start); - full_range.end.add_assign(&range.end); - (full_range, value) - }), - ); - } - } - } - Some(ranges) => { - if let Some(item) = ranges.next() { - return Some(item); - } else { - entry_ranges.take(); - } - } - } - }) - } - - pub fn min_by_key<'a, D, F, K>( - &self, - snapshot: &'a MultiBufferSnapshot, - extract_key: F, - ) -> Option<(Range, &T)> - where - D: TextDimension, - F: FnMut(&T) -> K, - K: Ord, - { - self.min_or_max_by_key(snapshot, Ordering::Less, extract_key) - } - - pub fn max_by_key<'a, D, F, K>( - &self, - snapshot: &'a MultiBufferSnapshot, - extract_key: F, - ) -> Option<(Range, &T)> - where - D: TextDimension, - F: FnMut(&T) -> K, - K: Ord, - { - self.min_or_max_by_key(snapshot, Ordering::Greater, extract_key) - } - - fn min_or_max_by_key<'a, D, F, K>( - &self, - snapshot: &'a MultiBufferSnapshot, - target_ordering: Ordering, - mut extract_key: F, - ) -> Option<(Range, &T)> - where - D: TextDimension, - F: FnMut(&T) -> K, - K: Ord, - { - let mut cursor = snapshot.excerpts.cursor::(); - let mut max = None; - for (excerpt_id, text_map) in &self.child_maps { - cursor.seek(excerpt_id, Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - if let Some((range, value)) = - text_map.max_by_key(&excerpt.buffer, &mut extract_key) - { - if max.as_ref().map_or(true, |(_, max_value)| { - extract_key(value).cmp(&extract_key(*max_value)) == target_ordering - }) { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); - let mut full_range = excerpt_start.clone()..excerpt_start.clone(); - full_range.start.add_assign(&range.start); - full_range.end.add_assign(&range.end); - max = Some((full_range, value)); - } - } - } - } - } - max - } -} - -impl AnchorRangeSet { - pub fn len(&self) -> usize { - self.0.len() - } - - pub fn ranges<'a, D>( - &'a self, - content: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension, - { - self.0.ranges(content).map(|(range, _)| range) + snapshot.summary_for_anchor(self) } } diff --git a/crates/language/src/multi_buffer/location.rs b/crates/language/src/multi_buffer/location.rs deleted file mode 100644 index 
a61b2a76301dec9321eb771b3c427a1bc2a1db43..0000000000000000000000000000000000000000 --- a/crates/language/src/multi_buffer/location.rs +++ /dev/null @@ -1,76 +0,0 @@ -use smallvec::{smallvec, SmallVec}; -use std::iter; - -pub type ExcerptId = Location; - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Location(SmallVec<[u8; 4]>); - -impl Location { - pub fn min() -> Self { - Self(smallvec![u8::MIN]) - } - - pub fn max() -> Self { - Self(smallvec![u8::MAX]) - } - - pub fn between(lhs: &Self, rhs: &Self) -> Self { - let lhs = lhs.0.iter().copied().chain(iter::repeat(u8::MIN)); - let rhs = rhs.0.iter().copied().chain(iter::repeat(u8::MAX)); - let mut location = SmallVec::new(); - for (lhs, rhs) in lhs.zip(rhs) { - let mid = lhs + (rhs.saturating_sub(lhs)) / 2; - location.push(mid); - if mid > lhs { - break; - } - } - Self(location) - } -} - -impl Default for Location { - fn default() -> Self { - Self::min() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use rand::prelude::*; - use std::mem; - - #[gpui::test(iterations = 100)] - fn test_location(mut rng: StdRng) { - let mut lhs = Default::default(); - let mut rhs = Default::default(); - while lhs == rhs { - lhs = Location( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) - .collect(), - ); - rhs = Location( - (0..rng.gen_range(1..=5)) - .map(|_| rng.gen_range(0..=100)) - .collect(), - ); - } - - if lhs > rhs { - mem::swap(&mut lhs, &mut rhs); - } - - let middle = Location::between(&lhs, &rhs); - assert!(middle > lhs); - assert!(middle < rhs); - for ix in 0..middle.0.len() - 1 { - assert!( - middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0) - || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0) - ); - } - } -} diff --git a/crates/language/src/multi_buffer/selection.rs b/crates/language/src/multi_buffer/selection.rs index 825b6a27b84629baf9af021829e4c45ea0107e40..3a4369b3da9527ffdaa00aa84606f2e1acd78c01 100644 --- a/crates/language/src/multi_buffer/selection.rs +++ b/crates/language/src/multi_buffer/selection.rs @@ -1,13 +1,16 @@ -use super::{anchor::AnchorRangeMap, MultiBufferSnapshot, ToOffset}; -use std::{ops::Range, sync::Arc}; +use super::{Anchor, MultiBufferSnapshot, ToOffset}; +use std::{ + ops::{Range, Sub}, + sync::Arc, +}; use sum_tree::Bias; -use text::{rope::TextDimension, Selection, SelectionSetId, SelectionState}; +use text::{rope::TextDimension, Selection, SelectionSetId}; #[derive(Clone, Debug, Eq, PartialEq)] pub struct SelectionSet { pub id: SelectionSetId, pub active: bool, - pub selections: Arc>, + pub selections: Arc<[Selection]>, } impl SelectionSet { @@ -17,75 +20,102 @@ impl SelectionSet { pub fn selections<'a, D>( &'a self, - content: &'a MultiBufferSnapshot, + snapshot: &'a MultiBufferSnapshot, ) -> impl 'a + Iterator> where - D: TextDimension, + D: TextDimension + Ord + Sub, { - self.selections - .ranges(content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + resolve_selections(&self.selections, snapshot) } pub fn intersecting_selections<'a, D, I>( &'a self, range: Range<(I, Bias)>, - content: &'a MultiBufferSnapshot, + snapshot: &'a MultiBufferSnapshot, ) -> impl 'a + Iterator> where - D: TextDimension, + D: TextDimension + Ord + Sub, I: 'a + ToOffset, { - self.selections - .intersecting_ranges(range, content) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + let start = 
snapshot.anchor_at(range.start.0, range.start.1); + let end = snapshot.anchor_at(range.end.0, range.end.1); + let start_ix = match self + .selections + .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .selections + .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + resolve_selections(&self.selections[start_ix..end_ix], snapshot) } pub fn oldest_selection<'a, D>( &'a self, - content: &'a MultiBufferSnapshot, + snapshot: &'a MultiBufferSnapshot, ) -> Option> where - D: TextDimension, + D: TextDimension + Ord + Sub, { self.selections - .min_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .min_by_key(|selection| selection.id) + .map(|selection| resolve_selection(selection, snapshot)) } pub fn newest_selection<'a, D>( &'a self, - content: &'a MultiBufferSnapshot, + snapshot: &'a MultiBufferSnapshot, ) -> Option> where - D: TextDimension, + D: TextDimension + Ord + Sub, { self.selections - .max_by_key(content, |selection| selection.id) - .map(|(range, state)| Selection { - id: state.id, - start: range.start, - end: range.end, - reversed: state.reversed, - goal: state.goal, - }) + .iter() + .max_by_key(|selection| selection.id) + .map(|selection| resolve_selection(selection, snapshot)) } } + +fn resolve_selection<'a, D>( + selection: &'a Selection, + snapshot: &'a MultiBufferSnapshot, +) -> Selection +where + D: TextDimension + Ord + Sub, +{ + Selection { + id: selection.id, + start: selection.start.summary::(snapshot), + end: selection.end.summary::(snapshot), + reversed: selection.reversed, + goal: selection.goal, + } +} + +fn resolve_selections<'a, D>( + selections: &'a [Selection], + snapshot: &'a MultiBufferSnapshot, +) -> impl 'a + Iterator> +where + D: TextDimension + Ord + Sub, +{ + let mut summaries = snapshot + .summaries_for_anchors::( + selections + .iter() + .flat_map(|selection| [&selection.start, &selection.end]), + ) + .into_iter(); + selections.iter().map(move |selection| Selection { + id: selection.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: selection.reversed, + goal: selection.goal, + }) +} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 6a27d41ca200c858d8ea6568006ac6f06f491b0f..cb271e6937516b35139d5285ec5b94ffbd6d6c0a 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1,5 +1,5 @@ mod anchor; -mod locator; +pub mod locator; pub mod operation_queue; mod patch; mod point; From da09247e5e8bd0f503bd877b83403fba1b0e04b2 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 16:14:12 +0100 Subject: [PATCH 042/196] WIP Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 11 +++++------ crates/language/src/multi_buffer.rs | 11 +++++++++-- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e14991614a77d215b0a78f671df6f717f91690b6..56948bfd6e7c8abdd3065934633ef88194de0ffe 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -22,8 +22,7 @@ use gpui::{ use items::BufferItemHandle; use language::{ multi_buffer::{ - Anchor, AnchorRangeExt, AnchorRangeSet, MultiBuffer, MultiBufferSnapshot, SelectionSet, - ToOffset, ToPoint, + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, SelectionSet, 
ToOffset, ToPoint, }, BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, SelectionSetId, @@ -1292,19 +1291,19 @@ impl Editor { } fn autoclose_pairs(&mut self, cx: &mut ViewContext) { - let selections = self.selections::(cx).collect::>(); + let selections = self.selections::(cx); let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| { let autoclose_pair = buffer.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start; let pair = language.brackets().iter().find(|pair| { - buffer_snapshot.contains_str_at( + buffer.contains_str_at( first_selection_start.saturating_sub(pair.start.len()), &pair.start, ) }); pair.and_then(|pair| { let should_autoclose = selections[1..].iter().all(|selection| { - buffer_snapshot.contains_str_at( + buffer.contains_str_at( selection.start.saturating_sub(pair.start.len()), &pair.start, ) @@ -1322,7 +1321,7 @@ impl Editor { let selection_ranges = selections .iter() .map(|selection| { - let start = selection.start.to_offset(&buffer_snapshot); + let start = selection.start.to_offset(buffer); start..start }) .collect::>(); diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 081f4ff8e172ac03e422461b9e04d9736f346f5a..8f8274a744d0b50ec9521f1d4f2b86f50a0435d0 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -3,7 +3,7 @@ mod selection; use crate::{ buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, - BufferSnapshot, Diagnostic, File, Language, + BufferSnapshot, Diagnostic, DiagnosticEntry, File, Language, }; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; @@ -383,6 +383,13 @@ impl MultiBuffer { [].into_iter() } + pub fn contains_str_at(&self, _: T, _: &str) -> bool + where + T: ToOffset, + { + todo!() + } + pub fn max_point(&self) -> Point { self.snapshot.lock().max_point() } @@ -438,7 +445,7 @@ impl MultiBuffer { pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl Iterator> + 'a where T: 'a + ToOffset, O: 'a, From 7524974f1976d98ebe85215951e100e1724dd822 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 17:15:16 +0100 Subject: [PATCH 043/196] Get everything compiling again Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map.rs | 32 ++-- crates/editor/src/display_map/block_map.rs | 141 ++++++++---------- crates/editor/src/display_map/fold_map.rs | 14 +- crates/editor/src/display_map/tab_map.rs | 8 +- crates/editor/src/display_map/wrap_map.rs | 11 +- crates/editor/src/editor.rs | 164 ++++++++++----------- crates/editor/src/items.rs | 5 +- crates/go_to_line/src/go_to_line.rs | 4 +- crates/language/src/multi_buffer.rs | 112 ++------------ crates/language/src/multi_buffer/anchor.rs | 5 + crates/server/src/rpc.rs | 8 +- 11 files changed, 207 insertions(+), 297 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index e99df6b0a2e0ef2da2b11298383b169c4488d2d3..41683863e61d02040e822417bf11510dc4498d2b 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -74,7 +74,7 @@ impl DisplayMap { let (wraps_snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(tabs_snapshot.clone(), edits, cx)); - let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits, cx); + let blocks_snapshot = self.block_map.read(wraps_snapshot.clone(), edits); DisplaySnapshot { buffer_snapshot: 
self.buffer.read(cx).snapshot(cx), @@ -97,13 +97,13 @@ impl DisplayMap { let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - self.block_map.read(snapshot, edits, cx); + self.block_map.read(snapshot, edits); let (snapshot, edits) = fold_map.fold(ranges); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - self.block_map.read(snapshot, edits, cx); + self.block_map.read(snapshot, edits); } pub fn unfold( @@ -118,13 +118,13 @@ impl DisplayMap { let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - self.block_map.read(snapshot, edits, cx); + self.block_map.read(snapshot, edits); let (snapshot, edits) = fold_map.unfold(ranges); let (snapshot, edits) = self.tab_map.sync(snapshot, edits); let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - self.block_map.read(snapshot, edits, cx); + self.block_map.read(snapshot, edits); } pub fn insert_blocks
<P>
( @@ -142,8 +142,8 @@ impl DisplayMap { let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - let mut block_map = self.block_map.write(snapshot, edits, cx); - block_map.insert(blocks, cx) + let mut block_map = self.block_map.write(snapshot, edits); + block_map.insert(blocks) } pub fn replace_blocks(&mut self, styles: HashMap) @@ -161,8 +161,8 @@ impl DisplayMap { let (snapshot, edits) = self .wrap_map .update(cx, |map, cx| map.sync(snapshot, edits, cx)); - let mut block_map = self.block_map.write(snapshot, edits, cx); - block_map.remove(ids, cx); + let mut block_map = self.block_map.write(snapshot, edits); + block_map.remove(ids); } pub fn set_font(&self, font_id: FontId, font_size: f32, cx: &mut ModelContext) { @@ -518,7 +518,8 @@ mod tests { 20..=80 => { let mut ranges = Vec::new(); for _ in 0..rng.gen_range(1..=3) { - buffer.read_with(&cx, |buffer, _| { + buffer.read_with(&cx, |buffer, cx| { + let buffer = buffer.read(cx); let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let start = buffer.clip_offset(rng.gen_range(0..=end), Left); ranges.push(start..end); @@ -548,7 +549,7 @@ mod tests { let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx)); fold_count = snapshot.fold_count(); - log::info!("buffer text: {:?}", buffer.read_with(&cx, |b, _| b.text())); + log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); log::info!("display text: {:?}", snapshot.text()); // Line boundaries @@ -566,12 +567,9 @@ mod tests { assert_eq!(prev_display_bound.column(), 0); if next_display_bound < snapshot.max_point() { assert_eq!( - buffer.read_with(&cx, |buffer, _| buffer - .as_snapshot() - .chars_at(next_buffer_bound) - .next()), + snapshot.buffer_snapshot.chars_at(next_buffer_bound).next(), Some('\n') - ) + ); } assert_eq!( @@ -705,8 +703,8 @@ mod tests { (DisplayPoint::new(2, 4), SelectionGoal::Column(10)) ); + let ix = snapshot.buffer_snapshot.text().find("seven").unwrap(); buffer.update(cx, |buffer, cx| { - let ix = buffer.text().find("seven").unwrap(); buffer.edit(vec![ix..ix], "and ", cx); }); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 00c70c361f67ec7f1b41313c42eededa528e0f88..1b6c8a162ef15700bdcb12b5fa103cf5fbc867a9 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -125,13 +125,8 @@ impl BlockMap { } } - pub fn read( - &self, - wrap_snapshot: WrapSnapshot, - edits: Vec, - cx: &AppContext, - ) -> BlockSnapshot { - self.sync(&wrap_snapshot, edits, cx); + pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Vec) -> BlockSnapshot { + self.sync(&wrap_snapshot, edits); *self.wrap_snapshot.lock() = wrap_snapshot.clone(); BlockSnapshot { wrap_snapshot, @@ -139,24 +134,18 @@ impl BlockMap { } } - pub fn write( - &mut self, - wrap_snapshot: WrapSnapshot, - edits: Vec, - cx: &AppContext, - ) -> BlockMapWriter { - self.sync(&wrap_snapshot, edits, cx); + pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Vec) -> BlockMapWriter { + self.sync(&wrap_snapshot, edits); *self.wrap_snapshot.lock() = wrap_snapshot; BlockMapWriter(self) } - fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec, cx: &AppContext) { + fn sync(&self, wrap_snapshot: &WrapSnapshot, edits: Vec) { if edits.is_empty() { return; } - let buffer = self.buffer.read(cx); - let buffer = buffer.as_snapshot(); + let buffer = wrap_snapshot.buffer_snapshot(); let mut transforms = self.transforms.lock(); let mut new_transforms = SumTree::new(); 
let old_row_count = transforms.summary().input_rows; @@ -378,16 +367,14 @@ impl<'a> BlockMapWriter<'a> { pub fn insert
<P>
( &mut self, blocks: impl IntoIterator>, - cx: &AppContext, ) -> Vec where P: ToOffset + Clone, { - let buffer = self.0.buffer.read(cx); - let buffer = buffer.as_snapshot(); let mut ids = Vec::new(); let mut edits = Vec::>::new(); let wrap_snapshot = &*self.0.wrap_snapshot.lock(); + let buffer = wrap_snapshot.buffer_snapshot(); for block in blocks { let id = BlockId(self.0.next_block_id.fetch_add(1, SeqCst)); @@ -435,14 +422,13 @@ impl<'a> BlockMapWriter<'a> { } } - self.0.sync(wrap_snapshot, edits, cx); + self.0.sync(wrap_snapshot, edits); ids } - pub fn remove(&mut self, block_ids: HashSet, cx: &AppContext) { - let buffer = self.0.buffer.read(cx); - let buffer = buffer.as_snapshot(); + pub fn remove(&mut self, block_ids: HashSet) { let wrap_snapshot = &*self.0.wrap_snapshot.lock(); + let buffer = wrap_snapshot.buffer_snapshot(); let mut edits = Vec::new(); let mut last_block_buffer_row = None; self.0.blocks.retain(|block| { @@ -470,7 +456,7 @@ impl<'a> BlockMapWriter<'a> { true } }); - self.0.sync(wrap_snapshot, edits, cx); + self.0.sync(wrap_snapshot, edits); } } @@ -918,32 +904,29 @@ mod tests { let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); - let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx); - writer.insert( - vec![ - BlockProperties { - position: Point::new(1, 0), - height: 1, - disposition: BlockDisposition::Above, - render: Arc::new(|_| Empty::new().named("block 1")), - }, - BlockProperties { - position: Point::new(1, 2), - height: 2, - disposition: BlockDisposition::Above, - render: Arc::new(|_| Empty::new().named("block 2")), - }, - BlockProperties { - position: Point::new(3, 3), - height: 3, - disposition: BlockDisposition::Below, - render: Arc::new(|_| Empty::new().named("block 3")), - }, - ], - cx, - ); + let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); + writer.insert(vec![ + BlockProperties { + position: Point::new(1, 0), + height: 1, + disposition: BlockDisposition::Above, + render: Arc::new(|_| Empty::new().named("block 1")), + }, + BlockProperties { + position: Point::new(1, 2), + height: 2, + disposition: BlockDisposition::Above, + render: Arc::new(|_| Empty::new().named("block 2")), + }, + BlockProperties { + position: Point::new(3, 3), + height: 3, + disposition: BlockDisposition::Below, + render: Arc::new(|_| Empty::new().named("block 3")), + }, + ]); - let mut snapshot = block_map.read(wraps_snapshot, vec![], cx); + let mut snapshot = block_map.read(wraps_snapshot, vec![]); assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n"); let blocks = snapshot @@ -1068,7 +1051,7 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut snapshot = block_map.read(wraps_snapshot, wrap_edits, cx); + let mut snapshot = block_map.read(wraps_snapshot, wrap_edits); assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n"); } @@ -1088,28 +1071,25 @@ mod tests { let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx); let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); - let mut writer = block_map.write(wraps_snapshot.clone(), vec![], cx); - writer.insert( - vec![ - BlockProperties { - position: Point::new(1, 12), - disposition: BlockDisposition::Above, - render: Arc::new(|_| Empty::new().named("block 1")), - height: 1, - }, - BlockProperties { - position: Point::new(1, 1), - 
disposition: BlockDisposition::Below, - render: Arc::new(|_| Empty::new().named("block 2")), - height: 1, - }, - ], - cx, - ); + let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); + writer.insert(vec![ + BlockProperties { + position: Point::new(1, 12), + disposition: BlockDisposition::Above, + render: Arc::new(|_| Empty::new().named("block 1")), + height: 1, + }, + BlockProperties { + position: Point::new(1, 1), + disposition: BlockDisposition::Below, + render: Arc::new(|_| Empty::new().named("block 2")), + height: 1, + }, + ]); // Blocks with an 'above' disposition go above their corresponding buffer line. // Blocks with a 'below' disposition go below their corresponding buffer line. - let mut snapshot = block_map.read(wraps_snapshot, vec![], cx); + let mut snapshot = block_map.read(wraps_snapshot, vec![]); assert_eq!( snapshot.text(), "one two \nthree\n\nfour five \nsix\n\nseven \neight" @@ -1166,7 +1146,7 @@ mod tests { let block_count = rng.gen_range(1..=1); let block_properties = (0..block_count) .map(|_| { - let buffer = buffer.read(cx); + let buffer = buffer.read(cx).read(cx); let position = buffer.anchor_after( buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left), ); @@ -1180,7 +1160,7 @@ mod tests { log::info!( "inserting block {:?} {:?} with height {}", disposition, - position.to_point(&buffer.as_snapshot()), + position.to_point(&buffer), height ); BlockProperties { @@ -1198,8 +1178,8 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx); - let block_ids = block_map.insert(block_properties.clone(), cx); + let mut block_map = block_map.write(wraps_snapshot, wrap_edits); + let block_ids = block_map.insert(block_properties.clone()); for (block_id, props) in block_ids.into_iter().zip(block_properties) { expected_blocks.push((block_id, props)); } @@ -1220,17 +1200,17 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut block_map = block_map.write(wraps_snapshot, wrap_edits, cx); - block_map.remove(block_ids_to_remove, cx); + let mut block_map = block_map.write(wraps_snapshot, wrap_edits); + block_map.remove(block_ids_to_remove); } _ => { buffer.update(cx, |buffer, cx| { let edit_count = rng.gen_range(1..=5); let subscription = buffer.subscribe(); buffer.randomly_edit(&mut rng, edit_count, cx); - log::info!("buffer text: {:?}", buffer.text()); buffer_edits.extend(subscription.consume()); buffer_snapshot = buffer.snapshot(cx); + log::info!("buffer text: {:?}", buffer_snapshot.text()); }); } } @@ -1240,26 +1220,25 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits, cx); + let mut blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); assert_eq!( blocks_snapshot.transforms.summary().input_rows, wraps_snapshot.max_point().row() + 1 ); log::info!("blocks text: {:?}", blocks_snapshot.text()); - let buffer = buffer.read(cx); let mut sorted_blocks = expected_blocks .iter() .cloned() .map(|(id, block)| { - let mut position = block.position.to_point(&buffer.as_snapshot()); + let mut position = block.position.to_point(&buffer_snapshot); let column = wraps_snapshot.from_point(position, Bias::Left).column(); match block.disposition { BlockDisposition::Above => 
{ position.column = 0; } BlockDisposition::Below => { - position.column = buffer.line_len(position.row); + position.column = buffer_snapshot.line_len(position.row); } }; let row = wraps_snapshot.from_point(position, Bias::Left).row(); diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 1b9402bcc5664b8558f59495c464c0d03359babc..1290fe33fa2e31a377bee7ebf38a568206738216 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -482,6 +482,10 @@ pub struct FoldSnapshot { } impl FoldSnapshot { + pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { + &self.buffer_snapshot + } + #[cfg(test)] pub fn text(&self) -> String { self.chunks(FoldOffset(0)..self.len(), None) @@ -1232,9 +1236,7 @@ mod tests { let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); let fold_ranges = snapshot .folds_in_range(Point::new(1, 0)..Point::new(1, 3)) - .map(|fold| { - fold.start.to_point(&buffer.as_snapshot())..fold.end.to_point(&buffer.as_snapshot()) - }) + .map(|fold| fold.start.to_point(&buffer_snapshot)..fold.end.to_point(&buffer_snapshot)) .collect::>(); assert_eq!( fold_ranges, @@ -1254,14 +1256,14 @@ mod tests { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); let buffer = MultiBuffer::build_simple(&text, cx); - let buffer_snapshot = buffer.read(cx).snapshot(cx); + let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; let (mut initial_snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); let mut snapshot_edits = Vec::new(); for _ in 0..operations { - log::info!("text: {:?}", buffer.read(cx).text()); + log::info!("text: {:?}", buffer_snapshot.text()); let mut buffer_edits = Vec::new(); match rng.gen_range(0..=100) { 0..=59 => { @@ -1276,7 +1278,7 @@ mod tests { buffer_edits.extend(edits); }), }; - let buffer_snapshot = buffer.read(cx).snapshot(cx); + buffer_snapshot = buffer.read(cx).snapshot(cx); let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits); snapshot_edits.push((snapshot.clone(), edits)); diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 52b4c3e6a2ca127c9c8cabc9147ae38ec6bd85fe..2786c594b6a677f2f8ef60a11d0bfe1511772216 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,5 +1,5 @@ use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint}; -use language::{rope, Chunk}; +use language::{multi_buffer::MultiBufferSnapshot, rope, Chunk}; use parking_lot::Mutex; use std::{cmp, mem, ops::Range}; use sum_tree::Bias; @@ -87,6 +87,10 @@ pub struct TabSnapshot { } impl TabSnapshot { + pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { + self.fold_snapshot.buffer_snapshot() + } + pub fn text_summary(&self) -> TextSummary { self.text_summary_for_range(TabPoint::zero()..self.max_point()) } @@ -453,7 +457,7 @@ mod tests { let text = RandomCharIter::new(&mut rng).take(len).collect::(); let buffer = MultiBuffer::build_simple(&text, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); - log::info!("Buffer text: {:?}", buffer.read(cx).text()); + log::info!("Buffer text: {:?}", buffer_snapshot.text()); let (mut fold_map, _) = FoldMap::new(buffer_snapshot.clone()); fold_map.randomly_mutate(&mut rng); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 
c346ab03c37647c2a2892fdb80eb172112519058..d5c11f61dc9c0af85f9a7d7389c2300450c0536e 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -6,7 +6,7 @@ use gpui::{ fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext, Task, }; -use language::{Chunk, Point}; +use language::{multi_buffer::MultiBufferSnapshot, Chunk, Point}; use lazy_static::lazy_static; use smol::future::yield_now; use std::{collections::VecDeque, mem, ops::Range, time::Duration}; @@ -305,6 +305,10 @@ impl WrapSnapshot { } } + pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot { + self.tab_snapshot.buffer_snapshot() + } + fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch { let mut new_transforms; if tab_edits.is_empty() { @@ -1012,10 +1016,7 @@ mod tests { let buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); - log::info!( - "Unwrapped text (no folds): {:?}", - buffer.read_with(&cx, |buf, _| buf.text()) - ); + log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text()); log::info!( "Unwrapped text (unexpanded tabs): {:?}", folds_snapshot.text() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 56948bfd6e7c8abdd3065934633ef88194de0ffe..f9e6951f8a67f24ed7b4ec1e835c04a6b53f9ab7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -559,7 +559,7 @@ impl Editor { } pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { - self.buffer.read(cx).language() + self.buffer.read(cx).read(cx).language() } pub fn set_placeholder_text( @@ -575,9 +575,8 @@ impl Editor { let map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let scroll_top_buffer_offset = DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right); - self.scroll_top_anchor = self - .buffer - .read(cx) + self.scroll_top_anchor = map + .buffer_snapshot .anchor_at(scroll_top_buffer_offset, Bias::Right); self.scroll_position = vec2f( scroll_position.x(), @@ -771,9 +770,8 @@ impl Editor { let tail = self.newest_selection::(cx).tail(); self.begin_selection(position, false, click_count, cx); - let buffer = self.buffer.read(cx); let position = position.to_offset(&display_map, Bias::Left); - let tail_anchor = buffer.anchor_before(tail); + let tail_anchor = display_map.buffer_snapshot.anchor_before(tail); let pending = self.pending_selection.as_mut().unwrap(); if position >= tail { @@ -804,7 +802,7 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = &display_map.buffer_snapshot; let start; let end; let mode; @@ -874,10 +872,8 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); - let tail = self.newest_selection::(cx).tail(); - self.columnar_selection_tail = Some(buffer.anchor_before(tail)); + self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); self.select_columns( tail.to_display_point(&display_map), @@ -1293,17 +1289,18 @@ impl Editor { fn autoclose_pairs(&mut self, cx: &mut ViewContext) { let selections = self.selections::(cx); let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| { - let autoclose_pair = buffer.language().and_then(|language| { + let snapshot = 
buffer.snapshot(cx); + let autoclose_pair = snapshot.language().and_then(|language| { let first_selection_start = selections.first().unwrap().start; let pair = language.brackets().iter().find(|pair| { - buffer.contains_str_at( + snapshot.contains_str_at( first_selection_start.saturating_sub(pair.start.len()), &pair.start, ) }); pair.and_then(|pair| { let should_autoclose = selections[1..].iter().all(|selection| { - buffer.contains_str_at( + snapshot.contains_str_at( selection.start.saturating_sub(pair.start.len()), &pair.start, ) @@ -1321,12 +1318,13 @@ impl Editor { let selection_ranges = selections .iter() .map(|selection| { - let start = selection.start.to_offset(buffer); + let start = selection.start.to_offset(&snapshot); start..start }) .collect::>(); buffer.edit(selection_ranges, &pair.end, cx); + let snapshot = buffer.snapshot(cx); if pair.end.len() == 1 { let mut delta = 0; @@ -1336,7 +1334,7 @@ impl Editor { .map(move |selection| { let offset = selection.start + delta; delta += 1; - buffer.anchor_before(offset)..buffer.anchor_after(offset) + snapshot.anchor_before(offset)..snapshot.anchor_after(offset) }) .collect(), pair, @@ -1350,7 +1348,7 @@ impl Editor { } fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext) -> bool { - let old_selections = self.selections::(cx).collect::>(); + let old_selections = self.selections::(cx); let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { autoclose_pair } else { @@ -1446,7 +1444,7 @@ impl Editor { for selection in &mut selections { if selection.is_empty() { let char_column = buffer - .as_snapshot() + .read(cx) .text_for_range(Point::new(selection.start.row, 0)..selection.start) .flat_map(str::chars) .count(); @@ -1482,7 +1480,7 @@ impl Editor { } for row in start_row..end_row { - let indent_column = buffer.indent_column_for_line(row) as usize; + let indent_column = buffer.read(cx).indent_column_for_line(row) as usize; let columns_to_next_tab_stop = tab_size - (indent_column % tab_size); let row_start = Point::new(row, 0); buffer.edit( @@ -1515,7 +1513,8 @@ impl Editor { let selections = self.selections::(cx); let mut deletion_ranges = Vec::new(); let mut last_outdent = None; - self.buffer.update(cx, |buffer, cx| { + { + let buffer = self.buffer.read(cx).read(cx); for selection in &selections { let mut start_row = selection.start.row; let mut end_row = selection.end.row + 1; @@ -1546,6 +1545,8 @@ impl Editor { } } } + } + self.buffer.update(cx, |buffer, cx| { buffer.edit(deletion_ranges, "", cx); }); @@ -1627,7 +1628,7 @@ impl Editor { let mut selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = &display_map.buffer_snapshot; let mut edits = Vec::new(); let mut selections_iter = selections.iter().peekable(); @@ -1863,7 +1864,7 @@ impl Editor { let mut selections = self.selections::(cx); let mut clipboard_selections = Vec::with_capacity(selections.len()); { - let buffer = self.buffer.read(cx); + let buffer = self.buffer.read(cx).read(cx); let max_point = buffer.max_point(); for selection in &mut selections { let is_entire_line = selection.is_empty(); @@ -1892,27 +1893,29 @@ impl Editor { pub fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { let selections = self.selections::(cx); - let buffer = self.buffer.read(cx); - let max_point = buffer.max_point(); let mut text = String::new(); let mut clipboard_selections = Vec::with_capacity(selections.len()); - for selection in selections.iter() { - 
let mut start = selection.start; - let mut end = selection.end; - let is_entire_line = selection.is_empty(); - if is_entire_line { - start = Point::new(start.row, 0); - end = cmp::min(max_point, Point::new(start.row + 1, 0)); - } - let mut len = 0; - for chunk in buffer.text_for_range(start..end) { - text.push_str(chunk); - len += chunk.len(); + { + let buffer = self.buffer.read(cx).read(cx); + let max_point = buffer.max_point(); + for selection in selections.iter() { + let mut start = selection.start; + let mut end = selection.end; + let is_entire_line = selection.is_empty(); + if is_entire_line { + start = Point::new(start.row, 0); + end = cmp::min(max_point, Point::new(start.row + 1, 0)); + } + let mut len = 0; + for chunk in buffer.text_for_range(start..end) { + text.push_str(chunk); + len += chunk.len(); + } + clipboard_selections.push(ClipboardSelection { + len, + is_entire_line, + }); } - clipboard_selections.push(ClipboardSelection { - len, - is_entire_line, - }); } cx.as_mut() @@ -1954,8 +1957,7 @@ impl Editor { // selection was copied. If this selection is also currently empty, // then paste the line before the current line of the buffer. let range = if selection.is_empty() && entire_line { - let column = - selection.start.to_point(&*buffer.as_snapshot()).column as usize; + let column = selection.start.to_point(&buffer.read(cx)).column as usize; let line_start = selection.start - column; line_start..line_start } else { @@ -2352,8 +2354,7 @@ impl Editor { } pub fn move_to_end(&mut self, _: &MoveToEnd, cx: &mut ViewContext) { - let buffer = self.buffer.read(cx); - let cursor = buffer.len(); + let cursor = self.buffer.read(cx).read(cx).len(); let selection = Selection { id: post_inc(&mut self.next_selection_id), start: cursor, @@ -2365,8 +2366,8 @@ impl Editor { } pub fn select_to_end(&mut self, _: &SelectToEnd, cx: &mut ViewContext) { - let mut selection = self.selections::(cx).last().unwrap().clone(); - selection.set_head(self.buffer.read(cx).len()); + let mut selection = self.selections::(cx).first().unwrap().clone(); + selection.set_head(self.buffer.read(cx).read(cx).len()); self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); } @@ -2374,7 +2375,7 @@ impl Editor { let selection = Selection { id: post_inc(&mut self.next_selection_id), start: 0, - end: self.buffer.read(cx).len(), + end: self.buffer.read(cx).read(cx).len(), reversed: false, goal: SelectionGoal::None, }; @@ -2384,8 +2385,7 @@ impl Editor { pub fn select_line(&mut self, _: &SelectLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let mut selections = self.selections::(cx); - let buffer = self.buffer.read(cx); - let max_point = buffer.max_point(); + let max_point = display_map.buffer_snapshot.max_point(); for selection in &mut selections { let rows = selection.spanned_rows(true, &display_map).buffer_rows; selection.start = Point::new(rows.start, 0); @@ -2400,30 +2400,30 @@ impl Editor { _: &SplitSelectionIntoLines, cx: &mut ViewContext, ) { - let selections = self.selections::(cx); - let buffer = self.buffer.read(cx); - let mut to_unfold = Vec::new(); let mut new_selections = Vec::new(); - for selection in selections.iter() { - for row in selection.start.row..selection.end.row { - let cursor = Point::new(row, buffer.line_len(row)); + { + let buffer = self.buffer.read(cx).read(cx); + for selection in self.selections::(cx) { + for row in selection.start.row..selection.end.row { + let cursor = Point::new(row, buffer.line_len(row)); + 
new_selections.push(Selection { + id: post_inc(&mut self.next_selection_id), + start: cursor, + end: cursor, + reversed: false, + goal: SelectionGoal::None, + }); + } new_selections.push(Selection { - id: post_inc(&mut self.next_selection_id), - start: cursor, - end: cursor, + id: selection.id, + start: selection.end, + end: selection.end, reversed: false, goal: SelectionGoal::None, }); + to_unfold.push(selection.start..selection.end); } - new_selections.push(Selection { - id: selection.id, - start: selection.end, - end: selection.end, - reversed: false, - goal: SelectionGoal::None, - }); - to_unfold.push(selection.start..selection.end); } self.unfold_ranges(to_unfold, cx); self.update_selections(new_selections, Some(Autoscroll::Fit), cx); @@ -2646,9 +2646,9 @@ impl Editor { let mut edit_ranges = Vec::new(); let mut last_toggled_row = None; self.buffer.update(cx, |buffer, cx| { - let buffer_snapshot = buffer.snapshot(cx); for selection in &mut selections { edit_ranges.clear(); + let snapshot = buffer.snapshot(cx); let end_row = if selection.end.row > selection.start.row && selection.end.column == 0 { @@ -2666,13 +2666,13 @@ impl Editor { last_toggled_row = Some(row); } - if buffer_snapshot.is_line_blank(row) { + if snapshot.is_line_blank(row) { continue; } - let start = Point::new(row, buffer_snapshot.indent_column_for_line(row)); - let mut line_bytes = buffer_snapshot - .bytes_in_range(start..buffer.max_point()) + let start = Point::new(row, snapshot.indent_column_for_line(row)); + let mut line_bytes = snapshot + .bytes_in_range(start..snapshot.max_point()) .flatten() .copied(); @@ -3037,7 +3037,7 @@ impl Editor { buffer .selection_set(set_id) .unwrap() - .intersecting_selections::(range, &buffer.as_snapshot()) + .intersecting_selections::(range, &buffer.read(cx)) .map(move |s| Selection { id: s.id, start: s.start.to_display_point(&display_map), @@ -3087,7 +3087,7 @@ impl Editor { &self, cx: &AppContext, ) -> Option> { - let buffer = self.buffer.read(cx).as_snapshot(); + let buffer = self.buffer.read(cx).read(cx); self.pending_selection.as_ref().map(|pending| Selection { id: pending.selection.id, start: pending.selection.start.summary::(&buffer), @@ -3123,7 +3123,7 @@ impl Editor { self.pending_selection(cx) .or_else(|| { self.selection_set(cx) - .newest_selection(&self.buffer.read(cx).as_snapshot()) + .newest_selection(&self.buffer.read(cx).read(cx)) }) .unwrap() } @@ -3169,7 +3169,7 @@ impl Editor { if selections.len() == autoclose_pair.ranges.len() { selections .iter() - .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(buffer))) + .zip(autoclose_pair.ranges.iter().map(|r| r.to_point(&buffer))) .all(|(selection, autoclose_range)| { let head = selection.head().to_point(&buffer); autoclose_range.start <= head && autoclose_range.end >= head @@ -3253,7 +3253,7 @@ impl Editor { pub fn unfold(&mut self, _: &Unfold, cx: &mut ViewContext) { let selections = self.selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); + let buffer = &display_map.buffer_snapshot; let ranges = selections .iter() .map(|s| { @@ -3351,7 +3351,7 @@ impl Editor { } pub fn text(&self, cx: &AppContext) -> String { - self.buffer.read(cx).text() + self.buffer.read(cx).read(cx).text() } pub fn display_text(&self, cx: &mut MutableAppContext) -> String { @@ -3938,7 +3938,7 @@ mod tests { ); view.unfold(&Unfold, cx); - assert_eq!(view.display_text(cx), buffer.read(cx).text()); + assert_eq!(view.display_text(cx), buffer.read(cx).read(cx).text()); }); 
} @@ -4465,7 +4465,7 @@ mod tests { view.delete_to_previous_word_boundary(&DeleteToPreviousWordBoundary, cx); }); - assert_eq!(buffer.read(cx).text(), "e two te four"); + assert_eq!(buffer.read(cx).read(cx).text(), "e two te four"); view.update(cx, |view, cx| { view.select_display_ranges( @@ -4481,7 +4481,7 @@ mod tests { view.delete_to_next_word_boundary(&DeleteToNextWordBoundary, cx); }); - assert_eq!(buffer.read(cx).text(), "e t te our"); + assert_eq!(buffer.read(cx).read(cx).text(), "e t te our"); } #[gpui::test] @@ -4595,7 +4595,7 @@ mod tests { }); assert_eq!( - buffer.read(cx).text(), + buffer.read(cx).read(cx).text(), "oe two three\nfou five six\nseven ten\n" ); } @@ -4626,7 +4626,7 @@ mod tests { }); assert_eq!( - buffer.read(cx).text(), + buffer.read(cx).read(cx).text(), "on two three\nfou five six\nseven ten\n" ); } @@ -5650,7 +5650,7 @@ mod tests { view.newline(&Newline, cx); assert_eq!( - view.buffer().read(cx).text(), + view.buffer().read(cx).read(cx).text(), concat!( "{ \n", // Suppress rustfmt "\n", // diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index fe4336126a0490c478aca1e5bdcf4e9317748ee3..aeef3f71b92190b9645772ce179da090fcad6201 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -76,7 +76,9 @@ impl ItemHandle for BufferItemHandle { font_properties, underline: None, }; - let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language()); + let language = buffer + .upgrade(cx) + .and_then(|buf| buf.read(cx).read(cx).language()); let soft_wrap = match settings.soft_wrap(language) { settings::SoftWrap::None => crate::SoftWrap::None, settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth, @@ -327,6 +329,7 @@ impl DiagnosticMessage { let cursor_position = editor.newest_selection::(cx).head(); let buffer = editor.buffer().read(cx); let new_diagnostic = buffer + .read(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) .filter(|entry| !entry.range.is_empty()) .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index b35a6fe0025c403e408cddaaca295d4701c8cb94..4c021ac1a44d9b085c6240d745cee9a59ff9d3d0 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -73,7 +73,7 @@ impl GoToLine { ( restore_state, editor.newest_selection(cx).head(), - editor.buffer().read(cx).max_point(), + editor.buffer().read(cx).read(cx).max_point(), ) }); @@ -127,7 +127,7 @@ impl GoToLine { match event { editor::Event::Blurred => cx.emit(Event::Dismissed), editor::Event::Edited => { - let line_editor = self.line_editor.read(cx).buffer().read(cx).text(); + let line_editor = self.line_editor.read(cx).buffer().read(cx).read(cx).text(); let mut components = line_editor.trim().split(&[',', ':'][..]); let row = components.next().and_then(|row| row.parse::().ok()); let column = components.next().and_then(|row| row.parse::().ok()); diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 8f8274a744d0b50ec9521f1d4f2b86f50a0435d0..35aef1aecbdf56ba874e8df584bf6318e187e3c7 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -13,6 +13,7 @@ use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Tas use parking_lot::{Mutex, MutexGuard}; pub use selection::SelectionSet; use std::{ + cell::{Ref, RefCell}, cmp, io, ops::{Range, Sub}, sync::Arc, @@ -33,7 +34,7 @@ pub type ExcerptId = Locator; 
#[derive(Default)] pub struct MultiBuffer { - snapshot: Mutex, + snapshot: RefCell, buffers: HashMap, subscriptions: Topic, selection_sets: HashMap, @@ -115,13 +116,18 @@ impl MultiBuffer { cx.add_model(|cx| Self::singleton(buffer, cx)) } + pub fn replica_id(&self) -> clock::ReplicaId { + todo!() + } + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); - self.snapshot.lock().clone() + self.snapshot.borrow().clone() } - pub fn as_snapshot(&self) -> MutexGuard { - self.snapshot.lock() + pub fn read(&self, cx: &AppContext) -> Ref { + self.sync(cx); + self.snapshot.borrow() } pub fn as_singleton(&self) -> Option<&ModelHandle> { @@ -246,7 +252,7 @@ impl MultiBuffer { let buffer = props.buffer.read(cx); let range = buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); - let mut snapshot = self.snapshot.lock(); + let mut snapshot = self.snapshot.borrow_mut(); let prev_id = snapshot.excerpts.last().map(|e| &e.id); let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); @@ -297,7 +303,7 @@ impl MultiBuffer { } fn sync(&self, cx: &AppContext) { - let mut snapshot = self.snapshot.lock(); + let mut snapshot = self.snapshot.borrow_mut(); let mut excerpts_to_edit = Vec::new(); for buffer_state in self.buffers.values() { if buffer_state @@ -365,96 +371,6 @@ impl MultiBuffer { } } -// Methods delegating to the snapshot -impl MultiBuffer { - pub fn replica_id(&self) -> ReplicaId { - self.snapshot.lock().replica_id() - } - - pub fn text(&self) -> String { - self.snapshot.lock().text() - } - - pub fn text_for_range<'a, T: ToOffset>( - &'a self, - range: Range, - ) -> impl Iterator { - todo!(); - [].into_iter() - } - - pub fn contains_str_at(&self, _: T, _: &str) -> bool - where - T: ToOffset, - { - todo!() - } - - pub fn max_point(&self) -> Point { - self.snapshot.lock().max_point() - } - - pub fn len(&self) -> usize { - self.snapshot.lock().len() - } - - pub fn line_len(&self, row: u32) -> u32 { - self.snapshot.lock().line_len(row) - } - - pub fn is_line_blank(&self, row: u32) -> bool { - self.snapshot.lock().is_line_blank(row) - } - - pub fn indent_column_for_line(&self, row: u32) -> u32 { - self.snapshot.lock().indent_column_for_line(row) - } - - pub fn anchor_before(&self, position: T) -> Anchor { - self.snapshot.lock().anchor_before(position) - } - - pub fn anchor_after(&self, position: T) -> Anchor { - self.snapshot.lock().anchor_after(position) - } - - pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - self.snapshot.lock().anchor_at(position, bias) - } - - pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { - self.snapshot.lock().clip_offset(offset, bias) - } - - pub fn clip_point(&self, point: Point, bias: Bias) -> Point { - self.snapshot.lock().clip_point(point, bias) - } - - pub fn language<'a>(&self) -> Option<&'a Arc> { - todo!() - } - - pub fn parse_count(&self) -> usize { - self.snapshot.lock().parse_count() - } - - pub fn diagnostics_update_count(&self) -> usize { - self.snapshot.lock().diagnostics_update_count() - } - - pub fn diagnostics_in_range<'a, T, O>( - &'a self, - search_range: Range, - ) -> impl Iterator> + 'a - where - T: 'a + ToOffset, - O: 'a, - { - todo!(); - None.into_iter() - } -} - #[cfg(any(test, feature = "test-support"))] impl MultiBuffer { pub fn randomly_edit(&mut self, _: &mut R, _: usize, _: &mut ModelContext) { @@ -949,7 +865,7 @@ impl MultiBufferSnapshot { pub fn diagnostic_group<'a, O>( &'a self, group_id: usize, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> 
impl Iterator> + 'a where O: 'a, { @@ -960,7 +876,7 @@ impl MultiBufferSnapshot { pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, - ) -> impl Iterator, &Diagnostic)> + 'a + ) -> impl Iterator> + 'a where T: 'a + ToOffset, O: 'a, diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs index 033ed009b0be435e34fced2963bdfa59f86c5ce3..7bc8c85848900c8dbd3bfa5c8362967c8302437f 100644 --- a/crates/language/src/multi_buffer/anchor.rs +++ b/crates/language/src/multi_buffer/anchor.rs @@ -89,6 +89,7 @@ impl ToPoint for Anchor { pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &MultiBufferSnapshot) -> Result; fn to_offset(&self, content: &MultiBufferSnapshot) -> Range; + fn to_point(&self, content: &MultiBufferSnapshot) -> Range; } impl AnchorRangeExt for Range { @@ -102,4 +103,8 @@ impl AnchorRangeExt for Range { fn to_offset(&self, content: &MultiBufferSnapshot) -> Range { self.start.to_offset(&content)..self.end.to_offset(&content) } + + fn to_point(&self, content: &MultiBufferSnapshot) -> Range { + self.start.to_point(&content)..self.end.to_point(&content) + } } diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index f3fd04ff745bdaf7baf6ba748ad831f41ddb0ad8..144d7b93fa8ece15ba06022d6092ee1b752ad7d1 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -947,8 +947,8 @@ mod tests { editor::{Editor, EditorSettings, Input}, fs::{FakeFs, Fs as _}, language::{ - tree_sitter_rust, Diagnostic, Language, LanguageConfig, LanguageRegistry, - LanguageServerConfig, MultiBuffer, Point, + tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, + LanguageRegistry, LanguageServerConfig, MultiBuffer, Point, }, lsp, project::{ProjectPath, Worktree}, @@ -1036,7 +1036,9 @@ mod tests { .await .unwrap(); let buffer_b = cx_b.add_model(|cx| MultiBuffer::singleton(buffer_b, cx)); - buffer_b.read_with(&cx_b, |buf, _| assert_eq!(buf.text(), "b-contents")); + buffer_b.read_with(&cx_b, |buf, cx| { + assert_eq!(buf.read(cx).text(), "b-contents") + }); worktree_a.read_with(&cx_a, |tree, cx| assert!(tree.has_open_buffer("b.txt", cx))); let buffer_a = worktree_a .update(&mut cx_a, |tree, cx| tree.open_buffer("b.txt", cx)) From 5b31c1ba4e67391aba55be3163ac8fb3bc9f07a5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 10 Dec 2021 18:04:11 +0100 Subject: [PATCH 044/196] Start making `MultiBuffer` work with a singleton buffer Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 4 +- crates/language/src/multi_buffer.rs | 144 +++++++++++++++++++++++----- 2 files changed, 124 insertions(+), 24 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f9e6951f8a67f24ed7b4ec1e835c04a6b53f9ab7..ab1b402ac02f444d12d1324b81624e8b4a2a9540 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3035,7 +3035,7 @@ impl Editor { let range = (range.start.to_offset(&display_map, Bias::Left), Bias::Left) ..(range.end.to_offset(&display_map, Bias::Left), Bias::Right); buffer - .selection_set(set_id) + .selection_set(set_id, cx) .unwrap() .intersecting_selections::(range, &buffer.read(cx)) .map(move |s| Selection { @@ -3131,7 +3131,7 @@ impl Editor { fn selection_set<'a>(&self, cx: &'a AppContext) -> &'a SelectionSet { self.buffer .read(cx) - .selection_set(self.selection_set_id) + .selection_set(self.selection_set_id, cx) .unwrap() } diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 
35aef1aecbdf56ba874e8df584bf6318e187e3c7..f339662c98055b66f34ea0006b419d5a14f25787 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -3,14 +3,14 @@ mod selection; use crate::{ buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, - BufferSnapshot, Diagnostic, DiagnosticEntry, File, Language, + BufferSnapshot, DiagnosticEntry, File, Language, }; pub use anchor::{Anchor, AnchorRangeExt}; +use anyhow::anyhow; use anyhow::Result; use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; -use parking_lot::{Mutex, MutexGuard}; pub use selection::SelectionSet; use std::{ cell::{Ref, RefCell}, @@ -32,12 +32,12 @@ const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; pub type ExcerptId = Locator; -#[derive(Default)] pub struct MultiBuffer { snapshot: RefCell, buffers: HashMap, subscriptions: Topic, selection_sets: HashMap, + replica_id: ReplicaId, } pub trait ToOffset: 'static { @@ -58,6 +58,7 @@ struct BufferState { #[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, + replica_id: ReplicaId, } pub struct ExcerptProperties<'a, T> { @@ -94,12 +95,18 @@ pub struct MultiBufferBytes<'a> { } impl MultiBuffer { - pub fn new() -> Self { - Self::default() + pub fn new(replica_id: ReplicaId) -> Self { + Self { + snapshot: Default::default(), + buffers: Default::default(), + subscriptions: Default::default(), + selection_sets: Default::default(), + replica_id, + } } pub fn singleton(buffer: ModelHandle, cx: &mut ModelContext) -> Self { - let mut this = Self::new(); + let mut this = Self::new(buffer.read(cx).replica_id()); this.push( ExcerptProperties { buffer: &buffer, @@ -116,8 +123,8 @@ impl MultiBuffer { cx.add_model(|cx| Self::singleton(buffer, cx)) } - pub fn replica_id(&self) -> clock::ReplicaId { - todo!() + pub fn replica_id(&self) -> ReplicaId { + self.replica_id } pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { @@ -175,7 +182,18 @@ impl MultiBuffer { S: ToOffset, T: Into, { - todo!() + // TODO + let snapshot = self.read(cx); + let ranges_iter = ranges_iter + .into_iter() + .map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot)); + self.as_singleton().unwrap().update(cx, |buffer, cx| { + if autoindent { + buffer.edit_with_autoindent(ranges_iter, new_text, cx); + } else { + buffer.edit(ranges_iter, new_text, cx); + } + }); } pub fn start_transaction( @@ -183,7 +201,10 @@ impl MultiBuffer { selection_set_ids: impl IntoIterator, cx: &mut ModelContext, ) -> Result<()> { - todo!() + // TODO + self.as_singleton() + .unwrap() + .update(cx, |buffer, _| buffer.start_transaction(selection_set_ids)) } pub fn end_transaction( @@ -191,19 +212,62 @@ impl MultiBuffer { selection_set_ids: impl IntoIterator, cx: &mut ModelContext, ) -> Result<()> { - todo!() + // TODO + self.as_singleton().unwrap().update(cx, |buffer, cx| { + buffer.end_transaction(selection_set_ids, cx) + }) } pub fn undo(&mut self, cx: &mut ModelContext) { - todo!() + // TODO + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.undo(cx)) } pub fn redo(&mut self, cx: &mut ModelContext) { - todo!() - } - - pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { - todo!() + // TODO + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.redo(cx)) + } + + pub fn selection_set(&self, set_id: SelectionSetId, cx: &AppContext) -> Result<&SelectionSet> { + // TODO + let set = self + .as_singleton() + .unwrap() + 
.read(cx) + .selection_set(set_id)?; + let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone(); + + let selection_sets: &mut HashMap = + unsafe { &mut *(&self.selection_sets as *const _ as *mut _) }; + selection_sets.insert( + set_id, + SelectionSet { + id: set.id, + active: set.active, + selections: set + .selections + .iter() + .map(|selection| Selection { + id: selection.id, + start: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: selection.start.clone(), + }, + end: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: selection.end.clone(), + }, + reversed: selection.reversed, + goal: selection.goal, + }) + .collect(), + }, + ); + Ok(self.selection_sets.get(&set.id).unwrap()) } pub fn add_selection_set( @@ -211,7 +275,23 @@ impl MultiBuffer { selections: &[Selection], cx: &mut ModelContext, ) -> SelectionSetId { - todo!() + // TODO + let snapshot = self.read(cx); + self.as_singleton().unwrap().update(cx, |buffer, cx| { + buffer.add_selection_set( + &selections + .iter() + .map(|selection| Selection { + id: selection.id, + start: selection.start.to_offset(&snapshot), + end: selection.end.to_offset(&snapshot), + reversed: selection.reversed, + goal: selection.goal, + }) + .collect::>(), + cx, + ) + }) } pub fn remove_selection_set( @@ -219,7 +299,10 @@ impl MultiBuffer { set_id: SelectionSetId, cx: &mut ModelContext, ) -> Result<()> { - todo!() + // TODO + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.remove_selection_set(set_id, cx)) } pub fn update_selection_set( @@ -228,7 +311,24 @@ impl MultiBuffer { selections: &[Selection], cx: &mut ModelContext, ) -> Result<()> { - todo!() + // TODO + let snapshot = self.read(cx); + self.as_singleton().unwrap().update(cx, |buffer, cx| { + buffer.update_selection_set( + set_id, + &selections + .iter() + .map(|selection| Selection { + id: selection.id, + start: selection.start.to_offset(&snapshot), + end: selection.end.to_offset(&snapshot), + reversed: selection.reversed, + goal: selection.goal, + }) + .collect::>(), + cx, + ) + }) } pub fn set_active_selection_set( @@ -1136,7 +1236,7 @@ mod tests { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let list = cx.add_model(|_| MultiBuffer::new()); + let list = cx.add_model(|_| MultiBuffer::new(0)); let subscription = list.update(cx, |list, cx| { let subscription = list.subscribe(); @@ -1245,7 +1345,7 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let list = cx.add_model(|_| MultiBuffer::new()); + let list = cx.add_model(|_| MultiBuffer::new(0)); let mut excerpt_ids = Vec::new(); let mut expected_excerpts = Vec::new(); let mut old_versions = Vec::new(); From a758bd4f8d88586a965002d0c4d64b86d2b7c618 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Dec 2021 12:58:32 -0800 Subject: [PATCH 045/196] Fill in some missing methods on MultiBuffer, MultiBufferSnapshot --- crates/editor/src/display_map.rs | 2 +- crates/editor/src/display_map/block_map.rs | 15 +- crates/editor/src/display_map/fold_map.rs | 1 - crates/editor/src/editor.rs | 4 +- crates/editor/src/items.rs | 8 +- crates/language/src/buffer.rs | 132 +++++----- crates/language/src/multi_buffer.rs | 286 ++++++++++++++------- crates/language/src/tests.rs | 26 +- crates/project/src/worktree.rs | 1 + crates/server/src/rpc.rs | 1 + 10 files changed, 287 insertions(+), 189 deletions(-) diff --git a/crates/editor/src/display_map.rs 
b/crates/editor/src/display_map.rs index 41683863e61d02040e822417bf11510dc4498d2b..35d859709cbe3140f2979cac0f7dc7a24e89aeeb 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -54,7 +54,7 @@ impl DisplayMap { let (fold_map, snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (tab_map, snapshot) = TabMap::new(snapshot, tab_size); let (wrap_map, snapshot) = WrapMap::new(snapshot, font_id, font_size, wrap_width, cx); - let block_map = BlockMap::new(buffer.clone(), snapshot); + let block_map = BlockMap::new(snapshot); cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach(); DisplayMap { buffer, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 1b6c8a162ef15700bdcb12b5fa103cf5fbc867a9..58ac3d7308348135300257b1bcab6cc21f721b90 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,7 +1,7 @@ use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}; -use gpui::{AppContext, ElementBox, ModelHandle}; +use gpui::{AppContext, ElementBox}; use language::{ - multi_buffer::{Anchor, MultiBuffer, ToOffset, ToPoint as _}, + multi_buffer::{Anchor, ToOffset, ToPoint as _}, Chunk, }; use parking_lot::Mutex; @@ -22,7 +22,6 @@ use theme::SyntaxTheme; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; pub struct BlockMap { - buffer: ModelHandle, next_block_id: AtomicUsize, wrap_snapshot: Mutex, blocks: Vec>, @@ -112,9 +111,8 @@ pub struct BlockBufferRows<'a> { } impl BlockMap { - pub fn new(buffer: ModelHandle, wrap_snapshot: WrapSnapshot) -> Self { + pub fn new(wrap_snapshot: WrapSnapshot) -> Self { Self { - buffer, next_block_id: AtomicUsize::new(0), blocks: Vec::new(), transforms: Mutex::new(SumTree::from_item( @@ -869,6 +867,7 @@ mod tests { use super::*; use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap}; use gpui::{elements::Empty, Element}; + use language::multi_buffer::MultiBuffer; use rand::prelude::*; use std::env; use text::RandomCharIter; @@ -902,7 +901,7 @@ mod tests { let (fold_map, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, None, cx); - let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); + let mut block_map = BlockMap::new(wraps_snapshot.clone()); let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); writer.insert(vec![ @@ -1069,7 +1068,7 @@ mod tests { let (_, folds_snapshot) = FoldMap::new(buffer.read(cx).snapshot(cx)); let (_, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), 1); let (_, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, 14.0, Some(60.), cx); - let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot.clone()); + let mut block_map = BlockMap::new(wraps_snapshot.clone()); let mut writer = block_map.write(wraps_snapshot.clone(), vec![]); writer.insert(vec![ @@ -1127,7 +1126,7 @@ mod tests { let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); let (wrap_map, wraps_snapshot) = WrapMap::new(tabs_snapshot, font_id, font_size, wrap_width, cx); - let mut block_map = BlockMap::new(buffer.clone(), wraps_snapshot); + let mut block_map = BlockMap::new(wraps_snapshot); let mut expected_blocks = Vec::new(); for _ in 0..operations { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 
1290fe33fa2e31a377bee7ebf38a568206738216..f2dfcb5385270308a9a2b2ef027e606f9fd6d6b3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1224,7 +1224,6 @@ mod tests { let buffer = MultiBuffer::build_simple(&sample_text(5, 6, 'a'), cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; - let buffer = buffer.read(cx); let (mut writer, _, _) = map.write(buffer_snapshot.clone(), vec![]); writer.fold(vec![ diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ab1b402ac02f444d12d1324b81624e8b4a2a9540..f23f79669266528738070e3e8d88b62b1875a1ed 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -559,7 +559,7 @@ impl Editor { } pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { - self.buffer.read(cx).read(cx).language() + self.buffer.read(cx).language(cx) } pub fn set_placeholder_text( @@ -2996,7 +2996,7 @@ impl Editor { let buffer = self.buffer.read(cx); let replica_id = buffer.replica_id(); buffer - .selection_sets() + .selection_sets(cx) .filter(move |(set_id, set)| { set.active && (set_id.replica_id != replica_id || **set_id == self.selection_set_id) }) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index aeef3f71b92190b9645772ce179da090fcad6201..9a80456255a0ffe1845c49f727cd5f75a81dad15 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -76,9 +76,7 @@ impl ItemHandle for BufferItemHandle { font_properties, underline: None, }; - let language = buffer - .upgrade(cx) - .and_then(|buf| buf.read(cx).read(cx).language()); + let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language(cx)); let soft_wrap = match settings.soft_wrap(language) { settings::SoftWrap::None => crate::SoftWrap::None, settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth, @@ -222,11 +220,11 @@ impl ItemView for Editor { } fn is_dirty(&self, cx: &AppContext) -> bool { - self.buffer().read(cx).is_dirty() + self.buffer().read(cx).is_dirty(cx) } fn has_conflict(&self, cx: &AppContext) -> bool { - self.buffer().read(cx).has_conflict() + self.buffer().read(cx).has_conflict(cx) } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 56d887e2e2277aa98761ce4cce7beefcc9754669..a8a8b9b5e90ecdaaa31564da9784e542ab6fe572 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -829,31 +829,6 @@ impl Buffer { }) } - pub fn diagnostics_in_range<'a, T, O>( - &'a self, - search_range: Range, - ) -> impl 'a + Iterator> - where - T: 'a + ToOffset, - O: 'a + FromAnchor, - { - self.diagnostics.range(search_range, self, true) - } - - pub fn diagnostic_group<'a, O>( - &'a self, - group_id: usize, - ) -> impl 'a + Iterator> - where - O: 'a + FromAnchor, - { - self.diagnostics.group(group_id, self) - } - - pub fn diagnostics_update_count(&self) -> usize { - self.diagnostics_update_count - } - fn request_autoindent(&mut self, cx: &mut ModelContext) { if let Some(indent_columns) = self.compute_autoindents() { let indent_columns = cx.background().spawn(indent_columns); @@ -1057,47 +1032,6 @@ impl Buffer { } } - pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { - if let Some(tree) = self.syntax_tree() { - let root = tree.root_node(); - let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut node = root.descendant_for_byte_range(range.start, range.end); - while node.map_or(false, |n| n.byte_range() == range) { - node = 
node.unwrap().parent(); - } - node.map(|n| n.byte_range()) - } else { - None - } - } - - pub fn enclosing_bracket_ranges( - &self, - range: Range, - ) -> Option<(Range, Range)> { - let (grammar, tree) = self.grammar().zip(self.syntax_tree())?; - let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?; - let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?; - - // Find bracket pairs that *inclusively* contain the given range. - let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; - let mut cursor = QueryCursorHandle::new(); - let matches = cursor.set_byte_range(range).matches( - &grammar.brackets_query, - tree.root_node(), - TextProvider(self.as_rope()), - ); - - // Get the ranges of the innermost pair of brackets. - matches - .filter_map(|mat| { - let open = mat.nodes_for_capture_index(open_capture_ix).next()?; - let close = mat.nodes_for_capture_index(close_capture_ix).next()?; - Some((open.byte_range(), close.byte_range())) - }) - .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) - } - pub(crate) fn diff(&self, new_text: Arc, cx: &AppContext) -> Task { // TODO: it would be nice to not allocate here. let old_text = self.text(); @@ -1745,12 +1679,78 @@ impl BufferSnapshot { } } + pub fn language(&self) -> Option<&Arc> { + self.language.as_ref() + } + fn grammar(&self) -> Option<&Arc> { self.language .as_ref() .and_then(|language| language.grammar.as_ref()) } + pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { + if let Some(tree) = self.tree.as_ref() { + let root = tree.root_node(); + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut node = root.descendant_for_byte_range(range.start, range.end); + while node.map_or(false, |n| n.byte_range() == range) { + node = node.unwrap().parent(); + } + node.map(|n| n.byte_range()) + } else { + None + } + } + + pub fn enclosing_bracket_ranges( + &self, + range: Range, + ) -> Option<(Range, Range)> { + let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?; + let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?; + let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?; + + // Find bracket pairs that *inclusively* contain the given range. + let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1; + let mut cursor = QueryCursorHandle::new(); + let matches = cursor.set_byte_range(range).matches( + &grammar.brackets_query, + tree.root_node(), + TextProvider(self.as_rope()), + ); + + // Get the ranges of the innermost pair of brackets. 
+ matches + .filter_map(|mat| { + let open = mat.nodes_for_capture_index(open_capture_ix).next()?; + let close = mat.nodes_for_capture_index(close_capture_ix).next()?; + Some((open.byte_range(), close.byte_range())) + }) + .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) + } + + pub fn diagnostics_in_range<'a, T, O>( + &'a self, + search_range: Range, + ) -> impl 'a + Iterator> + where + T: 'a + ToOffset, + O: 'a + FromAnchor, + { + self.diagnostics.range(search_range, self, true) + } + + pub fn diagnostic_group<'a, O>( + &'a self, + group_id: usize, + ) -> impl 'a + Iterator> + where + O: 'a + FromAnchor, + { + self.diagnostics.group(group_id, self) + } + pub fn diagnostics_update_count(&self) -> usize { self.diagnostics_update_count } diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index f339662c98055b66f34ea0006b419d5a14f25787..fd48c4b1be03852d8ab1d19010b15bf4ffb5b98f 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -6,7 +6,6 @@ use crate::{ BufferSnapshot, DiagnosticEntry, File, Language, }; pub use anchor::{Anchor, AnchorRangeExt}; -use anyhow::anyhow; use anyhow::Result; use clock::ReplicaId; use collections::HashMap; @@ -15,6 +14,7 @@ pub use selection::SelectionSet; use std::{ cell::{Ref, RefCell}, cmp, io, + iter::Peekable, ops::{Range, Sub}, sync::Arc, time::SystemTime, @@ -58,7 +58,6 @@ struct BufferState { #[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, - replica_id: ReplicaId, } pub struct ExcerptProperties<'a, T> { @@ -91,7 +90,7 @@ pub struct MultiBufferChunks<'a> { } pub struct MultiBufferBytes<'a> { - chunks: MultiBufferChunks<'a>, + chunks: Peekable>, } impl MultiBuffer { @@ -336,12 +335,46 @@ impl MultiBuffer { set_id: Option, cx: &mut ModelContext, ) -> Result<()> { - todo!() + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.set_active_selection_set(set_id, cx)) } - pub fn selection_sets(&self) -> impl Iterator { - todo!(); - None.into_iter() + pub fn selection_sets( + &self, + cx: &AppContext, + ) -> impl Iterator { + let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone(); + let selection_sets: &mut HashMap = + unsafe { &mut *(&self.selection_sets as *const _ as *mut _) }; + selection_sets.clear(); + for (selection_set_id, set) in self.as_singleton().unwrap().read(cx).selection_sets() { + selection_sets.insert( + *selection_set_id, + SelectionSet { + id: set.id, + active: set.active, + selections: set + .selections + .iter() + .map(|selection| Selection { + id: selection.id, + start: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: selection.start.clone(), + }, + end: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: selection.end.clone(), + }, + reversed: selection.reversed, + goal: selection.goal, + }) + .collect(), + }, + ); + } + self.selection_sets.iter() } pub fn push(&mut self, props: ExcerptProperties, cx: &mut ModelContext) -> ExcerptId @@ -382,7 +415,13 @@ impl MultiBuffer { &mut self, cx: &mut ModelContext, ) -> Result>> { - todo!() + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.save(cx)) + } + + pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { + self.as_singleton().unwrap().read(cx).language() } pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> { @@ -390,16 +429,16 @@ impl MultiBuffer { .and_then(|buffer| buffer.read(cx).file()) } - pub fn is_dirty(&self) -> bool { - todo!() + pub fn is_dirty(&self, cx: 
&AppContext) -> bool { + self.as_singleton().unwrap().read(cx).is_dirty() } - pub fn has_conflict(&self) -> bool { - todo!() + pub fn has_conflict(&self, cx: &AppContext) -> bool { + self.as_singleton().unwrap().read(cx).has_conflict() } - pub fn is_parsing(&self, _: &AppContext) -> bool { - todo!() + pub fn is_parsing(&self, cx: &AppContext) -> bool { + self.as_singleton().unwrap().read(cx).is_parsing() } fn sync(&self, cx: &AppContext) { @@ -473,12 +512,21 @@ impl MultiBuffer { #[cfg(any(test, feature = "test-support"))] impl MultiBuffer { - pub fn randomly_edit(&mut self, _: &mut R, _: usize, _: &mut ModelContext) { - todo!() + pub fn randomly_edit( + &mut self, + rng: &mut R, + count: usize, + cx: &mut ModelContext, + ) { + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx)) } pub fn randomly_mutate(&mut self, rng: &mut R, cx: &mut ModelContext) { - todo!() + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx)) } } @@ -487,10 +535,6 @@ impl Entity for MultiBuffer { } impl MultiBufferSnapshot { - pub fn replica_id(&self) -> ReplicaId { - todo!() - } - pub fn text(&self) -> String { self.chunks(0..self.len(), None) .map(|chunk| chunk.text) @@ -501,8 +545,9 @@ impl MultiBufferSnapshot { &'a self, position: T, ) -> impl Iterator + 'a { - todo!(); - None.into_iter() + // TODO + let offset = position.to_offset(self); + self.as_singleton().unwrap().reversed_chars_at(offset) } pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator + 'a { @@ -523,11 +568,22 @@ impl MultiBufferSnapshot { .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none()) } - pub fn contains_str_at(&self, _: T, _: &str) -> bool + pub fn contains_str_at(&self, position: T, needle: &str) -> bool where T: ToOffset, { - todo!() + let offset = position.to_offset(self); + self.as_singleton().unwrap().contains_str_at(offset, needle) + } + + fn as_singleton(&self) -> Option<&BufferSnapshot> { + let mut excerpts = self.excerpts.iter(); + let buffer = excerpts.next().map(|excerpt| &excerpt.buffer); + if excerpts.next().is_none() { + buffer + } else { + None + } } pub fn len(&self) -> usize { @@ -610,7 +666,9 @@ impl MultiBufferSnapshot { } pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range) -> MultiBufferBytes<'a> { - todo!() + MultiBufferBytes { + chunks: self.chunks(range, None).peekable(), + } } pub fn chunks<'a, T: ToOffset>( @@ -618,48 +676,15 @@ impl MultiBufferSnapshot { range: Range, theme: Option<&'a SyntaxTheme>, ) -> MultiBufferChunks<'a> { - let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut cursor = self.excerpts.cursor::(); - cursor.seek(&range.start, Bias::Right, &()); - - let mut header_height: u8 = 0; - let excerpt_chunks = cursor.item().map(|excerpt| { - let buffer_range = excerpt.range.to_offset(&excerpt.buffer); - header_height = excerpt.header_height; - - let buffer_start; - let start_overshoot = range.start - cursor.start(); - if start_overshoot < excerpt.header_height as usize { - header_height -= start_overshoot as u8; - buffer_start = buffer_range.start; - } else { - buffer_start = - buffer_range.start + start_overshoot - excerpt.header_height as usize; - header_height = 0; - } - - let buffer_end; - let end_overshoot = range.end - cursor.start(); - if end_overshoot < excerpt.header_height as usize { - header_height -= excerpt.header_height - end_overshoot as u8; - buffer_end = buffer_start; - } else { - buffer_end = cmp::min( - buffer_range.end, - 
buffer_range.start + end_overshoot - excerpt.header_height as usize, - ); - } - - excerpt.buffer.chunks(buffer_start..buffer_end, theme) - }); - - MultiBufferChunks { - range, - cursor, - header_height, - excerpt_chunks, + let mut result = MultiBufferChunks { + range: 0..range.end.to_offset(self), + cursor: self.excerpts.cursor::(), + header_height: 0, + excerpt_chunks: None, theme, - } + }; + result.seek(range.start.to_offset(self)); + result } pub fn offset_to_point(&self, offset: usize) -> Point { @@ -736,33 +761,43 @@ impl MultiBufferSnapshot { } pub fn indent_column_for_line(&self, row: u32) -> u32 { - todo!() + if let Some((buffer, range)) = self.buffer_line_for_row(row) { + buffer + .indent_column_for_line(range.start.row) + .min(range.end.column) + .saturating_sub(range.start.column) + } else { + 0 + } } pub fn line_len(&self, row: u32) -> u32 { + if let Some((_, range)) = self.buffer_line_for_row(row) { + range.end.column - range.start.column + } else { + 0 + } + } + + fn buffer_line_for_row(&self, row: u32) -> Option<(&BufferSnapshot, Range)> { let mut cursor = self.excerpts.cursor::(); cursor.seek(&Point::new(row, 0), Bias::Right, &()); if let Some(excerpt) = cursor.item() { let overshoot = row - cursor.start().row; let header_height = excerpt.header_height as u32; - if overshoot < header_height { - 0 - } else { + if overshoot >= header_height { let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer); let buffer_row = excerpt_start.row + overshoot - header_height; - let mut len = excerpt.buffer.line_len(buffer_row); - if buffer_row == excerpt_end.row { - len = excerpt_end.column; - } - if buffer_row == excerpt_start.row { - len -= excerpt_start.column - } - len + let line_start = Point::new(buffer_row, 0); + let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row)); + return Some(( + &excerpt.buffer, + line_start.max(excerpt_start)..line_end.min(excerpt_end), + )); } - } else { - 0 } + None } pub fn max_point(&self) -> Point { @@ -940,26 +975,42 @@ impl MultiBufferSnapshot { } pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { - todo!() + let offset = position.to_offset(self); + let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); + cursor.seek(&offset, bias, &()); + if let Some(excerpt) = cursor.item() { + let overshoot = + (offset - cursor.start().0).saturating_sub(excerpt.header_height as usize); + let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); + Anchor { + excerpt_id: excerpt.id.clone(), + text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias), + } + } else if offset == 0 && bias == Bias::Left { + Anchor::min() + } else { + Anchor::max() + } } pub fn parse_count(&self) -> usize { - todo!() + self.as_singleton().unwrap().parse_count() } pub fn enclosing_bracket_ranges( &self, range: Range, ) -> Option<(Range, Range)> { - todo!() + let range = range.start.to_offset(self)..range.end.to_offset(self); + self.as_singleton().unwrap().enclosing_bracket_ranges(range) } pub fn diagnostics_update_count(&self) -> usize { - todo!() + self.as_singleton().unwrap().diagnostics_update_count() } - pub fn language<'a>(&self) -> Option<&'a Arc> { - todo!() + pub fn language(&self) -> Option<&Arc> { + self.as_singleton().unwrap().language() } pub fn diagnostic_group<'a, O>( @@ -967,26 +1018,28 @@ impl MultiBufferSnapshot { group_id: usize, ) -> impl Iterator> + 'a where - O: 'a, + O: text::FromAnchor + 'a, { - todo!(); - None.into_iter() + 
self.as_singleton().unwrap().diagnostic_group(group_id) } pub fn diagnostics_in_range<'a, T, O>( &'a self, - search_range: Range, + range: Range, ) -> impl Iterator> + 'a where T: 'a + ToOffset, - O: 'a, + O: 'a + text::FromAnchor, { - todo!(); - None.into_iter() + let range = range.start.to_offset(self)..range.end.to_offset(self); + self.as_singleton().unwrap().diagnostics_in_range(range) } pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { - todo!() + let range = range.start.to_offset(self)..range.end.to_offset(self); + self.as_singleton() + .unwrap() + .range_for_syntax_ancestor(range) } fn buffer_snapshot_for_excerpt<'a>( @@ -996,7 +1049,7 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::>(); cursor.seek(&Some(excerpt_id), Bias::Left, &()); if let Some(excerpt) = cursor.item() { - if *cursor.start() == Some(excerpt_id) { + if excerpt.id == *excerpt_id { return Some(&excerpt.buffer); } } @@ -1114,11 +1167,43 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { impl<'a> MultiBufferChunks<'a> { pub fn offset(&self) -> usize { - todo!() + self.range.start } pub fn seek(&mut self, offset: usize) { - todo!() + self.range.start = offset; + self.cursor.seek_forward(&offset, Bias::Right, &()); + self.header_height = 0; + self.excerpt_chunks = None; + if let Some(excerpt) = self.cursor.item() { + let buffer_range = excerpt.range.to_offset(&excerpt.buffer); + self.header_height = excerpt.header_height; + + let buffer_start; + let start_overshoot = self.range.start - self.cursor.start(); + if start_overshoot < excerpt.header_height as usize { + self.header_height -= start_overshoot as u8; + buffer_start = buffer_range.start; + } else { + buffer_start = + buffer_range.start + start_overshoot - excerpt.header_height as usize; + self.header_height = 0; + } + + let buffer_end; + let end_overshoot = self.range.end - self.cursor.start(); + if end_overshoot < excerpt.header_height as usize { + self.header_height -= excerpt.header_height - end_overshoot as u8; + buffer_end = buffer_start; + } else { + buffer_end = cmp::min( + buffer_range.end, + buffer_range.start + end_overshoot - excerpt.header_height as usize, + ); + } + + self.excerpt_chunks = Some(excerpt.buffer.chunks(buffer_start..buffer_end, self.theme)); + } } } @@ -1134,16 +1219,19 @@ impl<'a> Iterator for MultiBufferChunks<'a> { }, ..Default::default() }; + self.range.start += self.header_height as usize; self.header_height = 0; return Some(chunk); } if let Some(excerpt_chunks) = self.excerpt_chunks.as_mut() { if let Some(chunk) = excerpt_chunks.next() { + self.range.start += chunk.text.len(); return Some(chunk); } self.excerpt_chunks.take(); if self.cursor.end(&()) <= self.range.end { + self.range.start += 1; return Some(Chunk { text: "\n", ..Default::default() @@ -1180,7 +1268,7 @@ impl<'a> Iterator for MultiBufferBytes<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option { - todo!() + self.chunks.next().map(|chunk| chunk.text.as_bytes()) } } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index c10c2064af8b8fc720564be2313193b075d478a2..cb99575a34aa32f9257ae27f3294b7634aeb2f4d 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -539,6 +539,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // The diagnostics have moved down since they were created. 
assert_eq!( buffer + .snapshot() .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ @@ -606,6 +607,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer + .snapshot() .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ @@ -685,6 +687,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .unwrap(); assert_eq!( buffer + .snapshot() .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ @@ -870,6 +873,7 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { buffer.update_diagnostics(None, diagnostics, cx).unwrap(); assert_eq!( buffer + .snapshot() .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ @@ -922,7 +926,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ); assert_eq!( - buffer.diagnostic_group::(0).collect::>(), + buffer + .snapshot() + .diagnostic_group::(0) + .collect::>(), &[ DiagnosticEntry { range: Point::new(1, 8)..Point::new(1, 9), @@ -945,7 +952,10 @@ async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { ] ); assert_eq!( - buffer.diagnostic_group::(1).collect::>(), + buffer + .snapshot() + .diagnostic_group::(1) + .collect::>(), &[ DiagnosticEntry { range: Point::new(1, 13)..Point::new(1, 15), @@ -1022,11 +1032,13 @@ impl Buffer { &self, range: Range, ) -> Option<(Range, Range)> { - self.enclosing_bracket_ranges(range).map(|(start, end)| { - let point_start = start.start.to_point(self)..start.end.to_point(self); - let point_end = end.start.to_point(self)..end.end.to_point(self); - (point_start, point_end) - }) + self.snapshot() + .enclosing_bracket_ranges(range) + .map(|(start, end)| { + let point_start = start.start.to_point(self)..start.end.to_point(self); + let point_end = end.start.to_point(self)..end.end.to_point(self); + (point_start, point_end) + }) } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 943ab6dbd00dcd12522a978bb20803998f2f4d00..020bf64b7f326de0b5356bede785486cdc7b85c3 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3721,6 +3721,7 @@ mod tests { buffer.read_with(&cx, |buffer, _| { let diagnostics = buffer + .snapshot() .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(); assert_eq!( diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 144d7b93fa8ece15ba06022d6092ee1b752ad7d1..0c8dd4b45826fa0ef69047c299f4dededb9843f7 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1707,6 +1707,7 @@ mod tests { buffer_b.read_with(&cx_b, |buffer, _| { assert_eq!( buffer + .snapshot() .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ From ceff57d02ff0d3466e4d902aab2cb11deeeb70ba Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Dec 2021 14:27:58 -0800 Subject: [PATCH 046/196] Don't append a trailing newline in singleton MultiBuffer --- crates/language/src/multi_buffer.rs | 81 ++++++++++++++++++++++------- 1 file changed, 61 insertions(+), 20 deletions(-) diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index fd48c4b1be03852d8ab1d19010b15bf4ffb5b98f..2e6d94309d9239e121b89c7af936b814de9bada3 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -37,6 +37,7 @@ pub struct MultiBuffer { buffers: HashMap, subscriptions: Topic, selection_sets: HashMap, + singleton: bool, replica_id: ReplicaId, } @@ -73,6 +74,7 @@ struct Excerpt { range: Range, text_summary: 
TextSummary, header_height: u8, + has_trailing_newline: bool, } #[derive(Clone, Debug, Default)] @@ -85,6 +87,7 @@ pub struct MultiBufferChunks<'a> { range: Range, cursor: Cursor<'a, Excerpt, usize>, header_height: u8, + has_trailing_newline: bool, excerpt_chunks: Option>, theme: Option<&'a SyntaxTheme>, } @@ -100,12 +103,14 @@ impl MultiBuffer { buffers: Default::default(), subscriptions: Default::default(), selection_sets: Default::default(), + singleton: false, replica_id, } } pub fn singleton(buffer: ModelHandle, cx: &mut ModelContext) -> Self { let mut this = Self::new(buffer.read(cx).replica_id()); + this.singleton = true; this.push( ExcerptProperties { buffer: &buffer, @@ -390,7 +395,13 @@ impl MultiBuffer { let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); let edit_start = snapshot.excerpts.summary().text.bytes; - let excerpt = Excerpt::new(id.clone(), buffer.snapshot(), range, props.header_height); + let excerpt = Excerpt::new( + id.clone(), + buffer.snapshot(), + range, + props.header_height, + !self.singleton, + ); let edit = Edit { old: edit_start..edit_start, new: edit_start..edit_start + excerpt.text_summary.bytes, @@ -495,6 +506,7 @@ impl MultiBuffer { buffer.snapshot(), old_excerpt.range.clone(), old_excerpt.header_height, + !self.singleton, ), &(), ); @@ -520,13 +532,15 @@ impl MultiBuffer { ) { self.as_singleton() .unwrap() - .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx)) + .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx)); + self.sync(cx); } pub fn randomly_mutate(&mut self, rng: &mut R, cx: &mut ModelContext) { self.as_singleton() .unwrap() - .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx)) + .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx)); + self.sync(cx); } } @@ -681,6 +695,7 @@ impl MultiBufferSnapshot { cursor: self.excerpts.cursor::(), header_height: 0, excerpt_chunks: None, + has_trailing_newline: false, theme, }; result.seek(range.start.to_offset(self)); @@ -834,7 +849,11 @@ impl MultiBufferSnapshot { range.end = cmp::max(range.start, range.end); } - let end_before_newline = cursor.end(&()) - 1; + let mut end_before_newline = cursor.end(&()); + if excerpt.has_trailing_newline { + end_before_newline -= 1; + } + let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); let start_in_excerpt = excerpt_start + (range.start - start_after_header); let end_in_excerpt = @@ -1063,6 +1082,7 @@ impl Excerpt { buffer: buffer::BufferSnapshot, range: Range, header_height: u8, + has_trailing_newline: bool, ) -> Self { let mut text_summary = buffer.text_summary_for_range::(range.to_offset(&buffer)); @@ -1073,12 +1093,14 @@ impl Excerpt { text_summary.bytes += header_height as usize; text_summary.longest_row += header_height as u32; } - text_summary.last_line_chars = 0; - text_summary.lines.row += 1; - text_summary.lines.column = 0; - text_summary.lines_utf16.row += 1; - text_summary.lines_utf16.column = 0; - text_summary.bytes += 1; + if has_trailing_newline { + text_summary.last_line_chars = 0; + text_summary.lines.row += 1; + text_summary.lines.column = 0; + text_summary.lines_utf16.row += 1; + text_summary.lines_utf16.column = 0; + text_summary.bytes += 1; + } Excerpt { id, @@ -1086,6 +1108,7 @@ impl Excerpt { range, text_summary, header_height, + has_trailing_newline, } } @@ -1178,6 +1201,7 @@ impl<'a> MultiBufferChunks<'a> { if let Some(excerpt) = self.cursor.item() { let buffer_range = excerpt.range.to_offset(&excerpt.buffer); self.header_height = excerpt.header_height; + 
self.has_trailing_newline = excerpt.has_trailing_newline; let buffer_start; let start_overshoot = self.range.start - self.cursor.start(); @@ -1230,7 +1254,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { return Some(chunk); } self.excerpt_chunks.take(); - if self.cursor.end(&()) <= self.range.end { + if self.has_trailing_newline && self.cursor.end(&()) <= self.range.end { self.range.start += 1; return Some(Chunk { text: "\n", @@ -1255,6 +1279,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { ); self.header_height = excerpt.header_height; + self.has_trailing_newline = excerpt.has_trailing_newline; self.excerpt_chunks = Some( excerpt .buffer @@ -1273,7 +1298,7 @@ impl<'a> Iterator for MultiBufferBytes<'a> { } impl<'a> io::Read for MultiBufferBytes<'a> { - fn read(&mut self, buf: &mut [u8]) -> io::Result { + fn read(&mut self, _: &mut [u8]) -> io::Result { todo!() } } @@ -1319,16 +1344,32 @@ mod tests { use text::{Point, RandomCharIter}; use util::test::sample_text; + #[gpui::test] + fn test_singleton_multibuffer(cx: &mut MutableAppContext) { + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); + let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + assert_eq!( + multibuffer.read(cx).snapshot(cx).text(), + buffer.read(cx).text() + ); + + buffer.update(cx, |buffer, cx| buffer.edit([1..3], "XXX", cx)); + assert_eq!( + multibuffer.read(cx).snapshot(cx).text(), + buffer.read(cx).text() + ); + } + #[gpui::test] fn test_excerpt_buffer(cx: &mut MutableAppContext) { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let list = cx.add_model(|_| MultiBuffer::new(0)); + let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); - let subscription = list.update(cx, |list, cx| { - let subscription = list.subscribe(); - list.push( + let subscription = multibuffer.update(cx, |multibuffer, cx| { + let subscription = multibuffer.subscribe(); + multibuffer.push( ExcerptProperties { buffer: &buffer_1, range: Point::new(1, 2)..Point::new(2, 5), @@ -1344,7 +1385,7 @@ mod tests { }] ); - list.push( + multibuffer.push( ExcerptProperties { buffer: &buffer_1, range: Point::new(3, 3)..Point::new(4, 4), @@ -1352,7 +1393,7 @@ mod tests { }, cx, ); - list.push( + multibuffer.push( ExcerptProperties { buffer: &buffer_2, range: Point::new(3, 1)..Point::new(3, 3), @@ -1372,7 +1413,7 @@ mod tests { }); assert_eq!( - list.read(cx).snapshot(cx).text(), + multibuffer.read(cx).snapshot(cx).text(), concat!( "\n", // Preserve newlines "\n", // @@ -1400,7 +1441,7 @@ mod tests { }); assert_eq!( - list.read(cx).snapshot(cx).text(), + multibuffer.read(cx).snapshot(cx).text(), concat!( "\n", // Preserve newlines "\n", // From 75dd37d873c54d133c6cae65d93169efa2aa2292 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Dec 2021 15:53:53 -0800 Subject: [PATCH 047/196] Update multibuffer when buffers' syntax trees or diagnostics change Co-Authored-By: Nathan Sobo --- crates/language/src/buffer.rs | 4 ++ crates/language/src/multi_buffer.rs | 88 ++++++++++++++++++----------- 2 files changed, 58 insertions(+), 34 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index a8a8b9b5e90ecdaaa31564da9784e542ab6fe572..8a483d37066a9decc7c3e1c68069bff25cbf64bf 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -580,6 +580,10 @@ impl Buffer { self.parse_count } + pub fn diagnostics_update_count(&self) -> usize { + 
self.diagnostics_update_count + } + pub(crate) fn syntax_tree(&self) -> Option { if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() { self.interpolate_tree(syntax_tree); diff --git a/crates/language/src/multi_buffer.rs b/crates/language/src/multi_buffer.rs index 2e6d94309d9239e121b89c7af936b814de9bada3..e54cb009f965030b90b755660596edd8c708a92b 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/language/src/multi_buffer.rs @@ -3,7 +3,7 @@ mod selection; use crate::{ buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, - BufferSnapshot, DiagnosticEntry, File, Language, + BufferSnapshot, DiagnosticEntry, Event, File, Language, }; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; @@ -52,7 +52,9 @@ pub trait ToPoint: 'static { #[derive(Debug)] struct BufferState { buffer: ModelHandle, - last_sync: clock::Global, + last_version: clock::Global, + last_parse_count: usize, + last_diagnostics_update_count: usize, excerpts: Vec, } @@ -388,6 +390,9 @@ impl MultiBuffer { { self.sync(cx); + let buffer = &props.buffer; + cx.subscribe(buffer, Self::on_buffer_event).detach(); + let buffer = props.buffer.read(cx); let range = buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); let mut snapshot = self.snapshot.borrow_mut(); @@ -411,7 +416,9 @@ impl MultiBuffer { .entry(props.buffer.id()) .or_insert_with(|| BufferState { buffer: props.buffer.clone(), - last_sync: buffer.version(), + last_version: buffer.version(), + last_parse_count: buffer.parse_count(), + last_diagnostics_update_count: buffer.diagnostics_update_count(), excerpts: Default::default(), }) .excerpts @@ -422,6 +429,15 @@ impl MultiBuffer { id } + fn on_buffer_event( + &mut self, + _: ModelHandle, + event: &Event, + cx: &mut ModelContext, + ) { + cx.emit(event.clone()); + } + pub fn save( &mut self, cx: &mut ModelContext, @@ -456,61 +472,65 @@ impl MultiBuffer { let mut snapshot = self.snapshot.borrow_mut(); let mut excerpts_to_edit = Vec::new(); for buffer_state in self.buffers.values() { - if buffer_state - .buffer - .read(cx) - .version() - .gt(&buffer_state.last_sync) + let buffer = buffer_state.buffer.read(cx); + let buffer_changed = buffer.version().gt(&buffer_state.last_version); + if buffer_changed + || buffer.parse_count() > buffer_state.last_parse_count + || buffer.diagnostics_update_count() > buffer_state.last_diagnostics_update_count { excerpts_to_edit.extend( buffer_state .excerpts .iter() - .map(|excerpt_id| (excerpt_id, buffer_state)), + .map(|excerpt_id| (excerpt_id, buffer_state, buffer_changed)), ); } } - excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _)| *excerpt_id); + excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _, _)| *excerpt_id); let mut edits = Vec::new(); let mut new_excerpts = SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); - for (id, buffer_state) in excerpts_to_edit { + for (id, buffer_state, buffer_changed) in excerpts_to_edit { new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer_state.buffer.read(cx); - edits.extend( - buffer - .edits_since_in_range::( - old_excerpt.buffer.version(), - old_excerpt.range.clone(), - ) - .map(|mut edit| { - let excerpt_old_start = - cursor.start().1 + old_excerpt.header_height as usize; - let excerpt_new_start = - new_excerpts.summary().text.bytes + old_excerpt.header_height as usize; - edit.old.start += excerpt_old_start; - edit.old.end += excerpt_old_start; - edit.new.start += 
excerpt_new_start; - edit.new.end += excerpt_new_start; - edit - }), - ); + let mut new_excerpt; + if buffer_changed { + edits.extend( + buffer + .edits_since_in_range::( + old_excerpt.buffer.version(), + old_excerpt.range.clone(), + ) + .map(|mut edit| { + let excerpt_old_start = + cursor.start().1 + old_excerpt.header_height as usize; + let excerpt_new_start = new_excerpts.summary().text.bytes + + old_excerpt.header_height as usize; + edit.old.start += excerpt_old_start; + edit.old.end += excerpt_old_start; + edit.new.start += excerpt_new_start; + edit.new.end += excerpt_new_start; + edit + }), + ); - new_excerpts.push( - Excerpt::new( + new_excerpt = Excerpt::new( id.clone(), buffer.snapshot(), old_excerpt.range.clone(), old_excerpt.header_height, !self.singleton, - ), - &(), - ); + ); + } else { + new_excerpt = old_excerpt.clone(); + new_excerpt.buffer = buffer.snapshot(); + } + new_excerpts.push(new_excerpt, &()); cursor.next(&()); } new_excerpts.push_tree(cursor.suffix(&()), &()); From 6caf016df9321f12d678483fba40eadbe52ea06f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 10 Dec 2021 15:54:25 -0800 Subject: [PATCH 048/196] Get tests passing w/ multibuffer in editor Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 3 ++- crates/language/src/multi_buffer/anchor.rs | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f23f79669266528738070e3e8d88b62b1875a1ed..3467d55771bf1c77aac9b4009a5ab3510a113e11 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2403,8 +2403,9 @@ impl Editor { let mut to_unfold = Vec::new(); let mut new_selections = Vec::new(); { + let selections = self.selections::(cx); let buffer = self.buffer.read(cx).read(cx); - for selection in self.selections::(cx) { + for selection in selections { for row in selection.start.row..selection.end.row { let cursor = Point::new(row, buffer.line_len(row)); new_selections.push(Selection { diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/language/src/multi_buffer/anchor.rs index 7bc8c85848900c8dbd3bfa5c8362967c8302437f..2b8b35ef4114eea3cd8620903fb8d562fffa44a4 100644 --- a/crates/language/src/multi_buffer/anchor.rs +++ b/crates/language/src/multi_buffer/anchor.rs @@ -31,6 +31,9 @@ impl Anchor { pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result { let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id); if excerpt_id_cmp.is_eq() { + if self.excerpt_id == ExcerptId::max() { + return Ok(Ordering::Equal); + } self.text_anchor.cmp( &other.text_anchor, snapshot From c8b43e30784fe51e01390e90d96d46de6a9c9855 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 17:37:53 -0700 Subject: [PATCH 049/196] Move multi_buffer to editor crate --- Cargo.lock | 1 + crates/editor/Cargo.toml | 3 +++ crates/editor/src/display_map.rs | 6 ++---- crates/editor/src/display_map/block_map.rs | 8 +++----- crates/editor/src/display_map/fold_map.rs | 9 +++------ crates/editor/src/display_map/tab_map.rs | 6 +++--- crates/editor/src/display_map/wrap_map.rs | 6 ++++-- crates/editor/src/editor.rs | 9 ++++++--- crates/editor/src/element.rs | 6 +++--- crates/editor/src/items.rs | 6 ++---- crates/editor/src/movement.rs | 5 ++--- .../{language => editor}/src/multi_buffer.rs | 20 +++++++++---------- .../src/multi_buffer/anchor.rs | 0 .../src/multi_buffer/selection.rs | 0 crates/language/src/language.rs | 2 -- crates/server/src/rpc.rs | 4 ++-- 16 files changed, 44 insertions(+), 47 
deletions(-) rename crates/{language => editor}/src/multi_buffer.rs (99%) rename crates/{language => editor}/src/multi_buffer/anchor.rs (100%) rename crates/{language => editor}/src/multi_buffer/selection.rs (100%) diff --git a/Cargo.lock b/Cargo.lock index 3fd01281a504d683f81b531a75467380e1cbe7a1..35d21eb7862a1aed599c4b4821dfac6b75cb42a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1527,6 +1527,7 @@ dependencies = [ "aho-corasick", "anyhow", "clock", + "collections", "ctor", "env_logger", "gpui", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index f87ec63b3ed2fa02774955b207112c0c79bef92f..24dbcff3da878a303964edc1f1f5a447450f275a 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -8,6 +8,7 @@ path = "src/editor.rs" [features] test-support = [ + "rand", "text/test-support", "language/test-support", "gpui/test-support", @@ -17,6 +18,7 @@ test-support = [ [dependencies] text = { path = "../text" } clock = { path = "../clock" } +collections = { path = "../collections" } gpui = { path = "../gpui" } language = { path = "../language" } project = { path = "../project" } @@ -30,6 +32,7 @@ lazy_static = "1.4" log = "0.4" parking_lot = "0.11" postage = { version = "0.4", features = ["futures-traits"] } +rand = { version = "0.8.3", optional = true } serde = { version = "1", features = ["derive", "rc"] } smallvec = { version = "1.6", features = ["union"] } smol = "1.2" diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 35d859709cbe3140f2979cac0f7dc7a24e89aeeb..1f420681a109279ff7a867137335011ae4085203 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -3,13 +3,11 @@ mod fold_map; mod tab_map; mod wrap_map; +use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle}; -use language::{ - multi_buffer::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}, - Point, Subscription as BufferSubscription, -}; +use language::{Point, Subscription as BufferSubscription}; use std::{ collections::{HashMap, HashSet}, ops::Range, diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 58ac3d7308348135300257b1bcab6cc21f721b90..cd25e5889924733f06cc667e7613f190006c0fa9 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,9 +1,7 @@ use super::wrap_map::{self, WrapEdit, WrapPoint, WrapSnapshot}; +use crate::{Anchor, ToOffset, ToPoint as _}; use gpui::{AppContext, ElementBox}; -use language::{ - multi_buffer::{Anchor, ToOffset, ToPoint as _}, - Chunk, -}; +use language::Chunk; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -866,8 +864,8 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) { mod tests { use super::*; use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap}; + use crate::multi_buffer::MultiBuffer; use gpui::{elements::Empty, Element}; - use language::multi_buffer::MultiBuffer; use rand::prelude::*; use std::env; use text::RandomCharIter; diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index f2dfcb5385270308a9a2b2ef027e606f9fd6d6b3..662c381f617979f06ea52808f94526faf5a9cfeb 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,7 +1,5 @@ -use language::{ - 
multi_buffer::{Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset}, - Chunk, Edit, Point, PointUtf16, TextSummary, -}; +use crate::{Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset}; +use language::{Chunk, Edit, Point, PointUtf16, TextSummary}; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, @@ -1057,8 +1055,7 @@ pub type FoldEdit = Edit; #[cfg(test)] mod tests { use super::*; - use crate::ToPoint; - use language::multi_buffer::MultiBuffer; + use crate::{MultiBuffer, ToPoint}; use rand::prelude::*; use std::{env, mem}; use text::RandomCharIter; diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 2786c594b6a677f2f8ef60a11d0bfe1511772216..5f62582581f0d303808d44ed13549257ffa4c965 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1,5 +1,6 @@ use super::fold_map::{self, FoldEdit, FoldPoint, FoldSnapshot, ToFoldPoint}; -use language::{multi_buffer::MultiBufferSnapshot, rope, Chunk}; +use crate::MultiBufferSnapshot; +use language::{rope, Chunk}; use parking_lot::Mutex; use std::{cmp, mem, ops::Range}; use sum_tree::Bias; @@ -438,8 +439,7 @@ impl<'a> Iterator for TabChunks<'a> { #[cfg(test)] mod tests { use super::*; - use crate::display_map::fold_map::FoldMap; - use language::multi_buffer::MultiBuffer; + use crate::{display_map::fold_map::FoldMap, MultiBuffer}; use rand::{prelude::StdRng, Rng}; use text::{RandomCharIter, Rope}; diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index d5c11f61dc9c0af85f9a7d7389c2300450c0536e..d1efb09ff75a0f1f1bb2ff620c1e6171bb16a12c 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -2,11 +2,12 @@ use super::{ fold_map, tab_map::{self, TabEdit, TabPoint, TabSnapshot}, }; +use crate::{MultiBufferSnapshot, Point}; use gpui::{ fonts::FontId, text_layout::LineWrapper, Entity, ModelContext, ModelHandle, MutableAppContext, Task, }; -use language::{multi_buffer::MultiBufferSnapshot, Chunk, Point}; +use language::Chunk; use lazy_static::lazy_static; use smol::future::yield_now; use std::{collections::VecDeque, mem, ops::Range, time::Duration}; @@ -977,8 +978,9 @@ mod tests { use crate::{ display_map::{fold_map::FoldMap, tab_map::TabMap}, test::Observer, + MultiBuffer, }; - use language::{multi_buffer::MultiBuffer, RandomCharIter}; + use language::RandomCharIter; use rand::prelude::*; use std::{cmp, env}; use text::Rope; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3467d55771bf1c77aac9b4009a5ab3510a113e11..ff569562d093f9005188618d70103a4c9fbaee27 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2,6 +2,7 @@ pub mod display_map; mod element; pub mod items; pub mod movement; +mod multi_buffer; #[cfg(test)] mod test; @@ -21,12 +22,14 @@ use gpui::{ }; use items::BufferItemHandle; use language::{ - multi_buffer::{ - Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, SelectionSet, ToOffset, ToPoint, - }, BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, SelectionSetId, }; +use multi_buffer::{ + Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, + SelectionSet, ToOffset, ToPoint, +}; +pub use multi_buffer::MultiBuffer; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 
a878bc4a17d538ea3f26a66da10dccd53534a38f..89c324a1ecd7dcde0c82614917dc15299a6078ca 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -4,6 +4,7 @@ use super::{ DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input, Scroll, Select, SelectPhase, SoftWrap, MAX_LINE_LEN, }; +use crate::ToPoint; use clock::ReplicaId; use gpui::{ color::Color, @@ -19,7 +20,7 @@ use gpui::{ MutableAppContext, PaintContext, Quad, Scene, SizeConstraint, ViewContext, WeakViewHandle, }; use json::json; -use language::{multi_buffer::ToPoint, Chunk}; +use language::Chunk; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, @@ -1162,8 +1163,7 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 { #[cfg(test)] mod tests { use super::*; - use crate::{Editor, EditorSettings}; - use language::{MultiBuffer}; + use crate::{Editor, EditorSettings, MultiBuffer}; use util::test::sample_text; #[gpui::test] diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 9a80456255a0ffe1845c49f727cd5f75a81dad15..2ecfac3646db355b0e6cb05ca87d1214a1ac5d64 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,14 +1,12 @@ use crate::{Editor, EditorSettings, Event}; +use crate::{MultiBuffer, ToPoint as _}; use anyhow::Result; use gpui::{ elements::*, fonts::TextStyle, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; -use language::{ - multi_buffer::{MultiBuffer, ToPoint as _}, - Diagnostic, File as _, -}; +use language::{Diagnostic, File as _}; use postage::watch; use project::{ProjectPath, Worktree}; use std::fmt::Write; diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 44cb1ebcf59fb4bf44a6c60ebe06a44f0d0970c4..6c7525cb00ef97288188f80b846105ffc3117ef7 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -1,6 +1,6 @@ use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; +use crate::ToPoint; use anyhow::Result; -use language::multi_buffer::ToPoint; use std::{cmp, ops::Range}; pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result { @@ -244,8 +244,7 @@ fn char_kind(c: char) -> CharKind { #[cfg(test)] mod tests { use super::*; - use crate::display_map::DisplayMap; - use language::MultiBuffer; + use crate::{DisplayMap, MultiBuffer}; #[gpui::test] fn test_prev_next_word_boundary_multibyte(cx: &mut gpui::MutableAppContext) { diff --git a/crates/language/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs similarity index 99% rename from crates/language/src/multi_buffer.rs rename to crates/editor/src/multi_buffer.rs index e54cb009f965030b90b755660596edd8c708a92b..82b7f0aafb8b972d9a7a029352df03bffc9e5c39 100644 --- a/crates/language/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1,15 +1,15 @@ mod anchor; mod selection; -use crate::{ - buffer::{self, Buffer, Chunk, ToOffset as _, ToPoint as _}, - BufferSnapshot, DiagnosticEntry, Event, File, Language, -}; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; +use language::{ + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, + ToOffset as _, ToPoint as _, +}; pub use selection::SelectionSet; use std::{ cell::{Ref, RefCell}, @@ -72,7 +72,7 @@ pub struct ExcerptProperties<'a, T> { 
#[derive(Clone)] struct Excerpt { id: ExcerptId, - buffer: buffer::BufferSnapshot, + buffer: BufferSnapshot, range: Range, text_summary: TextSummary, header_height: u8, @@ -90,7 +90,7 @@ pub struct MultiBufferChunks<'a> { cursor: Cursor<'a, Excerpt, usize>, header_height: u8, has_trailing_newline: bool, - excerpt_chunks: Option>, + excerpt_chunks: Option>, theme: Option<&'a SyntaxTheme>, } @@ -394,7 +394,7 @@ impl MultiBuffer { cx.subscribe(buffer, Self::on_buffer_event).detach(); let buffer = props.buffer.read(cx); - let range = buffer.anchor_before(props.range.start)..buffer.anchor_after(props.range.end); + let range = buffer.anchor_before(&props.range.start)..buffer.anchor_after(&props.range.end); let mut snapshot = self.snapshot.borrow_mut(); let prev_id = snapshot.excerpts.last().map(|e| &e.id); let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); @@ -565,7 +565,7 @@ impl MultiBuffer { } impl Entity for MultiBuffer { - type Event = super::Event; + type Event = language::Event; } impl MultiBufferSnapshot { @@ -1099,7 +1099,7 @@ impl MultiBufferSnapshot { impl Excerpt { fn new( id: ExcerptId, - buffer: buffer::BufferSnapshot, + buffer: BufferSnapshot, range: Range, header_height: u8, has_trailing_newline: bool, @@ -1357,8 +1357,8 @@ impl ToPoint for Point { #[cfg(test)] mod tests { use super::*; - use crate::buffer::Buffer; use gpui::MutableAppContext; + use language::Buffer; use rand::prelude::*; use std::env; use text::{Point, RandomCharIter}; diff --git a/crates/language/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs similarity index 100% rename from crates/language/src/multi_buffer/anchor.rs rename to crates/editor/src/multi_buffer/anchor.rs diff --git a/crates/language/src/multi_buffer/selection.rs b/crates/editor/src/multi_buffer/selection.rs similarity index 100% rename from crates/language/src/multi_buffer/selection.rs rename to crates/editor/src/multi_buffer/selection.rs diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 001317ea126d0f4460d65ae9314e950d27831e78..99161d1f5c778464e8c5d6f98367801a8381ccf1 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1,7 +1,6 @@ mod buffer; mod diagnostic_set; mod highlight_map; -pub mod multi_buffer; pub mod proto; #[cfg(test)] mod tests; @@ -14,7 +13,6 @@ use gpui::{executor::Background, AppContext}; use highlight_map::HighlightMap; use lazy_static::lazy_static; use lsp::LanguageServer; -pub use multi_buffer::MultiBuffer; use parking_lot::Mutex; use serde::Deserialize; use std::{collections::HashSet, path::Path, str, sync::Arc}; diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 0c8dd4b45826fa0ef69047c299f4dededb9843f7..54f2432b21e16609afd52173b8d8ce07700590e3 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -944,11 +944,11 @@ mod tests { EstablishConnectionError, UserStore, }, contacts_panel::JoinWorktree, - editor::{Editor, EditorSettings, Input}, + editor::{Editor, EditorSettings, Input, MultiBuffer}, fs::{FakeFs, Fs as _}, language::{ tree_sitter_rust, Diagnostic, DiagnosticEntry, Language, LanguageConfig, - LanguageRegistry, LanguageServerConfig, MultiBuffer, Point, + LanguageRegistry, LanguageServerConfig, Point, }, lsp, project::{ProjectPath, Worktree}, From 77defe6e2845ead50091a73c87514ebfdd2098b1 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 18:00:09 -0700 Subject: [PATCH 050/196] Return optional transaction ids when starting/ending a transaction 
If the transaction was nested, we return None. Otherwise we return the transaction id in preparation for editors to maintain their own selection state. --- crates/editor/src/editor.rs | 11 ++++----- crates/editor/src/multi_buffer.rs | 6 ++--- crates/language/src/buffer.rs | 38 ++++++++++++++++--------------- crates/language/src/tests.rs | 16 ++++++------- crates/text/src/tests.rs | 10 ++++---- crates/text/src/text.rs | 34 +++++++++++++++++++-------- 6 files changed, 64 insertions(+), 51 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ff569562d093f9005188618d70103a4c9fbaee27..ac112a25c88fa0531f2af320fb9da831265bd2c8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -25,11 +25,10 @@ use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, SelectionSetId, }; +pub use multi_buffer::MultiBuffer; use multi_buffer::{ - Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, - SelectionSet, ToOffset, ToPoint, + Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, SelectionSet, ToOffset, ToPoint, }; -pub use multi_buffer::MultiBuffer; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; @@ -3209,15 +3208,13 @@ impl Editor { fn start_transaction(&mut self, cx: &mut ViewContext) { self.end_selection(cx); self.buffer.update(cx, |buffer, cx| { - buffer - .start_transaction([self.selection_set_id], cx) - .unwrap() + buffer.start_transaction([self.selection_set_id], cx); }); } fn end_transaction(&self, cx: &mut ViewContext) { self.buffer.update(cx, |buffer, cx| { - buffer.end_transaction([self.selection_set_id], cx).unwrap() + buffer.end_transaction([self.selection_set_id], cx); }); } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 82b7f0aafb8b972d9a7a029352df03bffc9e5c39..cc1425f1cf7d4aa7bdc5f3a6ffa48ddb78fb4e5a 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -8,7 +8,7 @@ use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, - ToOffset as _, ToPoint as _, + ToOffset as _, ToPoint as _, TransactionId, }; pub use selection::SelectionSet; use std::{ @@ -206,7 +206,7 @@ impl MultiBuffer { &mut self, selection_set_ids: impl IntoIterator, cx: &mut ModelContext, - ) -> Result<()> { + ) -> Option { // TODO self.as_singleton() .unwrap() @@ -217,7 +217,7 @@ impl MultiBuffer { &mut self, selection_set_ids: impl IntoIterator, cx: &mut ModelContext, - ) -> Result<()> { + ) -> Option { // TODO self.as_singleton().unwrap().update(cx, |buffer, cx| { buffer.end_transaction(selection_set_ids, cx) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 8a483d37066a9decc7c3e1c68069bff25cbf64bf..ddeb5328cbb430a7f28e9623e60dfa85ba0f674d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -977,8 +977,7 @@ impl Buffer { .flat_map(|req| req.selection_set_ids.clone()) .collect::>(); - self.start_transaction(selection_set_ids.iter().copied()) - .unwrap(); + self.start_transaction(selection_set_ids.iter().copied()); for (row, indent_column) in &indent_columns { self.set_indent_column_for_line(*row, *indent_column, cx); } @@ -1014,8 +1013,7 @@ impl Buffer { } } - self.end_transaction(selection_set_ids.iter().copied(), cx) - .unwrap(); + 
self.end_transaction(selection_set_ids.iter().copied(), cx); } fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { @@ -1055,7 +1053,7 @@ impl Buffer { pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { if self.version == diff.base_version { - self.start_transaction(None).unwrap(); + self.start_transaction(None); let mut offset = 0; for (tag, len) in diff.changes { let range = offset..(offset + len); @@ -1068,7 +1066,7 @@ impl Buffer { } } } - self.end_transaction(None, cx).unwrap(); + self.end_transaction(None, cx); true } else { false @@ -1095,7 +1093,7 @@ impl Buffer { pub fn start_transaction( &mut self, selection_set_ids: impl IntoIterator, - ) -> Result<()> { + ) -> Option { self.start_transaction_at(selection_set_ids, Instant::now()) } @@ -1103,7 +1101,7 @@ impl Buffer { &mut self, selection_set_ids: impl IntoIterator, now: Instant, - ) -> Result<()> { + ) -> Option { self.text.start_transaction_at(selection_set_ids, now) } @@ -1111,7 +1109,7 @@ impl Buffer { &mut self, selection_set_ids: impl IntoIterator, cx: &mut ModelContext, - ) -> Result<()> { + ) -> Option { self.end_transaction_at(selection_set_ids, Instant::now(), cx) } @@ -1120,12 +1118,16 @@ impl Buffer { selection_set_ids: impl IntoIterator, now: Instant, cx: &mut ModelContext, - ) -> Result<()> { - if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) { + ) -> Option { + if let Some((transaction_id, start_version)) = + self.text.end_transaction_at(selection_set_ids, now) + { let was_dirty = start_version != self.saved_version; self.did_edit(&start_version, was_dirty, cx); + Some(transaction_id) + } else { + None } - Ok(()) } fn update_language_server(&mut self) { @@ -1210,7 +1212,7 @@ impl Buffer { return; } - self.start_transaction(None).unwrap(); + self.start_transaction(None); self.pending_autoindent.take(); let autoindent_request = if autoindent && self.language.is_some() { let before_edit = self.snapshot(); @@ -1268,7 +1270,7 @@ impl Buffer { })); } - self.end_transaction(None, cx).unwrap(); + self.end_transaction(None, cx); self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx); } @@ -1474,18 +1476,18 @@ impl Buffer { ) where T: rand::Rng, { - self.start_transaction(None).unwrap(); + self.start_transaction(None); self.text.randomly_edit(rng, old_range_count); - self.end_transaction(None, cx).unwrap(); + self.end_transaction(None, cx); } pub fn randomly_mutate(&mut self, rng: &mut T, cx: &mut ModelContext) where T: rand::Rng, { - self.start_transaction(None).unwrap(); + self.start_transaction(None); self.text.randomly_mutate(rng); - self.end_transaction(None, cx).unwrap(); + self.end_transaction(None, cx); } } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index cb99575a34aa32f9257ae27f3294b7634aeb2f4d..686f088711fb59e93081285f9343907f7e489c1e 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -92,15 +92,15 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) { buffer.edit(Some(2..4), "XYZ", cx); // An empty transaction does not emit any events. - buffer.start_transaction(None).unwrap(); - buffer.end_transaction(None, cx).unwrap(); + buffer.start_transaction(None); + buffer.end_transaction(None, cx); // A transaction containing two edits emits one edited event. 
now += Duration::from_secs(1); - buffer.start_transaction_at(None, now).unwrap(); + buffer.start_transaction_at(None, now); buffer.edit(Some(5..5), "u", cx); buffer.edit(Some(6..6), "w", cx); - buffer.end_transaction_at(None, now, cx).unwrap(); + buffer.end_transaction_at(None, now, cx); // Undoing a transaction emits one edited event. buffer.undo(cx); @@ -167,7 +167,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { // Perform some edits (add parameter and variable reference) // Parsing doesn't begin until the transaction is complete buffer.update(&mut cx, |buf, cx| { - buf.start_transaction(None).unwrap(); + buf.start_transaction(None); let offset = buf.text().find(")").unwrap(); buf.edit(vec![offset..offset], "b: C", cx); @@ -177,7 +177,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { buf.edit(vec![offset..offset], " d; ", cx); assert!(!buf.is_parsing()); - buf.end_transaction(None, cx).unwrap(); + buf.end_transaction(None, cx); assert_eq!(buf.text(), "fn a(b: C) { d; }"); assert!(buf.is_parsing()); }); @@ -342,7 +342,7 @@ fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); let selection_set_id = buffer.add_selection_set::(&[], cx); - buffer.start_transaction(Some(selection_set_id)).unwrap(); + buffer.start_transaction(Some(selection_set_id)); buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx); buffer .update_selection_set( @@ -370,7 +370,7 @@ fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { // Ending the transaction runs the auto-indent. The selection // at the start of the auto-indented row is pushed to the right. - buffer.end_transaction(Some(selection_set_id), cx).unwrap(); + buffer.end_transaction(Some(selection_set_id), cx); assert_eq!(buffer.text(), "fn a(\n \n) {}\n\n"); let selection_ranges = buffer .selection_set(selection_set_id) diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 8b2c8dbfd9aa788691a9a5cf8981caa13a5ce0d5..a470e8431e8464739dd4e69ac5f9513fc2b8f318 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -467,13 +467,13 @@ fn test_history() { } else { unreachable!() }; - buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.start_transaction_at(Some(set_id), now); buffer.edit(vec![2..4], "cd"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); + buffer.end_transaction_at(Some(set_id), now); assert_eq!(buffer.text(), "12cd56"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.start_transaction_at(Some(set_id), now); buffer .update_selection_set(set_id, &buffer.selections_from_ranges(vec![1..3]).unwrap()) .unwrap(); @@ -483,7 +483,7 @@ fn test_history() { assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); now += buffer.history.group_interval + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now).unwrap(); + buffer.start_transaction_at(Some(set_id), now); buffer .update_selection_set(set_id, &buffer.selections_from_ranges(vec![2..2]).unwrap()) .unwrap(); @@ -515,7 +515,7 @@ fn test_history() { assert_eq!(buffer.text(), "ab2cde6"); assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - buffer.start_transaction_at(None, now).unwrap(); + buffer.start_transaction_at(None, now); assert!(buffer.end_transaction_at(None, now).is_none()); buffer.undo(); assert_eq!(buffer.text(), "12cde6"); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 
cb271e6937516b35139d5285ec5b94ffbd6d6c0a..a8eb32888f39c0abf1f185613d7f30dd88bbd6f7 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -38,6 +38,8 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree}; +pub type TransactionId = usize; + pub struct Buffer { snapshot: BufferSnapshot, last_edit: clock::Local, @@ -64,6 +66,7 @@ pub struct BufferSnapshot { #[derive(Clone, Debug)] pub struct Transaction { + id: TransactionId, start: clock::Global, end: clock::Global, edits: Vec, @@ -135,6 +138,7 @@ pub struct History { redo_stack: Vec, transaction_depth: usize, group_interval: Duration, + next_transaction_id: TransactionId, } impl History { @@ -146,6 +150,7 @@ impl History { redo_stack: Vec::new(), transaction_depth: 0, group_interval: Duration::from_millis(300), + next_transaction_id: 0, } } @@ -158,10 +163,13 @@ impl History { start: clock::Global, selections_before: HashMap]>>, now: Instant, - ) { + ) -> Option { self.transaction_depth += 1; if self.transaction_depth == 1 { + let id = self.next_transaction_id; + self.next_transaction_id += 1; self.undo_stack.push(Transaction { + id, start: start.clone(), end: start, edits: Vec::new(), @@ -171,6 +179,9 @@ impl History { first_edit_at: now, last_edit_at: now, }); + Some(id) + } else { + None } } @@ -547,7 +558,7 @@ impl Buffer { None }; - self.start_transaction(None).unwrap(); + self.start_transaction(None); let timestamp = InsertionTimestamp { replica_id: self.replica_id, local: self.local_clock.tick().value, @@ -1141,7 +1152,7 @@ impl Buffer { pub fn start_transaction( &mut self, selection_set_ids: impl IntoIterator, - ) -> Result<()> { + ) -> Option { self.start_transaction_at(selection_set_ids, Instant::now()) } @@ -1149,7 +1160,7 @@ impl Buffer { &mut self, selection_set_ids: impl IntoIterator, now: Instant, - ) -> Result<()> { + ) -> Option { let selections = selection_set_ids .into_iter() .map(|set_id| { @@ -1161,19 +1172,21 @@ impl Buffer { }) .collect(); self.history - .start_transaction(self.version.clone(), selections, now); - Ok(()) + .start_transaction(self.version.clone(), selections, now) } - pub fn end_transaction(&mut self, selection_set_ids: impl IntoIterator) { - self.end_transaction_at(selection_set_ids, Instant::now()); + pub fn end_transaction( + &mut self, + selection_set_ids: impl IntoIterator, + ) -> Option<(TransactionId, clock::Global)> { + self.end_transaction_at(selection_set_ids, Instant::now()) } pub fn end_transaction_at( &mut self, selection_set_ids: impl IntoIterator, now: Instant, - ) -> Option { + ) -> Option<(TransactionId, clock::Global)> { let selections = selection_set_ids .into_iter() .map(|set_id| { @@ -1186,9 +1199,10 @@ impl Buffer { .collect(); if let Some(transaction) = self.history.end_transaction(selections, now) { + let id = transaction.id; let since = transaction.start.clone(); self.history.group(); - Some(since) + Some((id, since)) } else { None } From f35c419f432ea85f657b5ffaf5d536e04ddc029c Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 18:08:26 -0700 Subject: [PATCH 051/196] Return optional transaction ids from undo/redo This will allow the editor to restore selections that it associated with the start or end of a transaction. 
--- crates/editor/src/multi_buffer.rs | 4 ++-- crates/language/src/buffer.rs | 28 ++++++++++++++++++---------- crates/text/src/text.rs | 18 ++++++++++++------ 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index cc1425f1cf7d4aa7bdc5f3a6ffa48ddb78fb4e5a..8163a7bd23554aae8a35843d374820c71130411f 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -224,14 +224,14 @@ impl MultiBuffer { }) } - pub fn undo(&mut self, cx: &mut ModelContext) { + pub fn undo(&mut self, cx: &mut ModelContext) -> Option { // TODO self.as_singleton() .unwrap() .update(cx, |buffer, cx| buffer.undo(cx)) } - pub fn redo(&mut self, cx: &mut ModelContext) { + pub fn redo(&mut self, cx: &mut ModelContext) -> Option { // TODO self.as_singleton() .unwrap() diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index ddeb5328cbb430a7f28e9623e60dfa85ba0f674d..1830e49bf76d7a96a1822a748bcf15e939a7d8cf 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1443,26 +1443,34 @@ impl Buffer { cx.notify(); } - pub fn undo(&mut self, cx: &mut ModelContext) { + pub fn undo(&mut self, cx: &mut ModelContext) -> Option { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - for operation in self.text.undo() { - self.send_operation(Operation::Buffer(operation), cx); + if let Some((transaction_id, operations)) = self.text.undo() { + for operation in operations { + self.send_operation(Operation::Buffer(operation), cx); + } + self.did_edit(&old_version, was_dirty, cx); + Some(transaction_id) + } else { + None } - - self.did_edit(&old_version, was_dirty, cx); } - pub fn redo(&mut self, cx: &mut ModelContext) { + pub fn redo(&mut self, cx: &mut ModelContext) -> Option { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - for operation in self.text.redo() { - self.send_operation(Operation::Buffer(operation), cx); + if let Some((transaction_id, operations)) = self.text.redo() { + for operation in operations { + self.send_operation(Operation::Buffer(operation), cx); + } + self.did_edit(&old_version, was_dirty, cx); + Some(transaction_id) + } else { + None } - - self.did_edit(&old_version, was_dirty, cx); } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index a8eb32888f39c0abf1f185613d7f30dd88bbd6f7..525cbc43662a03af9eadec7b5ea868a6d3c8a1e4 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1221,28 +1221,34 @@ impl Buffer { self.history.ops.values() } - pub fn undo(&mut self) -> Vec { - let mut ops = Vec::new(); + pub fn undo(&mut self) -> Option<(TransactionId, Vec)> { if let Some(transaction) = self.history.pop_undo().cloned() { + let transaction_id = transaction.id; let selections = transaction.selections_before.clone(); + let mut ops = Vec::new(); ops.push(self.undo_or_redo(transaction).unwrap()); for (set_id, selections) in selections { ops.extend(self.restore_selection_set(set_id, selections)); } + Some((transaction_id, ops)) + } else { + None } - ops } - pub fn redo(&mut self) -> Vec { - let mut ops = Vec::new(); + pub fn redo(&mut self) -> Option<(TransactionId, Vec)> { if let Some(transaction) = self.history.pop_redo().cloned() { + let transaction_id = transaction.id; let selections = transaction.selections_after.clone(); + let mut ops = Vec::new(); ops.push(self.undo_or_redo(transaction).unwrap()); for (set_id, selections) in selections { ops.extend(self.restore_selection_set(set_id, 
selections)); } + Some((transaction_id, ops)) + } else { + None } - ops } fn undo_or_redo(&mut self, transaction: Transaction) -> Result { From 8432daef6a02a0c2d8f33dd33ca6c8b7a3d1e270 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 19:23:34 -0700 Subject: [PATCH 052/196] WIP: Start on removing selections from buffer in favor of editor --- crates/editor/src/editor.rs | 317 ++++++++++++++--------------- crates/editor/src/element.rs | 51 ++--- crates/editor/src/items.rs | 4 +- crates/editor/src/multi_buffer.rs | 187 ++--------------- crates/language/src/buffer.rs | 169 ++------------- crates/language/src/tests.rs | 121 +++++------ crates/project/src/worktree.rs | 59 +++--- crates/server/src/rpc.rs | 20 +- crates/text/src/tests.rs | 58 ++---- crates/text/src/text.rs | 327 ++---------------------------- 10 files changed, 352 insertions(+), 961 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ac112a25c88fa0531f2af320fb9da831265bd2c8..c82a5d6d2ffeacbc9f2c39280f2d15f674b74ead 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -23,11 +23,11 @@ use gpui::{ use items::BufferItemHandle; use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, - SelectionSetId, + SelectionSetId, TransactionId, }; pub use multi_buffer::MultiBuffer; use multi_buffer::{ - Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, SelectionSet, ToOffset, ToPoint, + Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint, }; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -36,7 +36,8 @@ use std::{ cell::RefCell, cmp, collections::HashMap, - iter, mem, + iter::{self, FromIterator}, + mem, ops::{Deref, Range, RangeInclusive, Sub}, rc::Rc, sync::Arc, @@ -359,12 +360,14 @@ pub struct Editor { handle: WeakViewHandle, buffer: ModelHandle, display_map: ModelHandle, - selection_set_id: SelectionSetId, + next_selection_id: usize, + selections: Arc<[Selection]>, pending_selection: Option, columnar_selection_tail: Option, - next_selection_id: usize, add_selections_state: Option, select_next_state: Option, + selection_history: + HashMap]>, Option]>>)>, autoclose_stack: Vec, select_larger_syntax_node_stack: Vec]>>, active_diagnostics: Option, @@ -487,28 +490,27 @@ impl Editor { .detach(); let mut next_selection_id = 0; - let selection_set_id = buffer.update(cx, |buffer, cx| { - buffer.add_selection_set( - &[Selection { - id: post_inc(&mut next_selection_id), - start: 0, - end: 0, - reversed: false, - goal: SelectionGoal::None, - }], - cx, - ) - }); + let selections = Arc::from( + &[Selection { + id: post_inc(&mut next_selection_id), + start: Anchor::min(), + end: Anchor::min(), + reversed: false, + goal: SelectionGoal::None, + }][..], + ); + Self { handle: cx.weak_handle(), buffer, display_map, - selection_set_id, + selections, pending_selection: None, columnar_selection_tail: None, next_selection_id, add_selections_state: None, select_next_state: None, + selection_history: Default::default(), autoclose_stack: Default::default(), select_larger_syntax_node_stack: Vec::new(), active_diagnostics: None, @@ -636,7 +638,7 @@ impl Editor { let first_cursor_top; let last_cursor_bottom; if autoscroll == Autoscroll::Newest { - let newest_selection = self.newest_selection::(cx); + let newest_selection = self.newest_selection::(&display_map.buffer_snapshot, cx); first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32; 
last_cursor_bottom = first_cursor_top + 1.; } else { @@ -769,7 +771,9 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let tail = self.newest_selection::(cx).tail(); + let tail = self + .newest_selection::(&display_map.buffer_snapshot, cx) + .tail(); self.begin_selection(position, false, click_count, cx); let position = position.to_offset(&display_map, Bias::Left); @@ -851,7 +855,7 @@ impl Editor { self.update_selections::(Vec::new(), None, cx); } else if click_count > 1 { // Remove the newest selection since it was only added as part of this multi-click. - let newest_selection = self.newest_selection::(cx); + let newest_selection = self.newest_selection::(buffer, cx); let mut selections = self.selections(cx); selections.retain(|selection| selection.id != newest_selection.id); self.update_selections::(selections, None, cx) @@ -874,7 +878,9 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let tail = self.newest_selection::(cx).tail(); + let tail = self + .newest_selection::(&display_map.buffer_snapshot, cx) + .tail(); self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); self.select_columns( @@ -2812,7 +2818,7 @@ impl Editor { pub fn show_next_diagnostic(&mut self, _: &ShowNextDiagnostic, cx: &mut ViewContext) { let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self.newest_selection::(cx); + let selection = self.newest_selection::(&buffer, cx); let active_primary_range = self.active_diagnostics.as_ref().map(|active_diagnostics| { active_diagnostics .primary_range @@ -2992,120 +2998,119 @@ impl Editor { } } - pub fn active_selection_sets<'a>( - &'a self, - cx: &'a AppContext, - ) -> impl 'a + Iterator { - let buffer = self.buffer.read(cx); - let replica_id = buffer.replica_id(); - buffer - .selection_sets(cx) - .filter(move |(set_id, set)| { - set.active && (set_id.replica_id != replica_id || **set_id == self.selection_set_id) - }) - .map(|(set_id, _)| *set_id) - } - pub fn intersecting_selections<'a>( &'a self, set_id: SelectionSetId, range: Range, cx: &'a mut MutableAppContext, ) -> Vec> { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let buffer = self.buffer.read(cx); - - let pending_selection = if set_id == self.selection_set_id { - self.pending_selection.as_ref().and_then(|pending| { - let selection_start = pending.selection.start.to_display_point(&display_map); - let selection_end = pending.selection.end.to_display_point(&display_map); - if selection_start <= range.end || selection_end <= range.end { - Some(Selection { - id: pending.selection.id, - start: selection_start, - end: selection_end, - reversed: pending.selection.reversed, - goal: pending.selection.goal, - }) - } else { - None - } - }) - } else { - None - }; - - let range = (range.start.to_offset(&display_map, Bias::Left), Bias::Left) - ..(range.end.to_offset(&display_map, Bias::Left), Bias::Right); - buffer - .selection_set(set_id, cx) - .unwrap() - .intersecting_selections::(range, &buffer.read(cx)) - .map(move |s| Selection { - id: s.id, - start: s.start.to_display_point(&display_map), - end: s.end.to_display_point(&display_map), - reversed: s.reversed, - goal: s.goal, - }) - .chain(pending_selection) - .collect() + todo!() + // let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + // let buffer = self.buffer.read(cx); + + // let pending_selection = if set_id == self.selection_set_id { + // 
self.pending_selection.as_ref().and_then(|pending| { + // let selection_start = pending.selection.start.to_display_point(&display_map); + // let selection_end = pending.selection.end.to_display_point(&display_map); + // if selection_start <= range.end || selection_end <= range.end { + // Some(Selection { + // id: pending.selection.id, + // start: selection_start, + // end: selection_end, + // reversed: pending.selection.reversed, + // goal: pending.selection.goal, + // }) + // } else { + // None + // } + // }) + // } else { + // None + // }; + + // let range = (range.start.to_offset(&display_map, Bias::Left), Bias::Left) + // ..(range.end.to_offset(&display_map, Bias::Left), Bias::Right); + // buffer + // .selection_set(set_id, cx) + // .unwrap() + // .intersecting_selections::(range, &buffer.read(cx)) + // .map(move |s| Selection { + // id: s.id, + // start: s.start.to_display_point(&display_map), + // end: s.end.to_display_point(&display_map), + // reversed: s.reversed, + // goal: s.goal, + // }) + // .chain(pending_selection) + // .collect() } pub fn selections<'a, D>(&self, cx: &'a AppContext) -> Vec> where D: 'a + TextDimension + Ord + Sub, { - let buffer = self.buffer.read(cx).snapshot(cx); - let mut selections = self.selection_set(cx).selections::(&buffer).peekable(); - let mut pending_selection = self.pending_selection(cx); - - iter::from_fn(move || { - if let Some(pending) = pending_selection.as_mut() { - while let Some(next_selection) = selections.peek() { - if pending.start <= next_selection.end && pending.end >= next_selection.start { - let next_selection = selections.next().unwrap(); - if next_selection.start < pending.start { - pending.start = next_selection.start; - } - if next_selection.end > pending.end { - pending.end = next_selection.end; - } - } else if next_selection.end < pending.start { - return selections.next(); - } else { - break; - } - } - - pending_selection.take() - } else { - selections.next() - } - }) - .collect() + // let buffer = self.buffer.read(cx).snapshot(cx); + // let mut selections = self.selection_set(cx).selections::(&buffer).peekable(); + // let mut pending_selection = self.pending_selection(cx); + + // iter::from_fn(move || { + // if let Some(pending) = pending_selection.as_mut() { + // while let Some(next_selection) = selections.peek() { + // if pending.start <= next_selection.end && pending.end >= next_selection.start { + // let next_selection = selections.next().unwrap(); + // if next_selection.start < pending.start { + // pending.start = next_selection.start; + // } + // if next_selection.end > pending.end { + // pending.end = next_selection.end; + // } + // } else if next_selection.end < pending.start { + // return selections.next(); + // } else { + // break; + // } + // } + + // pending_selection.take() + // } else { + // selections.next() + // } + // }) + // .collect() + todo!() } fn pending_selection>( &self, + snapshot: &MultiBufferSnapshot, cx: &AppContext, ) -> Option> { - let buffer = self.buffer.read(cx).read(cx); - self.pending_selection.as_ref().map(|pending| Selection { - id: pending.selection.id, - start: pending.selection.start.summary::(&buffer), - end: pending.selection.end.summary::(&buffer), - reversed: pending.selection.reversed, - goal: pending.selection.goal, - }) + self.pending_selection + .as_ref() + .map(|pending| self.resolve_selection(&pending.selection, &snapshot, cx)) + } + + fn resolve_selection>( + &self, + selection: &Selection, + buffer: &MultiBufferSnapshot, + cx: &AppContext, + ) -> Selection { + Selection { + 
id: selection.id, + start: selection.start.summary::(&buffer), + end: selection.end.summary::(&buffer), + reversed: selection.reversed, + goal: selection.goal, + } } fn selection_count<'a>(&self, cx: &'a AppContext) -> usize { - let mut selection_count = self.selection_set(cx).len(); + let mut count = self.selections.len(); if self.pending_selection.is_some() { - selection_count += 1; + count += 1; } - selection_count + count } pub fn oldest_selection>( @@ -3113,31 +3118,29 @@ impl Editor { snapshot: &MultiBufferSnapshot, cx: &AppContext, ) -> Selection { - self.selection_set(cx) - .oldest_selection(snapshot) - .or_else(|| self.pending_selection(cx)) + self.selections + .iter() + .min_by_key(|s| s.id) + .map(|selection| self.resolve_selection(selection, snapshot, cx)) + .or_else(|| self.pending_selection(snapshot, cx)) .unwrap() } pub fn newest_selection>( &self, + snapshot: &MultiBufferSnapshot, cx: &AppContext, ) -> Selection { - self.pending_selection(cx) + self.pending_selection(snapshot, cx) .or_else(|| { - self.selection_set(cx) - .newest_selection(&self.buffer.read(cx).read(cx)) + self.selections + .iter() + .min_by_key(|s| s.id) + .map(|selection| self.resolve_selection(selection, snapshot, cx)) }) .unwrap() } - fn selection_set<'a>(&self, cx: &'a AppContext) -> &'a SelectionSet { - self.buffer - .read(cx) - .selection_set(self.selection_set_id, cx) - .unwrap() - } - pub fn update_selections( &mut self, mut selections: Vec>, @@ -3193,11 +3196,13 @@ impl Editor { } self.pause_cursor_blinking(cx); - self.buffer.update(cx, |buffer, cx| { - buffer - .update_selection_set(self.selection_set_id, &selections, cx) - .unwrap(); - }); + self.selections = Arc::from_iter(selections.into_iter().map(|selection| Selection { + id: selection.id, + start: buffer.anchor_before(selection.start), + end: buffer.anchor_before(selection.end), + reversed: selection.reversed, + goal: selection.goal, + })); } fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext) { @@ -3208,13 +3213,13 @@ impl Editor { fn start_transaction(&mut self, cx: &mut ViewContext) { self.end_selection(cx); self.buffer.update(cx, |buffer, cx| { - buffer.start_transaction([self.selection_set_id], cx); + buffer.start_transaction(cx); }); } fn end_transaction(&self, cx: &mut ViewContext) { self.buffer.update(cx, |buffer, cx| { - buffer.end_transaction([self.selection_set_id], cx); + buffer.end_transaction(cx); }); } @@ -3549,14 +3554,6 @@ pub enum Event { impl Entity for Editor { type Event = Event; - - fn release(&mut self, cx: &mut MutableAppContext) { - self.buffer.update(cx, |buffer, cx| { - buffer - .remove_selection_set(self.selection_set_id, cx) - .unwrap(); - }); - } } impl View for Editor { @@ -3579,19 +3576,11 @@ impl View for Editor { fn on_focus(&mut self, cx: &mut ViewContext) { self.focused = true; self.blink_cursors(self.blink_epoch, cx); - self.buffer.update(cx, |buffer, cx| { - buffer - .set_active_selection_set(Some(self.selection_set_id), cx) - .unwrap(); - }); } fn on_blur(&mut self, cx: &mut ViewContext) { self.focused = false; self.show_local_cursors = false; - self.buffer.update(cx, |buffer, cx| { - buffer.set_active_selection_set(None, cx).unwrap(); - }); cx.emit(Event::Blurred); cx.notify(); } @@ -3710,6 +3699,8 @@ pub fn diagnostic_style( #[cfg(test)] mod tests { + use std::mem; + use super::*; use language::LanguageConfig; use text::Point; @@ -5670,20 +5661,18 @@ mod tests { impl Editor { fn selection_ranges(&self, cx: &mut MutableAppContext) -> Vec> { - self.intersecting_selections( - 
self.selection_set_id, - DisplayPoint::zero()..self.max_point(cx), - cx, - ) - .into_iter() - .map(|s| { - if s.reversed { - s.end..s.start - } else { - s.start..s.end - } - }) - .collect() + let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + self.selections + .iter() + .map(|s| { + let mut range = + s.start.to_display_point(&snapshot)..s.end.to_display_point(&snapshot); + if s.reversed { + mem::swap(&mut range.start, &mut range.end); + } + range + }) + .collect() } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 89c324a1ecd7dcde0c82614917dc15299a6078ca..27b29e678c16859560f4ffd3500d7386fc2124f7 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -733,34 +733,37 @@ impl Element for EditorElement { let scroll_top = scroll_position.y() * line_height; let end_row = ((scroll_top + size.y()) / line_height).ceil() as u32 + 1; // Add 1 to ensure selections bleed off screen - let mut selections = HashMap::new(); - let mut active_rows = BTreeMap::new(); + let selections = HashMap::new(); + let active_rows = BTreeMap::new(); let mut highlighted_row = None; self.update_view(cx.app, |view, cx| { highlighted_row = view.highlighted_row(); - for selection_set_id in view.active_selection_sets(cx).collect::>() { - let replica_selections = view.intersecting_selections( - selection_set_id, - DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0), - cx, - ); - for selection in &replica_selections { - if selection_set_id == view.selection_set_id { - let is_empty = selection.start == selection.end; - let selection_start = snapshot.prev_row_boundary(selection.start).0; - let selection_end = snapshot.next_row_boundary(selection.end).0; - for row in cmp::max(selection_start.row(), start_row) - ..=cmp::min(selection_end.row(), end_row) - { - let contains_non_empty_selection = - active_rows.entry(row).or_insert(!is_empty); - *contains_non_empty_selection |= !is_empty; - } - } - } - selections.insert(selection_set_id.replica_id, replica_selections); - } + // TODO: Get this working with editors owning their own selections + + // for selection_set_id in view.active_selection_sets(cx).collect::>() { + // let replica_selections = view.intersecting_selections( + // selection_set_id, + // DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0), + // cx, + // ); + // for selection in &replica_selections { + // if selection_set_id == view.selection_set_id { + // let is_empty = selection.start == selection.end; + // let selection_start = snapshot.prev_row_boundary(selection.start).0; + // let selection_end = snapshot.next_row_boundary(selection.end).0; + // for row in cmp::max(selection_start.row(), start_row) + // ..=cmp::min(selection_end.row(), end_row) + // { + // let contains_non_empty_selection = + // active_rows.entry(row).or_insert(!is_empty); + // *contains_non_empty_selection |= !is_empty; + // } + // } + // } + + // selections.insert(selection_set_id.replica_id, replica_selections); + // } }); let line_number_layouts = self.layout_rows(start_row..end_row, &active_rows, &snapshot, cx); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 2ecfac3646db355b0e6cb05ca87d1214a1ac5d64..cbc9ff223d9ae33a16bf4afe6372e904cdf857f0 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -322,8 +322,10 @@ impl DiagnosticMessage { fn update(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); - let cursor_position = editor.newest_selection::(cx).head(); let 
buffer = editor.buffer().read(cx); + let cursor_position = editor + .newest_selection::(&buffer.read(cx), cx) + .head(); let new_diagnostic = buffer .read(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 8163a7bd23554aae8a35843d374820c71130411f..5f2ca98f8d6c3a675766cd94833bbc47f311b559 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -24,7 +24,7 @@ use text::{ locator::Locator, rope::TextDimension, subscription::{Subscription, Topic}, - AnchorRangeExt as _, Edit, Point, PointUtf16, Selection, SelectionSetId, TextSummary, + AnchorRangeExt as _, Edit, Point, PointUtf16, SelectionSetId, TextSummary, }; use theme::SyntaxTheme; @@ -113,7 +113,7 @@ impl MultiBuffer { pub fn singleton(buffer: ModelHandle, cx: &mut ModelContext) -> Self { let mut this = Self::new(buffer.read(cx).replica_id()); this.singleton = true; - this.push( + this.push_excerpt( ExcerptProperties { buffer: &buffer, range: text::Anchor::min()..text::Anchor::max(), @@ -202,26 +202,18 @@ impl MultiBuffer { }); } - pub fn start_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - cx: &mut ModelContext, - ) -> Option { + pub fn start_transaction(&mut self, cx: &mut ModelContext) -> Option { // TODO self.as_singleton() .unwrap() - .update(cx, |buffer, _| buffer.start_transaction(selection_set_ids)) + .update(cx, |buffer, _| buffer.start_transaction()) } - pub fn end_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - cx: &mut ModelContext, - ) -> Option { + pub fn end_transaction(&mut self, cx: &mut ModelContext) -> Option { // TODO - self.as_singleton().unwrap().update(cx, |buffer, cx| { - buffer.end_transaction(selection_set_ids, cx) - }) + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.end_transaction(cx)) } pub fn undo(&mut self, cx: &mut ModelContext) -> Option { @@ -238,153 +230,11 @@ impl MultiBuffer { .update(cx, |buffer, cx| buffer.redo(cx)) } - pub fn selection_set(&self, set_id: SelectionSetId, cx: &AppContext) -> Result<&SelectionSet> { - // TODO - let set = self - .as_singleton() - .unwrap() - .read(cx) - .selection_set(set_id)?; - let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone(); - - let selection_sets: &mut HashMap = - unsafe { &mut *(&self.selection_sets as *const _ as *mut _) }; - selection_sets.insert( - set_id, - SelectionSet { - id: set.id, - active: set.active, - selections: set - .selections - .iter() - .map(|selection| Selection { - id: selection.id, - start: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: selection.start.clone(), - }, - end: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: selection.end.clone(), - }, - reversed: selection.reversed, - goal: selection.goal, - }) - .collect(), - }, - ); - Ok(self.selection_sets.get(&set.id).unwrap()) - } - - pub fn add_selection_set( - &mut self, - selections: &[Selection], - cx: &mut ModelContext, - ) -> SelectionSetId { - // TODO - let snapshot = self.read(cx); - self.as_singleton().unwrap().update(cx, |buffer, cx| { - buffer.add_selection_set( - &selections - .iter() - .map(|selection| Selection { - id: selection.id, - start: selection.start.to_offset(&snapshot), - end: selection.end.to_offset(&snapshot), - reversed: selection.reversed, - goal: selection.goal, - }) - .collect::>(), - cx, - ) - }) - } - - pub fn remove_selection_set( - &mut self, - set_id: SelectionSetId, - cx: &mut ModelContext, - ) -> Result<()> 
{ - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.remove_selection_set(set_id, cx)) - } - - pub fn update_selection_set( - &mut self, - set_id: SelectionSetId, - selections: &[Selection], - cx: &mut ModelContext, - ) -> Result<()> { - // TODO - let snapshot = self.read(cx); - self.as_singleton().unwrap().update(cx, |buffer, cx| { - buffer.update_selection_set( - set_id, - &selections - .iter() - .map(|selection| Selection { - id: selection.id, - start: selection.start.to_offset(&snapshot), - end: selection.end.to_offset(&snapshot), - reversed: selection.reversed, - goal: selection.goal, - }) - .collect::>(), - cx, - ) - }) - } - - pub fn set_active_selection_set( + pub fn push_excerpt( &mut self, - set_id: Option, + props: ExcerptProperties, cx: &mut ModelContext, - ) -> Result<()> { - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.set_active_selection_set(set_id, cx)) - } - - pub fn selection_sets( - &self, - cx: &AppContext, - ) -> impl Iterator { - let excerpt_id = self.snapshot.borrow().excerpts.first().unwrap().id.clone(); - let selection_sets: &mut HashMap = - unsafe { &mut *(&self.selection_sets as *const _ as *mut _) }; - selection_sets.clear(); - for (selection_set_id, set) in self.as_singleton().unwrap().read(cx).selection_sets() { - selection_sets.insert( - *selection_set_id, - SelectionSet { - id: set.id, - active: set.active, - selections: set - .selections - .iter() - .map(|selection| Selection { - id: selection.id, - start: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: selection.start.clone(), - }, - end: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: selection.end.clone(), - }, - reversed: selection.reversed, - goal: selection.goal, - }) - .collect(), - }, - ); - } - self.selection_sets.iter() - } - - pub fn push(&mut self, props: ExcerptProperties, cx: &mut ModelContext) -> ExcerptId + ) -> ExcerptId where O: text::ToOffset, { @@ -555,13 +405,6 @@ impl MultiBuffer { .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx)); self.sync(cx); } - - pub fn randomly_mutate(&mut self, rng: &mut R, cx: &mut ModelContext) { - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.randomly_mutate(rng, cx)); - self.sync(cx); - } } impl Entity for MultiBuffer { @@ -1389,7 +1232,7 @@ mod tests { let subscription = multibuffer.update(cx, |multibuffer, cx| { let subscription = multibuffer.subscribe(); - multibuffer.push( + multibuffer.push_excerpt( ExcerptProperties { buffer: &buffer_1, range: Point::new(1, 2)..Point::new(2, 5), @@ -1405,7 +1248,7 @@ mod tests { }] ); - multibuffer.push( + multibuffer.push_excerpt( ExcerptProperties { buffer: &buffer_1, range: Point::new(3, 3)..Point::new(4, 4), @@ -1413,7 +1256,7 @@ mod tests { }, cx, ); - multibuffer.push( + multibuffer.push_excerpt( ExcerptProperties { buffer: &buffer_2, range: Point::new(3, 1)..Point::new(3, 3), @@ -1529,7 +1372,7 @@ mod tests { ); let excerpt_id = list.update(cx, |list, cx| { - list.push( + list.push_excerpt( ExcerptProperties { buffer: &buffer_handle, range: start_ix..end_ix, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1830e49bf76d7a96a1822a748bcf15e939a7d8cf..b408bf589b0e21ac40cee7e8d772443bb30ad623 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -178,7 +178,6 @@ struct SyntaxTree { #[derive(Clone)] struct AutoindentRequest { - selection_set_ids: HashSet, before_edit: BufferSnapshot, edited: Vec, inserted: Option>>, @@ -277,10 +276,6 @@ impl Buffer { 
.into_iter() .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op))); buffer.apply_ops(ops)?; - for set in message.selections { - let set = proto::deserialize_selection_set(set); - buffer.add_raw_selection_set(set.id, set); - } let mut this = Self::build(buffer, file); this.apply_diagnostic_update( Arc::from(proto::deserialize_diagnostics(message.diagnostics)), @@ -299,10 +294,7 @@ impl Buffer { .history() .map(proto::serialize_edit_operation) .collect(), - selections: self - .selection_sets() - .map(|(_, set)| proto::serialize_selection_set(set)) - .collect(), + selections: Vec::new(), diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), } } @@ -971,49 +963,11 @@ impl Buffer { indent_columns: BTreeMap, cx: &mut ModelContext, ) { - let selection_set_ids = self - .autoindent_requests - .drain(..) - .flat_map(|req| req.selection_set_ids.clone()) - .collect::>(); - - self.start_transaction(selection_set_ids.iter().copied()); + self.start_transaction(); for (row, indent_column) in &indent_columns { self.set_indent_column_for_line(*row, *indent_column, cx); } - - for selection_set_id in &selection_set_ids { - if let Ok(set) = self.selection_set(*selection_set_id) { - let new_selections = set - .selections::(&*self) - .map(|selection| { - if selection.start.column == 0 { - let delta = Point::new( - 0, - indent_columns - .get(&selection.start.row) - .copied() - .unwrap_or(0), - ); - if delta.column > 0 { - return Selection { - id: selection.id, - goal: selection.goal, - reversed: selection.reversed, - start: selection.start + delta, - end: selection.end + delta, - }; - } - } - selection - }) - .collect::>(); - self.update_selection_set(*selection_set_id, &new_selections, cx) - .unwrap(); - } - } - - self.end_transaction(selection_set_ids.iter().copied(), cx); + self.end_transaction(cx); } fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext) { @@ -1053,7 +1007,7 @@ impl Buffer { pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext) -> bool { if self.version == diff.base_version { - self.start_transaction(None); + self.start_transaction(); let mut offset = 0; for (tag, len) in diff.changes { let range = offset..(offset + len); @@ -1066,7 +1020,7 @@ impl Buffer { } } } - self.end_transaction(None, cx); + self.end_transaction(cx); true } else { false @@ -1090,38 +1044,24 @@ impl Buffer { self.text.subscribe() } - pub fn start_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - ) -> Option { - self.start_transaction_at(selection_set_ids, Instant::now()) + pub fn start_transaction(&mut self) -> Option { + self.start_transaction_at(Instant::now()) } - pub(crate) fn start_transaction_at( - &mut self, - selection_set_ids: impl IntoIterator, - now: Instant, - ) -> Option { - self.text.start_transaction_at(selection_set_ids, now) + pub(crate) fn start_transaction_at(&mut self, now: Instant) -> Option { + self.text.start_transaction_at(now) } - pub fn end_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - cx: &mut ModelContext, - ) -> Option { - self.end_transaction_at(selection_set_ids, Instant::now(), cx) + pub fn end_transaction(&mut self, cx: &mut ModelContext) -> Option { + self.end_transaction_at(Instant::now(), cx) } pub(crate) fn end_transaction_at( &mut self, - selection_set_ids: impl IntoIterator, now: Instant, cx: &mut ModelContext, ) -> Option { - if let Some((transaction_id, start_version)) = - self.text.end_transaction_at(selection_set_ids, now) - { + if let Some((transaction_id, 
start_version)) = self.text.end_transaction_at(now) { let was_dirty = start_version != self.saved_version; self.did_edit(&start_version, was_dirty, cx); Some(transaction_id) @@ -1212,7 +1152,7 @@ impl Buffer { return; } - self.start_transaction(None); + self.start_transaction(); self.pending_autoindent.take(); let autoindent_request = if autoindent && self.language.is_some() { let before_edit = self.snapshot(); @@ -1256,21 +1196,14 @@ impl Buffer { ); } - let selection_set_ids = self - .text - .peek_undo_stack() - .unwrap() - .starting_selection_set_ids() - .collect(); self.autoindent_requests.push(Arc::new(AutoindentRequest { - selection_set_ids, before_edit, edited, inserted, })); } - self.end_transaction(None, cx); + self.end_transaction(cx); self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx); } @@ -1298,55 +1231,6 @@ impl Buffer { self.language.as_ref().and_then(|l| l.grammar.as_ref()) } - pub fn add_selection_set( - &mut self, - selections: &[Selection], - cx: &mut ModelContext, - ) -> SelectionSetId { - let operation = self.text.add_selection_set(selections); - if let text::Operation::UpdateSelections { set_id, .. } = &operation { - let set_id = *set_id; - cx.notify(); - self.send_operation(Operation::Buffer(operation), cx); - set_id - } else { - unreachable!() - } - } - - pub fn update_selection_set( - &mut self, - set_id: SelectionSetId, - selections: &[Selection], - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.text.update_selection_set(set_id, selections)?; - cx.notify(); - self.send_operation(Operation::Buffer(operation), cx); - Ok(()) - } - - pub fn set_active_selection_set( - &mut self, - set_id: Option, - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.text.set_active_selection_set(set_id)?; - self.send_operation(Operation::Buffer(operation), cx); - Ok(()) - } - - pub fn remove_selection_set( - &mut self, - set_id: SelectionSetId, - cx: &mut ModelContext, - ) -> Result<()> { - let operation = self.text.remove_selection_set(set_id)?; - cx.notify(); - self.send_operation(Operation::Buffer(operation), cx); - Ok(()) - } - pub fn apply_ops>( &mut self, ops: I, @@ -1447,10 +1331,8 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - if let Some((transaction_id, operations)) = self.text.undo() { - for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); - } + if let Some((transaction_id, operation)) = self.text.undo() { + self.send_operation(Operation::Buffer(operation), cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -1462,10 +1344,8 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); - if let Some((transaction_id, operations)) = self.text.redo() { - for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); - } + if let Some((transaction_id, operation)) = self.text.redo() { + self.send_operation(Operation::Buffer(operation), cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -1484,18 +1364,9 @@ impl Buffer { ) where T: rand::Rng, { - self.start_transaction(None); + self.start_transaction(); self.text.randomly_edit(rng, old_range_count); - self.end_transaction(None, cx); - } - - pub fn randomly_mutate(&mut self, rng: &mut T, cx: &mut ModelContext) - where - T: rand::Rng, - { - self.start_transaction(None); - self.text.randomly_mutate(rng); - self.end_transaction(None, cx); + self.end_transaction(cx); } } diff --git 
a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 686f088711fb59e93081285f9343907f7e489c1e..6e2bc43dcd00e36a33cc0812e39a0668088a5763 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -92,15 +92,15 @@ fn test_edit_events(cx: &mut gpui::MutableAppContext) { buffer.edit(Some(2..4), "XYZ", cx); // An empty transaction does not emit any events. - buffer.start_transaction(None); - buffer.end_transaction(None, cx); + buffer.start_transaction(); + buffer.end_transaction(cx); // A transaction containing two edits emits one edited event. now += Duration::from_secs(1); - buffer.start_transaction_at(None, now); + buffer.start_transaction_at(now); buffer.edit(Some(5..5), "u", cx); buffer.edit(Some(6..6), "w", cx); - buffer.end_transaction_at(None, now, cx); + buffer.end_transaction_at(now, cx); // Undoing a transaction emits one edited event. buffer.undo(cx); @@ -167,7 +167,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { // Perform some edits (add parameter and variable reference) // Parsing doesn't begin until the transaction is complete buffer.update(&mut cx, |buf, cx| { - buf.start_transaction(None); + buf.start_transaction(); let offset = buf.text().find(")").unwrap(); buf.edit(vec![offset..offset], "b: C", cx); @@ -177,7 +177,7 @@ async fn test_reparse(mut cx: gpui::TestAppContext) { buf.edit(vec![offset..offset], " d; ", cx); assert!(!buf.is_parsing()); - buf.end_transaction(None, cx); + buf.end_transaction(cx); assert_eq!(buf.text(), "fn a(b: C) { d; }"); assert!(buf.is_parsing()); }); @@ -333,59 +333,62 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) { }); } -#[gpui::test] -fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { - cx.add_model(|cx| { - let text = "fn a() {}"; - - let mut buffer = - Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); - - let selection_set_id = buffer.add_selection_set::(&[], cx); - buffer.start_transaction(Some(selection_set_id)); - buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx); - buffer - .update_selection_set( - selection_set_id, - &[ - Selection { - id: 0, - start: Point::new(1, 0), - end: Point::new(1, 0), - reversed: false, - goal: SelectionGoal::None, - }, - Selection { - id: 1, - start: Point::new(4, 0), - end: Point::new(4, 0), - reversed: false, - goal: SelectionGoal::None, - }, - ], - cx, - ) - .unwrap(); - assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n"); - - // Ending the transaction runs the auto-indent. The selection - // at the start of the auto-indented row is pushed to the right. 
- buffer.end_transaction(Some(selection_set_id), cx); - assert_eq!(buffer.text(), "fn a(\n \n) {}\n\n"); - let selection_ranges = buffer - .selection_set(selection_set_id) - .unwrap() - .selections::(&buffer) - .map(|selection| selection.start.to_point(&buffer)..selection.end.to_point(&buffer)) - .collect::>(); - - assert_eq!(selection_ranges[0], empty(Point::new(1, 4))); - assert_eq!(selection_ranges[1], empty(Point::new(4, 0))); - - buffer - }); -} - +// We need another approach to managing selections with auto-indent + +// #[gpui::test] +// fn test_autoindent_moves_selections(cx: &mut MutableAppContext) { +// cx.add_model(|cx| { +// let text = "fn a() {}"; + +// let mut buffer = +// Buffer::new(0, text, cx).with_language(Some(Arc::new(rust_lang())), None, cx); + +// let selection_set_id = buffer.add_selection_set::(&[], cx); +// buffer.start_transaction(); +// buffer.edit_with_autoindent([5..5, 9..9], "\n\n", cx); +// buffer +// .update_selection_set( +// selection_set_id, +// &[ +// Selection { +// id: 0, +// start: Point::new(1, 0), +// end: Point::new(1, 0), +// reversed: false, +// goal: SelectionGoal::None, +// }, +// Selection { +// id: 1, +// start: Point::new(4, 0), +// end: Point::new(4, 0), +// reversed: false, +// goal: SelectionGoal::None, +// }, +// ], +// cx, +// ) +// .unwrap(); +// assert_eq!(buffer.text(), "fn a(\n\n) {}\n\n"); + +// // TODO! Come up with a different approach to moving selections now that we don't manage selection sets in the buffer + +// // Ending the transaction runs the auto-indent. The selection +// // at the start of the auto-indented row is pushed to the right. +// buffer.end_transaction(cx); +// assert_eq!(buffer.text(), "fn a(\n \n) {}\n\n"); +// let selection_ranges = buffer +// .selection_set(selection_set_id) +// .unwrap() +// .selections::(&buffer) +// .map(|selection| selection.start.to_point(&buffer)..selection.end.to_point(&buffer)) +// .collect::>(); + +// assert_eq!(selection_ranges[0], empty(Point::new(1, 4))); +// assert_eq!(selection_ranges[1], empty(Point::new(4, 0))); + +// buffer +// }); +// } #[gpui::test] fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) { cx.add_model(|cx| { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 020bf64b7f326de0b5356bede785486cdc7b85c3..fe6cb2e39412b72f3183b2fe06e80cb7f5815f06 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3559,7 +3559,6 @@ mod tests { #[gpui::test] async fn test_buffer_file_changes_on_disk(mut cx: gpui::TestAppContext) { use std::fs; - use text::{Point, Selection, SelectionGoal}; let initial_contents = "aaa\nbbbbb\nc\n"; let dir = temp_tree(json!({ "the-file": initial_contents })); @@ -3588,22 +3587,23 @@ mod tests { .await .unwrap(); + // TODO // Add a cursor on each row. 
- let selection_set_id = buffer.update(&mut cx, |buffer, cx| { - assert!(!buffer.is_dirty()); - buffer.add_selection_set( - &(0..3) - .map(|row| Selection { - id: row as usize, - start: Point::new(row, 1), - end: Point::new(row, 1), - reversed: false, - goal: SelectionGoal::None, - }) - .collect::>(), - cx, - ) - }); + // let selection_set_id = buffer.update(&mut cx, |buffer, cx| { + // assert!(!buffer.is_dirty()); + // buffer.add_selection_set( + // &(0..3) + // .map(|row| Selection { + // id: row as usize, + // start: Point::new(row, 1), + // end: Point::new(row, 1), + // reversed: false, + // goal: SelectionGoal::None, + // }) + // .collect::>(), + // cx, + // ) + // }); // Change the file on disk, adding two new lines of text, and removing // one line. @@ -3626,19 +3626,20 @@ mod tests { assert!(!buffer.is_dirty()); assert!(!buffer.has_conflict()); - let cursor_positions = buffer - .selection_set(selection_set_id) - .unwrap() - .selections::(&*buffer) - .map(|selection| { - assert_eq!(selection.start, selection.end); - selection.start - }) - .collect::>(); - assert_eq!( - cursor_positions, - [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)] - ); + // TODO + // let cursor_positions = buffer + // .selection_set(selection_set_id) + // .unwrap() + // .selections::(&*buffer) + // .map(|selection| { + // assert_eq!(selection.start, selection.end); + // selection.start + // }) + // .collect::>(); + // assert_eq!( + // cursor_positions, + // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)] + // ); }); // Modify the buffer diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 54f2432b21e16609afd52173b8d8ce07700590e3..7220740af47c2baab2d4bb47f4781237395563c9 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1045,13 +1045,14 @@ mod tests { .await .unwrap(); - // Create a selection set as client B and see that selection set as client A. let editor_b = cx_b.add_view(window_b, |cx| { Editor::for_buffer(buffer_b, |cx| EditorSettings::test(cx), cx) }); - buffer_a - .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 1) - .await; + // TODO + // // Create a selection set as client B and see that selection set as client A. + // buffer_a + // .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 1) + // .await; // Edit the buffer as client B and see that edit as client A. editor_b.update(&mut cx_b, |editor, cx| { @@ -1061,11 +1062,12 @@ mod tests { .condition(&cx_a, |buffer, _| buffer.text() == "ok, b-contents") .await; - // Remove the selection set as client B, see those selections disappear as client A. - cx_b.update(move |_| drop(editor_b)); - buffer_a - .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 0) - .await; + // TODO + // // Remove the selection set as client B, see those selections disappear as client A. + // cx_b.update(move |_| drop(editor_b)); + // buffer_a + // .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 0) + // .await; // Close the buffer as client A, see that the buffer is closed. cx_a.update(move |_| drop(buffer_a)); diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index a470e8431e8464739dd4e69ac5f9513fc2b8f318..94523de5a6298272fbd693aa67d2ef718825e8ae 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -460,63 +460,41 @@ fn test_history() { let mut now = Instant::now(); let mut buffer = Buffer::new(0, 0, History::new("123456".into())); - let set_id = if let Operation::UpdateSelections { set_id, .. 
} = - buffer.add_selection_set(&buffer.selections_from_ranges(vec![4..4]).unwrap()) - { - set_id - } else { - unreachable!() - }; - buffer.start_transaction_at(Some(set_id), now); + buffer.start_transaction_at(now); buffer.edit(vec![2..4], "cd"); - buffer.end_transaction_at(Some(set_id), now); + buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); - buffer.start_transaction_at(Some(set_id), now); - buffer - .update_selection_set(set_id, &buffer.selections_from_ranges(vec![1..3]).unwrap()) - .unwrap(); + buffer.start_transaction_at(now); buffer.edit(vec![4..5], "e"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); + buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); now += buffer.history.group_interval + Duration::from_millis(1); - buffer.start_transaction_at(Some(set_id), now); - buffer - .update_selection_set(set_id, &buffer.selections_from_ranges(vec![2..2]).unwrap()) - .unwrap(); + buffer.start_transaction_at(now); buffer.edit(vec![0..1], "a"); buffer.edit(vec![1..1], "b"); - buffer.end_transaction_at(Some(set_id), now).unwrap(); + buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - // Last transaction happened past the group interval, undo it on its - // own. + // Last transaction happened past the group interval, undo it on its own. buffer.undo(); assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); - // First two transactions happened within the group interval, undo them - // together. + // First two transactions happened within the group interval, undo them together. buffer.undo(); assert_eq!(buffer.text(), "123456"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![4..4]); // Redo the first two transactions together. buffer.redo(); assert_eq!(buffer.text(), "12cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![1..3]); // Redo the last transaction on its own. 
buffer.redo(); assert_eq!(buffer.text(), "ab2cde6"); - assert_eq!(buffer.selection_ranges(set_id).unwrap(), vec![3..3]); - buffer.start_transaction_at(None, now); - assert!(buffer.end_transaction_at(None, now).is_none()); + buffer.start_transaction_at(now); + assert!(buffer.end_transaction_at(now).is_none()); buffer.undo(); assert_eq!(buffer.text(), "12cde6"); } @@ -582,8 +560,8 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let buffer = &mut buffers[replica_index]; match rng.gen_range(0..=100) { 0..=50 if mutation_count != 0 => { - let ops = buffer.randomly_mutate(&mut rng); - network.broadcast(buffer.replica_id, ops); + let op = buffer.randomly_edit(&mut rng, 5).2; + network.broadcast(buffer.replica_id, vec!(op)); log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); mutation_count -= 1; } @@ -620,18 +598,6 @@ fn test_random_concurrent_edits(mut rng: StdRng) { "Replica {} text != Replica 0 text", buffer.replica_id ); - assert_eq!( - buffer.selection_sets().collect::>(), - first_buffer.selection_sets().collect::>() - ); - assert_eq!( - buffer - .all_selection_ranges::() - .collect::>(), - first_buffer - .all_selection_ranges::() - .collect::>() - ); buffer.check_invariants(); } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 525cbc43662a03af9eadec7b5ea868a6d3c8a1e4..5da3c7043e617065e977203f9e9d014ed804579b 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -13,7 +13,7 @@ pub mod subscription; mod tests; pub use anchor::*; -use anyhow::{anyhow, Result}; +use anyhow::Result; use clock::ReplicaId; use collections::{HashMap, HashSet}; use locator::Locator; @@ -71,17 +71,11 @@ pub struct Transaction { end: clock::Global, edits: Vec, ranges: Vec>, - selections_before: HashMap]>>, - selections_after: HashMap]>>, first_edit_at: Instant, last_edit_at: Instant, } impl Transaction { - pub fn starting_selection_set_ids<'a>(&'a self) -> impl Iterator + 'a { - self.selections_before.keys().copied() - } - fn push_edit(&mut self, edit: &EditOperation) { self.edits.push(edit.timestamp.local()); self.end.observe(edit.timestamp.local()); @@ -158,12 +152,7 @@ impl History { self.ops.insert(op.timestamp.local(), op); } - fn start_transaction( - &mut self, - start: clock::Global, - selections_before: HashMap]>>, - now: Instant, - ) -> Option { + fn start_transaction(&mut self, start: clock::Global, now: Instant) -> Option { self.transaction_depth += 1; if self.transaction_depth == 1 { let id = self.next_transaction_id; @@ -174,8 +163,6 @@ impl History { end: start, edits: Vec::new(), ranges: Vec::new(), - selections_before, - selections_after: Default::default(), first_edit_at: now, last_edit_at: now, }); @@ -185,11 +172,7 @@ impl History { } } - fn end_transaction( - &mut self, - selections_after: HashMap]>>, - now: Instant, - ) -> Option<&Transaction> { + fn end_transaction(&mut self, now: Instant) -> Option<&Transaction> { assert_ne!(self.transaction_depth, 0); self.transaction_depth -= 1; if self.transaction_depth == 0 { @@ -198,7 +181,6 @@ impl History { None } else { let transaction = self.undo_stack.last_mut().unwrap(); - transaction.selections_after = selections_after; transaction.last_edit_at = now; Some(transaction) } @@ -234,9 +216,6 @@ impl History { if let Some(transaction) = transactions_to_merge.last_mut() { last_transaction.last_edit_at = transaction.last_edit_at; - last_transaction - .selections_after - .extend(transaction.selections_after.drain()); last_transaction.end = transaction.end.clone(); } } @@ -558,7 +537,7 @@ impl 
Buffer { None }; - self.start_transaction(None); + self.start_transaction(); let timestamp = InsertionTimestamp { replica_id: self.replica_id, local: self.local_clock.tick().value, @@ -570,7 +549,7 @@ impl Buffer { self.history.push_undo(edit.timestamp.local()); self.last_edit = edit.timestamp.local(); self.snapshot.version.observe(edit.timestamp.local()); - self.end_transaction(None); + self.end_transaction(); edit } @@ -1149,56 +1128,20 @@ impl Buffer { self.history.undo_stack.last() } - pub fn start_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - ) -> Option { - self.start_transaction_at(selection_set_ids, Instant::now()) + pub fn start_transaction(&mut self) -> Option { + self.start_transaction_at(Instant::now()) } - pub fn start_transaction_at( - &mut self, - selection_set_ids: impl IntoIterator, - now: Instant, - ) -> Option { - let selections = selection_set_ids - .into_iter() - .map(|set_id| { - let set = self - .selection_sets - .get(&set_id) - .expect("invalid selection set id"); - (set_id, set.selections.clone()) - }) - .collect(); - self.history - .start_transaction(self.version.clone(), selections, now) + pub fn start_transaction_at(&mut self, now: Instant) -> Option { + self.history.start_transaction(self.version.clone(), now) } - pub fn end_transaction( - &mut self, - selection_set_ids: impl IntoIterator, - ) -> Option<(TransactionId, clock::Global)> { - self.end_transaction_at(selection_set_ids, Instant::now()) + pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> { + self.end_transaction_at(Instant::now()) } - pub fn end_transaction_at( - &mut self, - selection_set_ids: impl IntoIterator, - now: Instant, - ) -> Option<(TransactionId, clock::Global)> { - let selections = selection_set_ids - .into_iter() - .map(|set_id| { - let set = self - .selection_sets - .get(&set_id) - .expect("invalid selection set id"); - (set_id, set.selections.clone()) - }) - .collect(); - - if let Some(transaction) = self.history.end_transaction(selections, now) { + pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> { + if let Some(transaction) = self.history.end_transaction(now) { let id = transaction.id; let since = transaction.start.clone(); self.history.group(); @@ -1221,31 +1164,21 @@ impl Buffer { self.history.ops.values() } - pub fn undo(&mut self) -> Option<(TransactionId, Vec)> { + pub fn undo(&mut self) -> Option<(TransactionId, Operation)> { if let Some(transaction) = self.history.pop_undo().cloned() { let transaction_id = transaction.id; - let selections = transaction.selections_before.clone(); - let mut ops = Vec::new(); - ops.push(self.undo_or_redo(transaction).unwrap()); - for (set_id, selections) in selections { - ops.extend(self.restore_selection_set(set_id, selections)); - } - Some((transaction_id, ops)) + let op = self.undo_or_redo(transaction).unwrap(); + Some((transaction_id, op)) } else { None } } - pub fn redo(&mut self) -> Option<(TransactionId, Vec)> { + pub fn redo(&mut self) -> Option<(TransactionId, Operation)> { if let Some(transaction) = self.history.pop_redo().cloned() { let transaction_id = transaction.id; - let selections = transaction.selections_after.clone(); - let mut ops = Vec::new(); - ops.push(self.undo_or_redo(transaction).unwrap()); - for (set_id, selections) in selections { - ops.extend(self.restore_selection_set(set_id, selections)); - } - Some((transaction_id, ops)) + let op = self.undo_or_redo(transaction).unwrap(); + Some((transaction_id, op)) } else { None } @@ 
-1275,125 +1208,6 @@ impl Buffer { pub fn subscribe(&mut self) -> Subscription { self.subscriptions.subscribe() } - - pub fn selection_set(&self, set_id: SelectionSetId) -> Result<&SelectionSet> { - self.selection_sets - .get(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id)) - } - - pub fn selection_sets(&self) -> impl Iterator { - self.selection_sets.iter() - } - - fn build_anchor_selection_set( - &self, - selections: &[Selection], - ) -> Arc<[Selection]> { - Arc::from( - selections - .iter() - .map(|selection| Selection { - id: selection.id, - start: self.anchor_before(&selection.start), - end: self.anchor_before(&selection.end), - reversed: selection.reversed, - goal: selection.goal, - }) - .collect::>(), - ) - } - - pub fn update_selection_set( - &mut self, - set_id: SelectionSetId, - selections: &[Selection], - ) -> Result { - let selections = self.build_anchor_selection_set(selections); - let set = self - .selection_sets - .get_mut(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - set.selections = selections.clone(); - Ok(Operation::UpdateSelections { - set_id, - selections, - lamport_timestamp: self.lamport_clock.tick(), - }) - } - - pub fn restore_selection_set( - &mut self, - set_id: SelectionSetId, - selections: Arc<[Selection]>, - ) -> Result { - let set = self - .selection_sets - .get_mut(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - set.selections = selections.clone(); - Ok(Operation::UpdateSelections { - set_id, - selections, - lamport_timestamp: self.lamport_clock.tick(), - }) - } - - pub fn add_selection_set(&mut self, selections: &[Selection]) -> Operation { - let selections = self.build_anchor_selection_set(selections); - let set_id = self.lamport_clock.tick(); - self.selection_sets.insert( - set_id, - SelectionSet { - id: set_id, - selections: selections.clone(), - active: false, - }, - ); - Operation::UpdateSelections { - set_id, - selections, - lamport_timestamp: set_id, - } - } - - pub fn add_raw_selection_set(&mut self, id: SelectionSetId, selections: SelectionSet) { - self.selection_sets.insert(id, selections); - } - - pub fn set_active_selection_set( - &mut self, - set_id: Option, - ) -> Result { - if let Some(set_id) = set_id { - assert_eq!(set_id.replica_id, self.replica_id()); - } - - for (id, set) in &mut self.selection_sets { - if id.replica_id == self.local_clock.replica_id { - if Some(*id) == set_id { - set.active = true; - } else { - set.active = false; - } - } - } - - Ok(Operation::SetActiveSelections { - set_id, - lamport_timestamp: self.lamport_clock.tick(), - }) - } - - pub fn remove_selection_set(&mut self, set_id: SelectionSetId) -> Result { - self.selection_sets - .remove(&set_id) - .ok_or_else(|| anyhow!("invalid selection set id {:?}", set_id))?; - Ok(Operation::RemoveSelections { - set_id, - lamport_timestamp: self.lamport_clock.tick(), - }) - } } #[cfg(any(test, feature = "test-support"))] @@ -1434,42 +1248,6 @@ impl Buffer { (old_ranges, new_text, Operation::Edit(op)) } - pub fn randomly_mutate(&mut self, rng: &mut T) -> Vec - where - T: rand::Rng, - { - use rand::prelude::*; - - let mut ops = vec![self.randomly_edit(rng, 5).2]; - - // Randomly add, remove or mutate selection sets. 
- let replica_selection_sets = &self - .selection_sets() - .map(|(set_id, _)| *set_id) - .filter(|set_id| self.replica_id == set_id.replica_id) - .collect::>(); - let set_id = replica_selection_sets.choose(rng); - if set_id.is_some() && rng.gen_bool(1.0 / 6.0) { - ops.push(self.remove_selection_set(*set_id.unwrap()).unwrap()); - } else { - let mut ranges = Vec::new(); - for _ in 0..5 { - ranges.push(self.random_byte_range(0, rng)); - } - let new_selections = self.selections_from_ranges(ranges).unwrap(); - - let op = if set_id.is_none() || rng.gen_bool(1.0 / 5.0) { - self.add_selection_set(&new_selections) - } else { - self.update_selection_set(*set_id.unwrap(), &new_selections) - .unwrap() - }; - ops.push(op); - } - - ops - } - pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec { use rand::prelude::*; @@ -1486,73 +1264,6 @@ impl Buffer { } ops } - - fn selections_from_ranges(&self, ranges: I) -> Result>> - where - I: IntoIterator>, - { - use std::sync::atomic::{self, AtomicUsize}; - - static NEXT_SELECTION_ID: AtomicUsize = AtomicUsize::new(0); - - let mut ranges = ranges.into_iter().collect::>(); - ranges.sort_unstable_by_key(|range| range.start); - - let mut selections = Vec::>::with_capacity(ranges.len()); - for mut range in ranges { - let mut reversed = false; - if range.start > range.end { - reversed = true; - std::mem::swap(&mut range.start, &mut range.end); - } - - if let Some(selection) = selections.last_mut() { - if selection.end >= range.start { - selection.end = range.end; - continue; - } - } - - selections.push(Selection { - id: NEXT_SELECTION_ID.fetch_add(1, atomic::Ordering::SeqCst), - start: range.start, - end: range.end, - reversed, - goal: SelectionGoal::None, - }); - } - Ok(selections) - } - - #[cfg(test)] - pub fn selection_ranges<'a, D>(&'a self, set_id: SelectionSetId) -> Result>> - where - D: TextDimension, - { - Ok(self - .selection_set(set_id)? 
- .selections(self) - .map(move |selection| { - if selection.reversed { - selection.end..selection.start - } else { - selection.start..selection.end - } - }) - .collect()) - } - - #[cfg(test)] - pub fn all_selection_ranges<'a, D>( - &'a self, - ) -> impl 'a + Iterator>)> - where - D: TextDimension, - { - self.selection_sets - .keys() - .map(move |set_id| (*set_id, self.selection_ranges(*set_id).unwrap())) - } } impl Deref for Buffer { From f5c775fcd16156fbf87cb25a22d3890350231b90 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 22:16:39 -0700 Subject: [PATCH 053/196] WIP --- crates/editor/src/editor.rs | 71 +++++++++++++++++++------------ crates/editor/src/multi_buffer.rs | 4 +- 2 files changed, 45 insertions(+), 30 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c82a5d6d2ffeacbc9f2c39280f2d15f674b74ead..c663e442a773e601e78eb66d4134ce484acd8b6e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3049,35 +3049,50 @@ impl Editor { where D: 'a + TextDimension + Ord + Sub, { - // let buffer = self.buffer.read(cx).snapshot(cx); - // let mut selections = self.selection_set(cx).selections::(&buffer).peekable(); - // let mut pending_selection = self.pending_selection(cx); - - // iter::from_fn(move || { - // if let Some(pending) = pending_selection.as_mut() { - // while let Some(next_selection) = selections.peek() { - // if pending.start <= next_selection.end && pending.end >= next_selection.start { - // let next_selection = selections.next().unwrap(); - // if next_selection.start < pending.start { - // pending.start = next_selection.start; - // } - // if next_selection.end > pending.end { - // pending.end = next_selection.end; - // } - // } else if next_selection.end < pending.start { - // return selections.next(); - // } else { - // break; - // } - // } + let buffer = self.buffer.read(cx).snapshot(cx); - // pending_selection.take() - // } else { - // selections.next() - // } - // }) - // .collect() - todo!() + let mut summaries = buffer + .summaries_for_anchors::(self.selections.iter().flat_map(|s| [&s.start, &s.end])) + .into_iter(); + + let mut selections = self + .selections + .iter() + .map(|s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }) + .peekable(); + + let mut pending_selection = self.pending_selection::(&buffer, cx); + + iter::from_fn(move || { + if let Some(pending) = pending_selection.as_mut() { + while let Some(next_selection) = selections.peek() { + if pending.start <= next_selection.end && pending.end >= next_selection.start { + let next_selection = selections.next().unwrap(); + if next_selection.start < pending.start { + pending.start = next_selection.start; + } + if next_selection.end > pending.end { + pending.end = next_selection.end; + } + } else if next_selection.end < pending.start { + return selections.next(); + } else { + break; + } + } + + pending_selection.take() + } else { + selections.next() + } + }) + .collect() } fn pending_selection>( diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 5f2ca98f8d6c3a675766cd94833bbc47f311b559..1e93c16f7779cba32509133c76c0430f5409bb7e 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -777,7 +777,7 @@ impl MultiBufferSnapshot { summary } - fn summary_for_anchor(&self, anchor: &Anchor) -> D + pub fn summary_for_anchor(&self, anchor: &Anchor) -> D where D: TextDimension + Ord + Sub, { 
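
The editor hunk above resolves anchor-based selections by flattening each selection into its start and end anchors, resolving all of them with a single `summaries_for_anchors` call against the snapshot, and zipping the results back into `Selection` values in the same order. Below is a minimal, self-contained sketch of that shape; `Anchor`, `Selection`, and `summaries_for_anchors` here are toy stand-ins, not the real `text`/`multi_buffer` types.

    // Toy stand-ins: the real code resolves text::Anchor values against a
    // MultiBufferSnapshot, but the shape of the transformation is the same.
    #[derive(Clone, Copy, Debug)]
    struct Anchor(usize); // pretend an anchor is just a stable offset

    #[derive(Debug)]
    struct Selection<T> {
        id: usize,
        start: T,
        end: T,
        reversed: bool,
    }

    // Stand-in for MultiBufferSnapshot::summaries_for_anchors: resolve many
    // anchors in a single ordered pass instead of one seek per anchor.
    fn summaries_for_anchors<'a>(anchors: impl Iterator<Item = &'a Anchor>) -> Vec<usize> {
        anchors.map(|anchor| anchor.0).collect()
    }

    fn resolve(selections: &[Selection<Anchor>]) -> Vec<Selection<usize>> {
        // Flatten every selection into its start/end anchors, resolve them all
        // at once, then zip the resolved values back into selections in order.
        let mut summaries =
            summaries_for_anchors(selections.iter().flat_map(|s| [&s.start, &s.end])).into_iter();
        selections
            .iter()
            .map(|s| Selection {
                id: s.id,
                start: summaries.next().unwrap(),
                end: summaries.next().unwrap(),
                reversed: s.reversed,
            })
            .collect()
    }

    fn main() {
        let anchored = vec![Selection { id: 0, start: Anchor(2), end: Anchor(5), reversed: false }];
        let resolved = resolve(&anchored);
        assert_eq!(resolved[0].start, 2);
        assert_eq!(resolved[0].end, 5);
    }

Resolving both endpoints in one ordered pass is cheaper than seeking the snapshot once per anchor, which is presumably why `summaries_for_anchors` is being made public in the surrounding hunks.
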
@@ -798,7 +798,7 @@ impl MultiBufferSnapshot { D::from_text_summary(&cursor.start().text) } - fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec + pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec where D: TextDimension + Ord + Sub, I: 'a + IntoIterator, From 49d1c9d1ba238647568fece048fa45573bb49a4c Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 23:33:15 -0700 Subject: [PATCH 054/196] Introduce sum_tree::TreeMap I think this will be useful to avoid cloning HashMaps in certain cases such as snapshots. --- crates/sum_tree/src/sum_tree.rs | 2 + crates/sum_tree/src/tree_map.rs | 109 ++++++++++++++++++++++++++++++++ 2 files changed, 111 insertions(+) create mode 100644 crates/sum_tree/src/tree_map.rs diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 63fb379d537785220a36cc8217d8fd28924a0a76..67c056d858d74b37d29a4490e77348b0781628f8 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -1,9 +1,11 @@ mod cursor; +mod tree_map; use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; use std::marker::PhantomData; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; +pub use tree_map::TreeMap; #[cfg(test)] const TREE_BASE: usize = 2; diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs new file mode 100644 index 0000000000000000000000000000000000000000..d5ca7c13271099d8b27d65afc1da97832ccd54e0 --- /dev/null +++ b/crates/sum_tree/src/tree_map.rs @@ -0,0 +1,109 @@ +use std::{cmp::Ordering, fmt::Debug}; + +use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary}; + +pub struct TreeMap(SumTree>) +where + K: Clone + Debug + Default, + V: Clone + Debug + Default; + +#[derive(Clone)] +pub struct MapEntry { + key: K, + value: V, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +pub struct MapKey(K); + +#[derive(Clone, Debug, Default)] +pub struct MapKeyRef<'a, K>(Option<&'a K>); + +impl TreeMap { + pub fn get<'a>(&self, key: &'a K) -> Option<&V> { + let mut cursor = self.0.cursor::>(); + let key = MapKeyRef(Some(key)); + cursor.seek(&key, Bias::Left, &()); + if key.cmp(cursor.start(), &()) == Ordering::Equal { + Some(&cursor.item().unwrap().value) + } else { + None + } + } + + pub fn insert(&mut self, key: K, value: V) { + self.0.insert_or_replace(MapEntry { key, value }, &()); + } + + pub fn remove<'a>(&mut self, key: &'a K) -> Option { + let mut removed = None; + let mut cursor = self.0.cursor::>(); + let key = MapKeyRef(Some(key)); + let mut new_tree = cursor.slice(&key, Bias::Left, &()); + if key.cmp(cursor.start(), &()) == Ordering::Equal { + removed = Some(cursor.item().unwrap().value.clone()); + cursor.next(&()); + } + new_tree.push_tree(cursor.suffix(&()), &()); + drop(cursor); + self.0 = new_tree; + removed + } +} + +impl Item for MapEntry +where + K: Clone + Debug + Default + Clone, + V: Clone, +{ + type Summary = MapKey; + + fn summary(&self) -> Self::Summary { + todo!() + } +} + +impl KeyedItem for MapEntry +where + K: Clone + Debug + Default + Ord, + V: Clone, +{ + type Key = MapKey; + + fn key(&self) -> Self::Key { + MapKey(self.key.clone()) + } +} + +impl Summary for MapKey +where + K: Clone + Debug + Default, +{ + type Context = (); + + fn add_summary(&mut self, summary: &Self, cx: &()) { + *self = summary.clone() + } +} + +impl<'a, K> Dimension<'a, MapKey> for MapKeyRef<'a, K> +where + K: Clone + Debug + Default + Ord, +{ + fn add_summary(&mut self, summary: &'a MapKey, _: &()) { + self.0 
= Some(&summary.0) + } +} + +impl<'a, K> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapKeyRef<'_, K> +where + K: Clone + Debug + Default + Ord, +{ + fn cmp(&self, cursor_location: &MapKeyRef, cx: &()) -> Ordering { + if let Some(key) = cursor_location.0 { + self.0.cmp(&cursor_location.0) + } else { + Ordering::Greater + } + } +} From 0639c8331c2e6e86ec273d480bcf1afc8df34c27 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 10 Dec 2021 23:35:24 -0700 Subject: [PATCH 055/196] Relax TreeMap value bounds, fix warnings, simplify cmp --- crates/sum_tree/src/tree_map.rs | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index d5ca7c13271099d8b27d65afc1da97832ccd54e0..75405cbef43478e4b83f0caf54a6dae36b3d0cf9 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -5,7 +5,7 @@ use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary}; pub struct TreeMap(SumTree>) where K: Clone + Debug + Default, - V: Clone + Debug + Default; + V: Clone + Debug; #[derive(Clone)] pub struct MapEntry { @@ -81,7 +81,7 @@ where { type Context = (); - fn add_summary(&mut self, summary: &Self, cx: &()) { + fn add_summary(&mut self, summary: &Self, _: &()) { *self = summary.clone() } } @@ -99,11 +99,7 @@ impl<'a, K> SeekTarget<'a, MapKey, MapKeyRef<'a, K>> for MapKeyRef<'_, K> where K: Clone + Debug + Default + Ord, { - fn cmp(&self, cursor_location: &MapKeyRef, cx: &()) -> Ordering { - if let Some(key) = cursor_location.0 { - self.0.cmp(&cursor_location.0) - } else { - Ordering::Greater - } + fn cmp(&self, cursor_location: &MapKeyRef, _: &()) -> Ordering { + self.0.cmp(&cursor_location.0) } } From 4dd0752e80f388a9f78c9937ceed0454da868ad9 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sat, 11 Dec 2021 00:29:34 -0700 Subject: [PATCH 056/196] More messy progress towards selections in editors --- crates/editor/src/editor.rs | 247 +++++++++++--------- crates/editor/src/items.rs | 6 +- crates/editor/src/multi_buffer.rs | 20 +- crates/editor/src/multi_buffer/selection.rs | 85 +------ crates/go_to_line/src/go_to_line.rs | 13 +- crates/language/src/buffer.rs | 81 ++++++- crates/language/src/proto.rs | 129 ++++------ crates/rpc/proto/zed.proto | 11 +- crates/sum_tree/src/tree_map.rs | 17 +- crates/text/src/selection.rs | 95 +------- crates/text/src/text.rs | 81 +------ 11 files changed, 302 insertions(+), 483 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c663e442a773e601e78eb66d4134ce484acd8b6e..07a01f73638a4b40aa0e6951cfcdb313449d602e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -23,7 +23,7 @@ use gpui::{ use items::BufferItemHandle; use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, - SelectionSetId, TransactionId, + TransactionId, }; pub use multi_buffer::MultiBuffer; use multi_buffer::{ @@ -638,11 +638,11 @@ impl Editor { let first_cursor_top; let last_cursor_bottom; if autoscroll == Autoscroll::Newest { - let newest_selection = self.newest_selection::(&display_map.buffer_snapshot, cx); + let newest_selection = self.newest_selection::(&display_map.buffer_snapshot); first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32; last_cursor_bottom = first_cursor_top + 1.; } else { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); first_cursor_top = selections .first() .unwrap() @@ 
-702,7 +702,7 @@ impl Editor { cx: &mut ViewContext, ) -> bool { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let mut target_left = std::f32::INFINITY; let mut target_right = 0.0_f32; for selection in selections { @@ -772,7 +772,7 @@ impl Editor { ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let tail = self - .newest_selection::(&display_map.buffer_snapshot, cx) + .newest_selection::(&display_map.buffer_snapshot) .tail(); self.begin_selection(position, false, click_count, cx); @@ -855,8 +855,8 @@ impl Editor { self.update_selections::(Vec::new(), None, cx); } else if click_count > 1 { // Remove the newest selection since it was only added as part of this multi-click. - let newest_selection = self.newest_selection::(buffer, cx); - let mut selections = self.selections(cx); + let newest_selection = self.newest_selection::(buffer); + let mut selections = self.local_selections(cx); selections.retain(|selection| selection.id != newest_selection.id); self.update_selections::(selections, None, cx) } @@ -879,7 +879,7 @@ impl Editor { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let tail = self - .newest_selection::(&display_map.buffer_snapshot, cx) + .newest_selection::(&display_map.buffer_snapshot) .tail(); self.columnar_selection_tail = Some(display_map.buffer_snapshot.anchor_before(tail)); @@ -986,7 +986,7 @@ impl Editor { fn end_selection(&mut self, cx: &mut ViewContext) { self.columnar_selection_tail.take(); if self.pending_selection.is_some() { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); self.update_selections(selections, None, cx); } } @@ -1047,13 +1047,13 @@ impl Editor { reversed: selection.reversed, goal: selection.goal, }; - if self.selections::(cx).is_empty() { + if self.local_selections::(cx).is_empty() { self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); } } else { let buffer = self.buffer.read(cx).snapshot(cx); - let mut oldest_selection = self.oldest_selection::(&buffer, cx); - if self.selection_count(cx) == 1 { + let mut oldest_selection = self.oldest_selection::(&buffer); + if self.selection_count() == 1 { oldest_selection.start = oldest_selection.head().clone(); oldest_selection.end = oldest_selection.head().clone(); } @@ -1142,7 +1142,7 @@ impl Editor { self.start_transaction(cx); let mut old_selections = SmallVec::<[_; 32]>::new(); { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let buffer = self.buffer.read(cx).snapshot(cx); for selection in selections.iter() { let start_point = selection.start; @@ -1264,7 +1264,7 @@ impl Editor { fn insert(&mut self, text: &str, cx: &mut ViewContext) { self.start_transaction(cx); - let old_selections = self.selections::(cx); + let old_selections = self.local_selections::(cx); let mut new_selections = Vec::new(); self.buffer.update(cx, |buffer, cx| { let edit_ranges = old_selections.iter().map(|s| s.start..s.end); @@ -1295,7 +1295,7 @@ impl Editor { } fn autoclose_pairs(&mut self, cx: &mut ViewContext) { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let new_autoclose_pair = self.buffer.update(cx, |buffer, cx| { let snapshot = buffer.snapshot(cx); let autoclose_pair = snapshot.language().and_then(|language| { @@ -1356,7 +1356,7 @@ impl Editor { } fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext) -> 
bool { - let old_selections = self.selections::(cx); + let old_selections = self.local_selections::(cx); let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() { autoclose_pair } else { @@ -1407,7 +1407,7 @@ impl Editor { pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext) { self.start_transaction(cx); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); for selection in &mut selections { if selection.is_empty() { @@ -1427,7 +1427,7 @@ impl Editor { pub fn delete(&mut self, _: &Delete, cx: &mut ViewContext) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -1446,7 +1446,7 @@ impl Editor { pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext) { self.start_transaction(cx); let tab_size = self.build_settings.borrow()(cx).tab_size; - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let mut last_indent = None; self.buffer.update(cx, |buffer, cx| { for selection in &mut selections { @@ -1518,7 +1518,7 @@ impl Editor { pub fn outdent(&mut self, _: &Outdent, cx: &mut ViewContext) { self.start_transaction(cx); let tab_size = self.build_settings.borrow()(cx).tab_size; - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let mut deletion_ranges = Vec::new(); let mut last_outdent = None; { @@ -1558,14 +1558,18 @@ impl Editor { buffer.edit(deletion_ranges, "", cx); }); - self.update_selections(self.selections::(cx), Some(Autoscroll::Fit), cx); + self.update_selections( + self.local_selections::(cx), + Some(Autoscroll::Fit), + cx, + ); self.end_transaction(cx); } pub fn delete_line(&mut self, _: &DeleteLine, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -1634,7 +1638,7 @@ impl Editor { pub fn duplicate_line(&mut self, _: &DuplicateLine, cx: &mut ViewContext) { self.start_transaction(cx); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = &display_map.buffer_snapshot; @@ -1692,7 +1696,7 @@ impl Editor { pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -1782,7 +1786,7 @@ impl Editor { pub fn move_line_down(&mut self, _: &MoveLineDown, cx: &mut ViewContext) { self.start_transaction(cx); - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -1869,7 +1873,7 @@ impl Editor { pub fn cut(&mut self, _: &Cut, cx: &mut ViewContext) { self.start_transaction(cx); let mut text = String::new(); - let mut selections = self.selections::(cx); + 
let mut selections = self.local_selections::(cx); let mut clipboard_selections = Vec::with_capacity(selections.len()); { let buffer = self.buffer.read(cx).read(cx); @@ -1900,7 +1904,7 @@ impl Editor { } pub fn copy(&mut self, _: &Copy, cx: &mut ViewContext) { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let mut text = String::new(); let mut clipboard_selections = Vec::with_capacity(selections.len()); { @@ -1934,7 +1938,7 @@ impl Editor { if let Some(item) = cx.as_mut().read_from_clipboard() { let clipboard_text = item.text(); if let Some(mut clipboard_selections) = item.metadata::>() { - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let all_selections_were_entire_line = clipboard_selections.iter().all(|s| s.is_entire_line); if clipboard_selections.len() != selections.len() { @@ -1997,7 +2001,7 @@ impl Editor { pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2019,7 +2023,7 @@ impl Editor { pub fn select_left(&mut self, _: &SelectLeft, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::left(&display_map, head) @@ -2033,7 +2037,7 @@ impl Editor { pub fn move_right(&mut self, _: &MoveRight, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2055,7 +2059,7 @@ impl Editor { pub fn select_right(&mut self, _: &SelectRight, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::right(&display_map, head) @@ -2074,7 +2078,7 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2094,7 +2098,7 @@ impl Editor { pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let (head, goal) = movement::up(&display_map, head, selection.goal).unwrap(); @@ -2112,7 +2116,7 @@ impl Editor { } let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + 
let mut selections = self.local_selections::(cx); for selection in &mut selections { let start = selection.start.to_display_point(&display_map); let end = selection.end.to_display_point(&display_map); @@ -2132,7 +2136,7 @@ impl Editor { pub fn select_down(&mut self, _: &SelectDown, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let (head, goal) = movement::down(&display_map, head, selection.goal).unwrap(); @@ -2149,7 +2153,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::prev_word_boundary(&display_map, head).to_point(&display_map); @@ -2167,7 +2171,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::prev_word_boundary(&display_map, head).to_point(&display_map); @@ -2184,7 +2188,7 @@ impl Editor { ) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -2205,7 +2209,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::next_word_boundary(&display_map, head).to_point(&display_map); @@ -2223,7 +2227,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let cursor = movement::next_word_boundary(&display_map, head).to_point(&display_map); @@ -2240,7 +2244,7 @@ impl Editor { ) { self.start_transaction(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { if selection.is_empty() { let head = selection.head().to_display_point(&display_map); @@ -2261,7 +2265,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_beginning(&display_map, head, true); @@ -2280,7 +2284,7 @@ impl Editor { cx: &mut ViewContext, ) { let display_map = self.display_map.update(cx, |map, 
cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_beginning(&display_map, head, *toggle_indent); @@ -2303,7 +2307,7 @@ impl Editor { pub fn move_to_end_of_line(&mut self, _: &MoveToEndOfLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); { for selection in &mut selections { let head = selection.head().to_display_point(&display_map); @@ -2320,7 +2324,7 @@ impl Editor { pub fn select_to_end_of_line(&mut self, _: &SelectToEndOfLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); for selection in &mut selections { let head = selection.head().to_display_point(&display_map); let new_head = movement::line_end(&display_map, head); @@ -2356,7 +2360,7 @@ impl Editor { } pub fn select_to_beginning(&mut self, _: &SelectToBeginning, cx: &mut ViewContext) { - let mut selection = self.selections::(cx).last().unwrap().clone(); + let mut selection = self.local_selections::(cx).last().unwrap().clone(); selection.set_head(Point::zero()); self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); } @@ -2374,7 +2378,7 @@ impl Editor { } pub fn select_to_end(&mut self, _: &SelectToEnd, cx: &mut ViewContext) { - let mut selection = self.selections::(cx).first().unwrap().clone(); + let mut selection = self.local_selections::(cx).first().unwrap().clone(); selection.set_head(self.buffer.read(cx).read(cx).len()); self.update_selections(vec![selection], Some(Autoscroll::Fit), cx); } @@ -2392,7 +2396,7 @@ impl Editor { pub fn select_line(&mut self, _: &SelectLine, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let max_point = display_map.buffer_snapshot.max_point(); for selection in &mut selections { let rows = selection.spanned_rows(true, &display_map).buffer_rows; @@ -2411,7 +2415,7 @@ impl Editor { let mut to_unfold = Vec::new(); let mut new_selections = Vec::new(); { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let buffer = self.buffer.read(cx).read(cx); for selection in selections { for row in selection.start.row..selection.end.row { @@ -2448,7 +2452,7 @@ impl Editor { fn add_selection(&mut self, above: bool, cx: &mut ViewContext) { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let mut state = self.add_selections_state.take().unwrap_or_else(|| { let oldest_selection = selections.iter().min_by_key(|s| s.id).unwrap().clone(); let range = oldest_selection.display_range(&display_map).sorted(); @@ -2543,7 +2547,7 @@ impl Editor { let replace_newest = action.0; let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = &display_map.buffer_snapshot; - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); if let Some(mut select_next_state) = self.select_next_state.take() { let query = &select_next_state.query; if !select_next_state.done { 
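
Stepping back to the `sum_tree::TreeMap` introduced a couple of patches above: the stated motivation is avoiding `HashMap` clones when taking snapshots. The win comes from structural sharing, since a `SumTree` keeps its nodes behind `Arc`s, cloning the map only bumps reference counts and only the write path copies nodes. Below is a rough, self-contained illustration of that idea using a copy-on-write map built on `std::sync::Arc`; `SnapshotMap` is an illustrative stand-in, not the real `TreeMap`.

    use std::sync::Arc;

    // Illustrative copy-on-write map: cloning a snapshot is O(1) because both
    // copies share the same Arc'd storage.
    #[derive(Clone)]
    struct SnapshotMap<K: Ord + Clone, V: Clone> {
        entries: Arc<Vec<(K, V)>>, // kept sorted by key
    }

    impl<K: Ord + Clone, V: Clone> SnapshotMap<K, V> {
        fn new() -> Self {
            Self { entries: Arc::new(Vec::new()) }
        }

        fn get(&self, key: &K) -> Option<&V> {
            self.entries
                .binary_search_by(|(k, _)| k.cmp(key))
                .ok()
                .map(|ix| &self.entries[ix].1)
        }

        // Writers copy the backing storage; snapshots taken earlier are untouched.
        fn insert(&mut self, key: K, value: V) {
            let mut entries = (*self.entries).clone();
            match entries.binary_search_by(|(k, _)| k.cmp(&key)) {
                Ok(ix) => entries[ix].1 = value,
                Err(ix) => entries.insert(ix, (key, value)),
            }
            self.entries = Arc::new(entries);
        }
    }

    fn main() {
        let mut live = SnapshotMap::new();
        live.insert(1u16, "selections for replica 1");
        let snapshot = live.clone(); // cheap: shares the same storage
        live.insert(2u16, "selections for replica 2");
        assert_eq!(snapshot.get(&2), None); // the snapshot is unaffected
        assert_eq!(live.get(&2), Some(&"selections for replica 2"));
    }

In the real type, a write only copies the tree nodes it touches rather than the whole backing storage, so both cloning and updating stay cheap; this toy copies everything on insert just to keep the example short.
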
@@ -2650,7 +2654,7 @@ impl Editor { let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..]; self.start_transaction(cx); - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let mut all_selection_lines_are_comments = true; let mut edit_ranges = Vec::new(); let mut last_toggled_row = None; @@ -2727,7 +2731,11 @@ impl Editor { } }); - self.update_selections(self.selections::(cx), Some(Autoscroll::Fit), cx); + self.update_selections( + self.local_selections::(cx), + Some(Autoscroll::Fit), + cx, + ); self.end_transaction(cx); } @@ -2736,7 +2744,7 @@ impl Editor { _: &SelectLargerSyntaxNode, cx: &mut ViewContext, ) { - let old_selections = self.selections::(cx).into_boxed_slice(); + let old_selections = self.local_selections::(cx).into_boxed_slice(); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); @@ -2794,7 +2802,7 @@ impl Editor { _: &MoveToEnclosingBracket, cx: &mut ViewContext, ) { - let mut selections = self.selections::(cx); + let mut selections = self.local_selections::(cx); let buffer = self.buffer.read(cx).snapshot(cx); for selection in &mut selections { if let Some((open_range, close_range)) = @@ -2818,7 +2826,7 @@ impl Editor { pub fn show_next_diagnostic(&mut self, _: &ShowNextDiagnostic, cx: &mut ViewContext) { let buffer = self.buffer.read(cx).snapshot(cx); - let selection = self.newest_selection::(&buffer, cx); + let selection = self.newest_selection::(&buffer); let active_primary_range = self.active_diagnostics.as_ref().map(|active_diagnostics| { active_diagnostics .primary_range @@ -2998,54 +3006,65 @@ impl Editor { } } - pub fn intersecting_selections<'a>( + pub fn all_selections_in_range<'a, D>( &'a self, - set_id: SelectionSetId, range: Range, cx: &'a mut MutableAppContext, - ) -> Vec> { - todo!() - // let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - // let buffer = self.buffer.read(cx); - - // let pending_selection = if set_id == self.selection_set_id { - // self.pending_selection.as_ref().and_then(|pending| { - // let selection_start = pending.selection.start.to_display_point(&display_map); - // let selection_end = pending.selection.end.to_display_point(&display_map); - // if selection_start <= range.end || selection_end <= range.end { - // Some(Selection { - // id: pending.selection.id, - // start: selection_start, - // end: selection_end, - // reversed: pending.selection.reversed, - // goal: pending.selection.goal, - // }) - // } else { - // None - // } - // }) - // } else { - // None - // }; - - // let range = (range.start.to_offset(&display_map, Bias::Left), Bias::Left) - // ..(range.end.to_offset(&display_map, Bias::Left), Bias::Right); - // buffer - // .selection_set(set_id, cx) - // .unwrap() - // .intersecting_selections::(range, &buffer.read(cx)) - // .map(move |s| Selection { - // id: s.id, - // start: s.start.to_display_point(&display_map), - // end: s.end.to_display_point(&display_map), - // reversed: s.reversed, - // goal: s.goal, - // }) - // .chain(pending_selection) - // .collect() - } - - pub fn selections<'a, D>(&self, cx: &'a AppContext) -> Vec> + ) -> HashMap>> + where + D: TextDimension + Ord + Sub, + { + let mut result = HashMap::new(); + + let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + let buffer = &display_map.buffer_snapshot; + let range = range.start.to_offset(&display_map, Bias::Left) + ..range.end.to_offset(&display_map, Bias::Left); + + let 
anchor_range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end); + let start_ix = match self + .selections + .binary_search_by(|probe| probe.end.cmp(&anchor_range.start, &buffer).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match self + .selections + .binary_search_by(|probe| probe.start.cmp(&anchor_range.end, &buffer).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + + let selections = &self.selections[start_ix..end_ix]; + let mut summaries = buffer + .summaries_for_anchors::(selections.iter().flat_map(|s| [&s.start, &s.end])) + .into_iter(); + + result.insert( + self.replica_id(cx), + selections + .iter() + .map(|s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }) + .collect(), + ); + + for (replica_id, selections) in display_map + .buffer_snapshot + .remote_selections_in_range(range) + { + result.insert(replica_id, selections.collect()); + } + + result + } + + pub fn local_selections<'a, D>(&self, cx: &'a AppContext) -> Vec> where D: 'a + TextDimension + Ord + Sub, { @@ -3067,7 +3086,7 @@ impl Editor { }) .peekable(); - let mut pending_selection = self.pending_selection::(&buffer, cx); + let mut pending_selection = self.pending_selection::(&buffer); iter::from_fn(move || { if let Some(pending) = pending_selection.as_mut() { @@ -3098,18 +3117,16 @@ impl Editor { fn pending_selection>( &self, snapshot: &MultiBufferSnapshot, - cx: &AppContext, ) -> Option> { self.pending_selection .as_ref() - .map(|pending| self.resolve_selection(&pending.selection, &snapshot, cx)) + .map(|pending| self.resolve_selection(&pending.selection, &snapshot)) } fn resolve_selection>( &self, selection: &Selection, buffer: &MultiBufferSnapshot, - cx: &AppContext, ) -> Selection { Selection { id: selection.id, @@ -3120,7 +3137,7 @@ impl Editor { } } - fn selection_count<'a>(&self, cx: &'a AppContext) -> usize { + fn selection_count<'a>(&self) -> usize { let mut count = self.selections.len(); if self.pending_selection.is_some() { count += 1; @@ -3131,27 +3148,25 @@ impl Editor { pub fn oldest_selection>( &self, snapshot: &MultiBufferSnapshot, - cx: &AppContext, ) -> Selection { self.selections .iter() .min_by_key(|s| s.id) - .map(|selection| self.resolve_selection(selection, snapshot, cx)) - .or_else(|| self.pending_selection(snapshot, cx)) + .map(|selection| self.resolve_selection(selection, snapshot)) + .or_else(|| self.pending_selection(snapshot)) .unwrap() } pub fn newest_selection>( &self, snapshot: &MultiBufferSnapshot, - cx: &AppContext, ) -> Selection { - self.pending_selection(snapshot, cx) + self.pending_selection(snapshot) .or_else(|| { self.selections .iter() .min_by_key(|s| s.id) - .map(|selection| self.resolve_selection(selection, snapshot, cx)) + .map(|selection| self.resolve_selection(selection, snapshot)) }) .unwrap() } @@ -3249,7 +3264,7 @@ impl Editor { pub fn fold(&mut self, _: &Fold, cx: &mut ViewContext) { let mut fold_ranges = Vec::new(); - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); for selection in selections { let range = selection.display_range(&display_map).sorted(); @@ -3272,7 +3287,7 @@ impl Editor { } pub fn unfold(&mut self, _: &Unfold, cx: &mut ViewContext) { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = 
&display_map.buffer_snapshot; let ranges = selections @@ -3332,7 +3347,7 @@ impl Editor { } pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext) { - let selections = self.selections::(cx); + let selections = self.local_selections::(cx); let ranges = selections.into_iter().map(|s| s.start..s.end); self.fold_ranges(ranges, cx); } diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index cbc9ff223d9ae33a16bf4afe6372e904cdf857f0..6989cc0fccb65d3ba0c5b768fc17b296f9467322 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -249,7 +249,7 @@ impl CursorPosition { self.selected_count = 0; let mut last_selection: Option> = None; - for selection in editor.selections::(cx) { + for selection in editor.local_selections::(cx) { self.selected_count += selection.end - selection.start; if last_selection .as_ref() @@ -323,9 +323,7 @@ impl DiagnosticMessage { fn update(&mut self, editor: ViewHandle, cx: &mut ViewContext) { let editor = editor.read(cx); let buffer = editor.buffer().read(cx); - let cursor_position = editor - .newest_selection::(&buffer.read(cx), cx) - .head(); + let cursor_position = editor.newest_selection::(&buffer.read(cx)).head(); let new_diagnostic = buffer .read(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1e93c16f7779cba32509133c76c0430f5409bb7e..51879bcf63c901c99939a96ef7be332602c5dbe6 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -7,10 +7,9 @@ use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ - Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, ToOffset as _, ToPoint as _, TransactionId, }; -pub use selection::SelectionSet; use std::{ cell::{Ref, RefCell}, cmp, io, @@ -24,7 +23,7 @@ use text::{ locator::Locator, rope::TextDimension, subscription::{Subscription, Topic}, - AnchorRangeExt as _, Edit, Point, PointUtf16, SelectionSetId, TextSummary, + AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary, }; use theme::SyntaxTheme; @@ -36,7 +35,6 @@ pub struct MultiBuffer { snapshot: RefCell, buffers: HashMap, subscriptions: Topic, - selection_sets: HashMap, singleton: bool, replica_id: ReplicaId, } @@ -104,7 +102,6 @@ impl MultiBuffer { snapshot: Default::default(), buffers: Default::default(), subscriptions: Default::default(), - selection_sets: Default::default(), singleton: false, replica_id, } @@ -937,6 +934,19 @@ impl MultiBufferSnapshot { } None } + + pub fn remote_selections_in_range<'a, I, O>( + &'a self, + range: Range, + ) -> impl 'a + Iterator>)> + where + I: ToOffset, + O: TextDimension, + { + self.as_singleton() + .unwrap() + .remote_selections_in_range(range.start.to_offset(self)..range.end.to_offset(self)) + } } impl Excerpt { diff --git a/crates/editor/src/multi_buffer/selection.rs b/crates/editor/src/multi_buffer/selection.rs index 3a4369b3da9527ffdaa00aa84606f2e1acd78c01..86d36ae16b690f1700468d13da4169a47d264c76 100644 --- a/crates/editor/src/multi_buffer/selection.rs +++ b/crates/editor/src/multi_buffer/selection.rs @@ -1,85 +1,6 @@ -use super::{Anchor, MultiBufferSnapshot, ToOffset}; -use std::{ - ops::{Range, Sub}, - sync::Arc, -}; -use sum_tree::Bias; -use text::{rope::TextDimension, Selection, 
SelectionSetId}; - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct SelectionSet { - pub id: SelectionSetId, - pub active: bool, - pub selections: Arc<[Selection]>, -} - -impl SelectionSet { - pub fn len(&self) -> usize { - self.selections.len() - } - - pub fn selections<'a, D>( - &'a self, - snapshot: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension + Ord + Sub, - { - resolve_selections(&self.selections, snapshot) - } - - pub fn intersecting_selections<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - snapshot: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension + Ord + Sub, - I: 'a + ToOffset, - { - let start = snapshot.anchor_at(range.start.0, range.start.1); - let end = snapshot.anchor_at(range.end.0, range.end.1); - let start_ix = match self - .selections - .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) - { - Ok(ix) | Err(ix) => ix, - }; - let end_ix = match self - .selections - .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) - { - Ok(ix) | Err(ix) => ix, - }; - resolve_selections(&self.selections[start_ix..end_ix], snapshot) - } - - pub fn oldest_selection<'a, D>( - &'a self, - snapshot: &'a MultiBufferSnapshot, - ) -> Option> - where - D: TextDimension + Ord + Sub, - { - self.selections - .iter() - .min_by_key(|selection| selection.id) - .map(|selection| resolve_selection(selection, snapshot)) - } - - pub fn newest_selection<'a, D>( - &'a self, - snapshot: &'a MultiBufferSnapshot, - ) -> Option> - where - D: TextDimension + Ord + Sub, - { - self.selections - .iter() - .max_by_key(|selection| selection.id) - .map(|selection| resolve_selection(selection, snapshot)) - } -} +use super::{Anchor, MultiBufferSnapshot}; +use std::ops::Sub; +use text::{rope::TextDimension, Selection}; fn resolve_selection<'a, D>( selection: &'a Selection, diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 4c021ac1a44d9b085c6240d745cee9a59ff9d3d0..85c5a6439b350807023e17d9b6e025aa54a2440f 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -67,13 +67,14 @@ impl GoToLine { let (restore_state, cursor_point, max_point) = active_editor.update(cx, |editor, cx| { let restore_state = Some(RestoreState { scroll_position: editor.scroll_position(cx), - selections: editor.selections::(cx), + selections: editor.local_selections::(cx), }); + let buffer = editor.buffer().read(cx).read(cx); ( restore_state, - editor.newest_selection(cx).head(), - editor.buffer().read(cx).read(cx).max_point(), + editor.newest_selection(&buffer).head(), + buffer.max_point(), ) }); @@ -143,7 +144,7 @@ impl GoToLine { let display_point = point.to_display_point(&snapshot); active_editor.select_ranges([point..point], Some(Autoscroll::Center), cx); active_editor.set_highlighted_row(Some(display_point.row())); - Some(active_editor.newest_selection(cx)) + Some(active_editor.newest_selection(&snapshot.buffer_snapshot)) }); cx.notify(); } @@ -162,7 +163,9 @@ impl Entity for GoToLine { self.active_editor.update(cx, |editor, cx| { editor.set_highlighted_row(None); if let Some((line_selection, restore_state)) = line_selection.zip(restore_state) { - if line_selection.id == editor.newest_selection::(cx).id { + let newest_selection = + editor.newest_selection::(&editor.buffer().read(cx).read(cx)); + if line_selection.id == newest_selection.id { editor.set_scroll_position(restore_state.scroll_position, cx); editor.update_selections(restore_state.selections, None, cx); } diff 
--git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b408bf589b0e21ac40cee7e8d772443bb30ad623..c0f968cb271f649ddd5cede478d454b9f878e3b0 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -30,7 +30,8 @@ use std::{ time::{Duration, Instant, SystemTime, UNIX_EPOCH}, vec, }; -use text::operation_queue::OperationQueue; +use sum_tree::TreeMap; +use text::{operation_queue::OperationQueue, rope::TextDimension}; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -64,6 +65,7 @@ pub struct Buffer { syntax_tree: Mutex>, parsing_in_background: bool, parse_count: usize, + remote_selections: TreeMap]>>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, language_server: Option, @@ -76,6 +78,7 @@ pub struct BufferSnapshot { text: text::BufferSnapshot, tree: Option, diagnostics: DiagnosticSet, + remote_selections: TreeMap]>>, diagnostics_update_count: usize, is_parsing: bool, language: Option>, @@ -112,6 +115,15 @@ pub enum Operation { diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, + UpdateSelections { + replica_id: ReplicaId, + selections: Arc<[Selection]>, + lamport_timestamp: clock::Lamport, + }, + RemoveSelections { + replica_id: ReplicaId, + lamport_timestamp: clock::Lamport, + }, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -329,6 +341,7 @@ impl Buffer { autoindent_requests: Default::default(), pending_autoindent: Default::default(), language: None, + remote_selections: Default::default(), diagnostics: Default::default(), diagnostics_update_count: 0, language_server: None, @@ -342,6 +355,7 @@ impl Buffer { BufferSnapshot { text: self.text.snapshot(), tree: self.syntax_tree(), + remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), diagnostics_update_count: self.diagnostics_update_count, is_parsing: self.parsing_in_background, @@ -1286,6 +1300,10 @@ impl Buffer { && self.text.can_resolve(&diagnostic.range.end) }) } + Operation::UpdateSelections { selections, .. } => selections + .iter() + .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), + Operation::RemoveSelections { .. } => true, } } @@ -1297,6 +1315,21 @@ impl Buffer { Operation::UpdateDiagnostics { diagnostics, .. 
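// Editor's note: a deliberately simplified, standalone sketch of why the new
// UpdateSelections/RemoveSelections variants get a can_apply_op check above.
// Operations that reference anchors are deferred until the local replica has
// seen the edits those anchors depend on. The dependency is modelled here as a
// single integer version instead of Zed's vector clocks and anchor resolution;
// `Op` and `ReplicaState` are hypothetical stand-ins, not types from the patch.
#[derive(Debug)]
struct Op {
    required_version: u32,
    payload: &'static str,
}

#[derive(Default)]
struct ReplicaState {
    version: u32,
    deferred: Vec<Op>,
    applied: Vec<&'static str>,
}

impl ReplicaState {
    fn can_apply(&self, op: &Op) -> bool {
        self.version >= op.required_version
    }

    fn apply_or_defer(&mut self, op: Op) {
        if self.can_apply(&op) {
            self.applied.push(op.payload);
        } else {
            self.deferred.push(op);
        }
    }

    // After local progress (e.g. applying an edit), retry anything deferred.
    fn advance_to(&mut self, version: u32) {
        self.version = version;
        let deferred = std::mem::take(&mut self.deferred);
        for op in deferred {
            self.apply_or_defer(op);
        }
    }
}

fn main() {
    let mut state = ReplicaState::default();
    state.apply_or_defer(Op { required_version: 2, payload: "update selections" });
    assert!(state.applied.is_empty()); // deferred: depends on unseen edits
    state.advance_to(2);
    assert_eq!(state.applied, ["update selections"]);
}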
} => { self.apply_diagnostic_update(diagnostics, cx); } + Operation::UpdateSelections { + replica_id, + selections, + lamport_timestamp, + } => { + self.remote_selections.insert(replica_id, selections); + self.text.observe_lamport_timestamp(lamport_timestamp); + } + Operation::RemoveSelections { + replica_id: set_id, + lamport_timestamp, + } => { + self.remote_selections.remove(&set_id); + self.text.observe_lamport_timestamp(lamport_timestamp); + } } } @@ -1323,7 +1356,7 @@ impl Buffer { } pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { - self.text.remove_peer(replica_id); + self.remote_selections.remove(&replica_id); cx.notify(); } @@ -1615,6 +1648,43 @@ impl BufferSnapshot { .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) } + pub fn remote_selections_in_range<'a, I, O>( + &'a self, + range: Range, + ) -> impl 'a + Iterator>)> + where + I: ToOffset, + O: TextDimension, + { + let range = self.anchor_before(range.start)..self.anchor_after(range.end); + self.remote_selections + .iter() + .map(move |(replica_id, selections)| { + let start_ix = match selections + .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + let end_ix = match selections + .binary_search_by(|probe| probe.start.cmp(&range.end, self).unwrap()) + { + Ok(ix) | Err(ix) => ix, + }; + + let selections = &selections[start_ix..end_ix]; + let mut summaries = + self.summaries_for_anchors(selections.iter().flat_map(|s| [&s.start, &s.end])); + let resolved = selections.iter().map(move |s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }); + (*replica_id, resolved) + }) + } + pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, @@ -1650,6 +1720,7 @@ impl Clone for BufferSnapshot { Self { text: self.text.clone(), tree: self.tree.clone(), + remote_selections: self.remote_selections.clone(), diagnostics: self.diagnostics.clone(), diagnostics_update_count: self.diagnostics_update_count, is_parsing: self.is_parsing, @@ -1889,6 +1960,12 @@ impl operation_queue::Operation for Operation { } Operation::UpdateDiagnostics { lamport_timestamp, .. + } + | Operation::UpdateSelections { + lamport_timestamp, .. + } + | Operation::RemoveSelections { + lamport_timestamp, .. 
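// Editor's note: a minimal sketch (plain offsets instead of Anchors) of the
// lookup pattern used by remote_selections_in_range above. Selections are kept
// sorted; the first candidate is found by binary-searching selection *ends*
// against the query start, the last by binary-searching selection *starts*
// against the query end, and only that slice is resolved. This mirrors the
// binary_search_by calls in the patch but is not the patch's exact code.
use std::ops::Range;

fn selections_intersecting(
    selections: &[Range<usize>], // sorted, non-overlapping
    query: Range<usize>,
) -> &[Range<usize>] {
    let start_ix = match selections.binary_search_by(|probe| probe.end.cmp(&query.start)) {
        Ok(ix) | Err(ix) => ix,
    };
    let end_ix = match selections.binary_search_by(|probe| probe.start.cmp(&query.end)) {
        Ok(ix) | Err(ix) => ix,
    };
    &selections[start_ix..end_ix]
}

fn main() {
    let selections = vec![0..2, 3..5, 8..10];
    assert_eq!(selections_intersecting(&selections, 4..9), &[3..5, 8..10]);
}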
} => *lamport_timestamp, } } diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 6f36c7dc0bccdf6afaa60ba05bf7cb898c86f8ba..8921ffa42154586964f918bda081f6e36e9062db 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -41,13 +41,12 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { .collect(), version: From::from(&undo.version), }), - Operation::Buffer(text::Operation::UpdateSelections { - set_id, + Operation::UpdateSelections { + replica_id, selections, lamport_timestamp, - }) => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, + } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { + replica_id: *replica_id as u32, lamport_timestamp: lamport_timestamp.value, selections: selections .iter() @@ -59,24 +58,13 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { }) .collect(), }), - Operation::Buffer(text::Operation::RemoveSelections { - set_id, + Operation::RemoveSelections { + replica_id, lamport_timestamp, - }) => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections { - replica_id: set_id.replica_id as u32, - local_timestamp: set_id.value, + } => proto::operation::Variant::RemoveSelections(proto::operation::RemoveSelections { + replica_id: *replica_id as u32, lamport_timestamp: lamport_timestamp.value, }), - Operation::Buffer(text::Operation::SetActiveSelections { - set_id, - lamport_timestamp, - }) => proto::operation::Variant::SetActiveSelections( - proto::operation::SetActiveSelections { - replica_id: lamport_timestamp.replica_id as u32, - local_timestamp: set_id.map(|set_id| set_id.value), - lamport_timestamp: lamport_timestamp.value, - }, - ), Operation::UpdateDiagnostics { diagnostics, lamport_timestamp, @@ -108,22 +96,16 @@ pub fn serialize_edit_operation(operation: &EditOperation) -> proto::operation:: } } -pub fn serialize_selection_set(set: &SelectionSet) -> proto::SelectionSet { - proto::SelectionSet { - replica_id: set.id.replica_id as u32, - lamport_timestamp: set.id.value as u32, - is_active: set.active, - selections: set - .selections - .iter() - .map(|selection| proto::Selection { - id: selection.id as u64, - start: Some(serialize_anchor(&selection.start)), - end: Some(serialize_anchor(&selection.end)), - reversed: selection.reversed, - }) - .collect(), - } +pub fn serialize_selections(selections: &Arc<[Selection]>) -> Vec { + selections + .iter() + .map(|selection| proto::Selection { + id: selection.id as u64, + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), + reversed: selection.reversed, + }) + .collect() } pub fn serialize_diagnostics<'a>( @@ -215,42 +197,22 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }) .collect::>(); - Operation::Buffer(text::Operation::UpdateSelections { - set_id: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.local_timestamp, - }, + Operation::UpdateSelections { + replica_id: message.replica_id as ReplicaId, lamport_timestamp: clock::Lamport { replica_id: message.replica_id as ReplicaId, value: message.lamport_timestamp, }, selections: Arc::from(selections), - }) - } - proto::operation::Variant::RemoveSelections(message) => { - Operation::Buffer(text::Operation::RemoveSelections { - set_id: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: 
message.local_timestamp, - }, - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - }) - } - proto::operation::Variant::SetActiveSelections(message) => { - Operation::Buffer(text::Operation::SetActiveSelections { - set_id: message.local_timestamp.map(|value| clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value, - }), - lamport_timestamp: clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - }) + } } + proto::operation::Variant::RemoveSelections(message) => Operation::RemoveSelections { + replica_id: message.replica_id as ReplicaId, + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + }, proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics { diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)), lamport_timestamp: clock::Lamport { @@ -280,28 +242,21 @@ pub fn deserialize_edit_operation(edit: proto::operation::Edit) -> EditOperation } } -pub fn deserialize_selection_set(set: proto::SelectionSet) -> SelectionSet { - SelectionSet { - id: clock::Lamport { - replica_id: set.replica_id as u16, - value: set.lamport_timestamp, - }, - active: set.is_active, - selections: Arc::from( - set.selections - .into_iter() - .filter_map(|selection| { - Some(Selection { - id: selection.id as usize, - start: deserialize_anchor(selection.start?)?, - end: deserialize_anchor(selection.end?)?, - reversed: selection.reversed, - goal: SelectionGoal::None, - }) +pub fn deserialize_selections(selections: Vec) -> Arc<[Selection]> { + Arc::from( + selections + .into_iter() + .filter_map(|selection| { + Some(Selection { + id: selection.id as usize, + start: deserialize_anchor(selection.start?)?, + end: deserialize_anchor(selection.end?)?, + reversed: selection.reversed, + goal: SelectionGoal::None, }) - .collect::>(), - ), - } + }) + .collect::>(), + ) } pub fn deserialize_diagnostics( diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 7e7a180cd2acb5b64492f534f89b626bd29bffe5..7a06abb1226f8f51bc80404f0b1cf3ed51ce589c 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -286,8 +286,7 @@ message Operation { Undo undo = 2; UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; - SetActiveSelections set_active_selections = 5; - UpdateDiagnostics update_diagnostics = 6; + UpdateDiagnostics update_diagnostics = 5; } message Edit { @@ -316,20 +315,12 @@ message Operation { message UpdateSelections { uint32 replica_id = 1; - uint32 local_timestamp = 2; uint32 lamport_timestamp = 3; repeated Selection selections = 4; } message RemoveSelections { uint32 replica_id = 1; - uint32 local_timestamp = 2; - uint32 lamport_timestamp = 3; - } - - message SetActiveSelections { - uint32 replica_id = 1; - optional uint32 local_timestamp = 2; uint32 lamport_timestamp = 3; } } diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 75405cbef43478e4b83f0caf54a6dae36b3d0cf9..3619d7870e60028d3f132ff120850206cecbef2c 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -2,6 +2,7 @@ use std::{cmp::Ordering, fmt::Debug}; use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary}; +#[derive(Clone)] pub struct TreeMap(SumTree>) where K: Clone + Debug + Default, @@ -19,7 +20,7 @@ pub struct MapKey(K); #[derive(Clone, Debug, Default)] pub 
struct MapKeyRef<'a, K>(Option<&'a K>); -impl TreeMap { +impl TreeMap { pub fn get<'a>(&self, key: &'a K) -> Option<&V> { let mut cursor = self.0.cursor::>(); let key = MapKeyRef(Some(key)); @@ -49,6 +50,20 @@ impl TreeMap self.0 = new_tree; removed } + + pub fn iter<'a>(&'a self) -> impl 'a + Iterator { + self.0.iter().map(|entry| (&entry.key, &entry.value)) + } +} + +impl Default for TreeMap +where + K: Clone + Debug + Default, + V: Clone + Debug, +{ + fn default() -> Self { + Self(Default::default()) + } } impl Item for MapEntry diff --git a/crates/text/src/selection.rs b/crates/text/src/selection.rs index 0afbf8cf74ad3239984be18243933d0eb420fc24..4e7d6f52367094fa5bbf07cd8f55250259bd7601 100644 --- a/crates/text/src/selection.rs +++ b/crates/text/src/selection.rs @@ -1,10 +1,6 @@ use crate::Anchor; use crate::{rope::TextDimension, BufferSnapshot, ToOffset, ToPoint}; -use std::{cmp::Ordering, ops::Range, sync::Arc}; -use sum_tree::Bias; - -pub type SelectionSetId = clock::Lamport; -pub type SelectionsVersion = usize; +use std::cmp::Ordering; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum SelectionGoal { @@ -22,20 +18,6 @@ pub struct Selection { pub goal: SelectionGoal, } -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct SelectionSet { - pub id: SelectionSetId, - pub active: bool, - pub selections: Arc<[Selection]>, -} - -#[derive(Debug, Eq, PartialEq)] -pub struct SelectionState { - pub id: usize, - pub reversed: bool, - pub goal: SelectionGoal, -} - impl Selection { pub fn head(&self) -> T { if self.reversed { @@ -90,78 +72,3 @@ impl Selection { } } } - -impl SelectionSet { - pub fn len(&self) -> usize { - self.selections.len() - } - - pub fn selections<'a, D>( - &'a self, - snapshot: &'a BufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension, - { - let anchors = self - .selections - .iter() - .flat_map(|selection| [&selection.start, &selection.end].into_iter()); - let mut positions = snapshot.summaries_for_anchors::(anchors); - self.selections.iter().map(move |selection| Selection { - start: positions.next().unwrap(), - end: positions.next().unwrap(), - goal: selection.goal, - reversed: selection.reversed, - id: selection.id, - }) - } - - pub fn intersecting_selections<'a, D, I>( - &'a self, - range: Range<(I, Bias)>, - snapshot: &'a BufferSnapshot, - ) -> impl 'a + Iterator> - where - D: TextDimension, - I: 'a + ToOffset, - { - let start = snapshot.anchor_at(range.start.0, range.start.1); - let end = snapshot.anchor_at(range.end.0, range.end.1); - let start_ix = match self - .selections - .binary_search_by(|probe| probe.end.cmp(&start, snapshot).unwrap()) - { - Ok(ix) | Err(ix) => ix, - }; - let end_ix = match self - .selections - .binary_search_by(|probe| probe.start.cmp(&end, snapshot).unwrap()) - { - Ok(ix) | Err(ix) => ix, - }; - self.selections[start_ix..end_ix] - .iter() - .map(|s| s.resolve(snapshot)) - } - - pub fn oldest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option> - where - D: TextDimension, - { - self.selections - .iter() - .min_by_key(|s| s.id) - .map(|s| s.resolve(snapshot)) - } - - pub fn newest_selection<'a, D>(&'a self, snapshot: &'a BufferSnapshot) -> Option> - where - D: TextDimension, - { - self.selections - .iter() - .max_by_key(|s| s.id) - .map(|s| s.resolve(snapshot)) - } -} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 5da3c7043e617065e977203f9e9d014ed804579b..d715a3fbadeea7d79ca99895ce5b743f53bb04c8 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -44,7 +44,6 @@ pub 
struct Buffer { snapshot: BufferSnapshot, last_edit: clock::Local, history: History, - selection_sets: HashMap, deferred_ops: OperationQueue, deferred_replicas: HashSet, replica_id: ReplicaId, @@ -413,19 +412,6 @@ pub enum Operation { undo: UndoOperation, lamport_timestamp: clock::Lamport, }, - UpdateSelections { - set_id: SelectionSetId, - selections: Arc<[Selection]>, - lamport_timestamp: clock::Lamport, - }, - RemoveSelections { - set_id: SelectionSetId, - lamport_timestamp: clock::Lamport, - }, - SetActiveSelections { - set_id: Option, - lamport_timestamp: clock::Lamport, - }, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -487,7 +473,6 @@ impl Buffer { }, last_edit: clock::Local::default(), history, - selection_sets: Default::default(), deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), replica_id, @@ -514,6 +499,10 @@ impl Buffer { self.lamport_clock } + pub fn observe_lamport_timestamp(&mut self, timestamp: clock::Lamport) { + self.lamport_clock.observe(timestamp); + } + pub fn remote_id(&self) -> u64 { self.remote_id } @@ -754,47 +743,6 @@ impl Buffer { self.lamport_clock.observe(lamport_timestamp); } } - Operation::UpdateSelections { - set_id, - selections, - lamport_timestamp, - } => { - if let Some(set) = self.selection_sets.get_mut(&set_id) { - set.selections = selections; - } else { - self.selection_sets.insert( - set_id, - SelectionSet { - id: set_id, - selections, - active: false, - }, - ); - } - self.lamport_clock.observe(lamport_timestamp); - } - Operation::RemoveSelections { - set_id, - lamport_timestamp, - } => { - self.selection_sets.remove(&set_id); - self.lamport_clock.observe(lamport_timestamp); - } - Operation::SetActiveSelections { - set_id, - lamport_timestamp, - } => { - for (id, set) in &mut self.selection_sets { - if id.replica_id == lamport_timestamp.replica_id { - if Some(*id) == set_id { - set.active = true; - } else { - set.active = false; - } - } - } - self.lamport_clock.observe(lamport_timestamp); - } } Ok(()) } @@ -1107,13 +1055,6 @@ impl Buffer { match op { Operation::Edit(edit) => self.version.ge(&edit.version), Operation::Undo { undo, .. } => self.version.ge(&undo.version), - Operation::UpdateSelections { selections, .. } => selections - .iter() - .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), - Operation::RemoveSelections { .. } => true, - Operation::SetActiveSelections { set_id, .. } => { - set_id.map_or(true, |set_id| self.selection_sets.contains_key(&set_id)) - } } } } @@ -1151,11 +1092,6 @@ impl Buffer { } } - pub fn remove_peer(&mut self, replica_id: ReplicaId) { - self.selection_sets - .retain(|set_id, _| set_id.replica_id != replica_id) - } - pub fn base_text(&self) -> &Arc { &self.history.base_text } @@ -2007,15 +1943,6 @@ impl operation_queue::Operation for Operation { Operation::Undo { lamport_timestamp, .. } => *lamport_timestamp, - Operation::UpdateSelections { - lamport_timestamp, .. - } => *lamport_timestamp, - Operation::RemoveSelections { - lamport_timestamp, .. - } => *lamport_timestamp, - Operation::SetActiveSelections { - lamport_timestamp, .. 
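// Editor's note: a minimal sketch of the Lamport-clock "observe" step that the
// new observe_lamport_timestamp method above exposes for selection operations.
// This is the textbook receive rule (take the max of the local and remote
// values, then advance), not necessarily the exact implementation in the
// `clock` crate; `Lamport` here is a stand-in type.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Lamport {
    replica_id: u16,
    value: u32,
}

impl Lamport {
    fn new(replica_id: u16) -> Self {
        Self { replica_id, value: 0 }
    }

    // Advance the local clock for a locally generated event.
    fn tick(&mut self) -> Self {
        self.value += 1;
        *self
    }

    // Fold a remote timestamp into the local clock so that events this replica
    // generates afterwards are ordered after the observed one.
    fn observe(&mut self, remote: Lamport) {
        self.value = self.value.max(remote.value) + 1;
    }
}

fn main() {
    let mut a = Lamport::new(0);
    let mut b = Lamport::new(1);
    assert_ne!(a.replica_id, b.replica_id);
    let remote = b.tick(); // replica 1 produces an operation at value 1
    a.observe(remote);
    assert!(a.tick().value > remote.value); // later local events sort after it
}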
- } => *lamport_timestamp, } } } From 1e7184ea0766dc381a63ccdcb63a05be54dc8813 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sat, 11 Dec 2021 13:42:46 -0700 Subject: [PATCH 057/196] Get selections rendering again when local selections are owned by Editor --- Cargo.lock | 1 + crates/editor/Cargo.toml | 1 + crates/editor/src/editor.rs | 129 ++++++++++++++++------- crates/editor/src/element.rs | 49 ++++----- crates/editor/src/multi_buffer.rs | 68 +++++++----- crates/editor/src/multi_buffer/anchor.rs | 17 +-- crates/language/src/buffer.rs | 22 +--- 7 files changed, 169 insertions(+), 118 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 35d21eb7862a1aed599c4b4821dfac6b75cb42a4..75e353144de2b4e89c48646b85086b79eda60103 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1531,6 +1531,7 @@ dependencies = [ "ctor", "env_logger", "gpui", + "itertools", "language", "lazy_static", "log", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 24dbcff3da878a303964edc1f1f5a447450f275a..96d302d78ba979e921043374a086efcac58898bf 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -28,6 +28,7 @@ util = { path = "../util" } workspace = { path = "../workspace" } aho-corasick = "0.7" anyhow = "1.0" +itertools = "0.10" lazy_static = "1.4" log = "0.4" parking_lot = "0.11" diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 07a01f73638a4b40aa0e6951cfcdb313449d602e..88b2d5837415fd91570a3af1de17ce9f88626901 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3006,61 +3006,90 @@ impl Editor { } } - pub fn all_selections_in_range<'a, D>( + pub fn visible_selections<'a>( &'a self, - range: Range, + display_rows: Range, cx: &'a mut MutableAppContext, - ) -> HashMap>> - where - D: TextDimension + Ord + Sub, - { - let mut result = HashMap::new(); - + ) -> HashMap>> { let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = &display_map.buffer_snapshot; - let range = range.start.to_offset(&display_map, Bias::Left) - ..range.end.to_offset(&display_map, Bias::Left); - let anchor_range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end); + let start = if display_rows.start == 0 { + Anchor::min() + } else { + buffer.anchor_before( + DisplayPoint::new(display_rows.start, 0).to_offset(&display_map, Bias::Left), + ) + }; + let end = if display_rows.end > display_map.max_point().row() { + Anchor::max() + } else { + buffer.anchor_before( + DisplayPoint::new(display_rows.end, 0).to_offset(&display_map, Bias::Right), + ) + }; + + dbg!(&start, &end); + dbg!(&self.selections); + let start_ix = match self .selections - .binary_search_by(|probe| probe.end.cmp(&anchor_range.start, &buffer).unwrap()) + .binary_search_by(|probe| probe.end.cmp(&start, &buffer).unwrap()) { Ok(ix) | Err(ix) => ix, }; let end_ix = match self .selections - .binary_search_by(|probe| probe.start.cmp(&anchor_range.end, &buffer).unwrap()) + .binary_search_by(|probe| probe.start.cmp(&end, &buffer).unwrap()) { - Ok(ix) | Err(ix) => ix, + Ok(ix) => ix + 1, + Err(ix) => ix, }; - let selections = &self.selections[start_ix..end_ix]; - let mut summaries = buffer - .summaries_for_anchors::(selections.iter().flat_map(|s| [&s.start, &s.end])) - .into_iter(); + dbg!(start_ix, end_ix); + + fn display_selection( + selection: &Selection, + display_map: &DisplaySnapshot, + ) -> Selection { + Selection { + id: selection.id, + start: selection.start.to_display_point(&display_map), + end: selection.end.to_display_point(&display_map), + reversed: 
selection.reversed, + goal: selection.goal, + } + } + + let mut result = HashMap::new(); result.insert( self.replica_id(cx), - selections + self.selections[start_ix..end_ix] .iter() - .map(|s| Selection { - id: s.id, - start: summaries.next().unwrap(), - end: summaries.next().unwrap(), - reversed: s.reversed, - goal: s.goal, - }) + .chain( + self.pending_selection + .as_ref() + .map(|pending| &pending.selection), + ) + .map(|s| display_selection(s, &display_map)) .collect(), ); for (replica_id, selections) in display_map .buffer_snapshot - .remote_selections_in_range(range) + .remote_selections_in_range(start..end) { - result.insert(replica_id, selections.collect()); + result.insert( + replica_id, + selections + .map(|s| display_selection(&s, &display_map)) + .collect(), + ); } + dbg!(&result); + result } @@ -3137,6 +3166,31 @@ impl Editor { } } + fn resolve_selections< + 'a, + D: TextDimension + Ord + Sub, + I: 'a + Iterator>, + >( + &self, + selections: I, + buffer: &MultiBufferSnapshot, + ) -> impl 'a + Iterator> { + use itertools::Itertools as _; + + let (to_map, to_summarize) = selections.tee(); + let mut summaries = buffer + .summaries_for_anchors::(to_summarize.flat_map(|s| [&s.start, &s.end])) + .into_iter(); + + to_map.map(move |s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }) + } + fn selection_count<'a>(&self) -> usize { let mut count = self.selections.len(); if self.pending_selection.is_some() { @@ -3729,8 +3783,6 @@ pub fn diagnostic_style( #[cfg(test)] mod tests { - use std::mem; - use super::*; use language::LanguageConfig; use text::Point; @@ -3786,6 +3838,7 @@ mod tests { view.update_selection(DisplayPoint::new(0, 0), 0, Vector2F::zero(), cx); }); + eprintln!(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); assert_eq!( editor.update(cx, |view, cx| view.selection_ranges(cx)), [ @@ -3984,6 +4037,8 @@ mod tests { }); view.update(cx, |view, cx| { + dbg!(&view.selections); + assert_eq!( view.selection_ranges(cx), &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] @@ -5691,16 +5746,16 @@ mod tests { impl Editor { fn selection_ranges(&self, cx: &mut MutableAppContext) -> Vec> { - let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - self.selections - .iter() + self.visible_selections(0..self.max_point(cx).row() + 1, cx) + .get(&self.replica_id(cx)) + .unwrap() + .into_iter() .map(|s| { - let mut range = - s.start.to_display_point(&snapshot)..s.end.to_display_point(&snapshot); if s.reversed { - mem::swap(&mut range.start, &mut range.end); + s.end..s.start + } else { + s.start..s.end } - range }) .collect() } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 27b29e678c16859560f4ffd3500d7386fc2124f7..3572d839b6ea10e01aa49f8d0984b2ce99a5db6c 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -733,37 +733,28 @@ impl Element for EditorElement { let scroll_top = scroll_position.y() * line_height; let end_row = ((scroll_top + size.y()) / line_height).ceil() as u32 + 1; // Add 1 to ensure selections bleed off screen - let selections = HashMap::new(); - let active_rows = BTreeMap::new(); + let mut active_rows = BTreeMap::new(); let mut highlighted_row = None; - self.update_view(cx.app, |view, cx| { + let selections = self.update_view(cx.app, |view, cx| { highlighted_row = view.highlighted_row(); - - // TODO: Get this working with editors owning their own selections - - // for selection_set_id in 
view.active_selection_sets(cx).collect::>() { - // let replica_selections = view.intersecting_selections( - // selection_set_id, - // DisplayPoint::new(start_row, 0)..DisplayPoint::new(end_row, 0), - // cx, - // ); - // for selection in &replica_selections { - // if selection_set_id == view.selection_set_id { - // let is_empty = selection.start == selection.end; - // let selection_start = snapshot.prev_row_boundary(selection.start).0; - // let selection_end = snapshot.next_row_boundary(selection.end).0; - // for row in cmp::max(selection_start.row(), start_row) - // ..=cmp::min(selection_end.row(), end_row) - // { - // let contains_non_empty_selection = - // active_rows.entry(row).or_insert(!is_empty); - // *contains_non_empty_selection |= !is_empty; - // } - // } - // } - - // selections.insert(selection_set_id.replica_id, replica_selections); - // } + let selections = view.visible_selections(start_row..end_row, cx); + for (replica_id, selections) in &selections { + if *replica_id == view.replica_id(cx) { + for selection in selections { + let is_empty = selection.start == selection.end; + let selection_start = snapshot.prev_row_boundary(selection.start).0; + let selection_end = snapshot.next_row_boundary(selection.end).0; + for row in cmp::max(selection_start.row(), start_row) + ..=cmp::min(selection_end.row(), end_row) + { + let contains_non_empty_selection = + active_rows.entry(row).or_insert(!is_empty); + *contains_non_empty_selection |= !is_empty; + } + } + } + } + selections }); let line_number_layouts = self.layout_rows(start_row..end_row, &active_rows, &snapshot, cx); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 51879bcf63c901c99939a96ef7be332602c5dbe6..de81bce210a60c84c14a798d466680f3e8b2208b 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -7,8 +7,8 @@ use clock::ReplicaId; use collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ - Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, - ToOffset as _, ToPoint as _, TransactionId, + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, FromAnchor, + Language, Selection, ToOffset as _, ToPoint as _, TransactionId, }; use std::{ cell::{Ref, RefCell}, @@ -805,19 +805,18 @@ impl MultiBufferSnapshot { let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { let excerpt_id = &anchor.excerpt_id; - cursor.seek(&Some(excerpt_id), Bias::Left, &()); - if let Some(excerpt) = cursor.item() { - let excerpt_exists = excerpt.id == *excerpt_id; - let excerpt_anchors = std::iter::from_fn(|| { - let anchor = anchors.peek()?; - if anchor.excerpt_id == *excerpt_id { - Some(&anchors.next().unwrap().text_anchor) - } else { - None - } - }); + let excerpt_anchors = std::iter::from_fn(|| { + let anchor = anchors.peek()?; + if anchor.excerpt_id == *excerpt_id { + Some(&anchors.next().unwrap().text_anchor) + } else { + None + } + }); - if excerpt_exists { + cursor.seek_forward(&Some(excerpt_id), Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { let mut excerpt_start = D::from_text_summary(&cursor.start().text); excerpt_start.add_summary(&excerpt.header_summary(), &()); let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); @@ -834,12 +833,12 @@ impl MultiBufferSnapshot { excerpt_start }), ); - } else { - excerpt_anchors.for_each(drop); + continue; } - } else { - 
break; } + + let summary = D::from_text_summary(&cursor.start().text); + summaries.extend(excerpt_anchors.map(|_| summary.clone())); } summaries @@ -935,17 +934,34 @@ impl MultiBufferSnapshot { None } - pub fn remote_selections_in_range<'a, I, O>( + pub fn remote_selections_in_range<'a>( &'a self, - range: Range, - ) -> impl 'a + Iterator>)> - where - I: ToOffset, - O: TextDimension, - { + range: Range, + ) -> impl 'a + Iterator>)> { + // TODO + let excerpt_id = self.excerpts.first().unwrap().id.clone(); self.as_singleton() .unwrap() - .remote_selections_in_range(range.start.to_offset(self)..range.end.to_offset(self)) + .remote_selections_in_range(range.start.text_anchor..range.end.text_anchor) + .map(move |(replica_id, selections)| { + let excerpt_id = excerpt_id.clone(); + ( + replica_id, + selections.map(move |s| Selection { + id: s.id, + start: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: s.start.clone(), + }, + end: Anchor { + excerpt_id: excerpt_id.clone(), + text_anchor: s.end.clone(), + }, + reversed: s.reversed, + goal: s.goal, + }), + ) + }) } } diff --git a/crates/editor/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs index 2b8b35ef4114eea3cd8620903fb8d562fffa44a4..2cc4817a92b9f8d22de3e2e3640ea14eec669061 100644 --- a/crates/editor/src/multi_buffer/anchor.rs +++ b/crates/editor/src/multi_buffer/anchor.rs @@ -31,15 +31,16 @@ impl Anchor { pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result { let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id); if excerpt_id_cmp.is_eq() { - if self.excerpt_id == ExcerptId::max() { - return Ok(Ordering::Equal); + if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() { + Ok(Ordering::Equal) + } else { + self.text_anchor.cmp( + &other.text_anchor, + snapshot + .buffer_snapshot_for_excerpt(&self.excerpt_id) + .ok_or_else(|| anyhow!("excerpt {:?} not found", self.excerpt_id))?, + ) } - self.text_anchor.cmp( - &other.text_anchor, - snapshot - .buffer_snapshot_for_excerpt(&self.excerpt_id) - .ok_or_else(|| anyhow!("excerpt {:?} not found", self.excerpt_id))?, - ) } else { return Ok(excerpt_id_cmp); } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c0f968cb271f649ddd5cede478d454b9f878e3b0..f3eb3dc97403b92a11a046fd0b010d0e744e36b0 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1648,15 +1648,11 @@ impl BufferSnapshot { .min_by_key(|(open_range, close_range)| close_range.end - open_range.start) } - pub fn remote_selections_in_range<'a, I, O>( + pub fn remote_selections_in_range<'a>( &'a self, - range: Range, - ) -> impl 'a + Iterator>)> - where - I: ToOffset, - O: TextDimension, + range: Range, + ) -> impl 'a + Iterator>)> { - let range = self.anchor_before(range.start)..self.anchor_after(range.end); self.remote_selections .iter() .map(move |(replica_id, selections)| { @@ -1671,17 +1667,7 @@ impl BufferSnapshot { Ok(ix) | Err(ix) => ix, }; - let selections = &selections[start_ix..end_ix]; - let mut summaries = - self.summaries_for_anchors(selections.iter().flat_map(|s| [&s.start, &s.end])); - let resolved = selections.iter().map(move |s| Selection { - id: s.id, - start: summaries.next().unwrap(), - end: summaries.next().unwrap(), - reversed: s.reversed, - goal: s.goal, - }); - (*replica_id, resolved) + (*replica_id, selections[start_ix..end_ix].iter()) }) } From 44cd0be0684552a7622376f16e6140389b90060e Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sun, 12 Dec 2021 14:12:03 -0700 Subject: [PATCH 
058/196] Restore selections upon undo/redo of edits performed in the current editor --- crates/editor/src/editor.rs | 67 ++++++++++++++----------------------- 1 file changed, 26 insertions(+), 41 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 88b2d5837415fd91570a3af1de17ce9f88626901..064d504f43cdbad8413b5fdb0044c44400767a41 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1990,13 +1990,21 @@ impl Editor { } pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext) { - self.buffer.update(cx, |buffer, cx| buffer.undo(cx)); - self.request_autoscroll(Autoscroll::Fit, cx); + if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { + if let Some((selections, _)) = self.selection_history.get(&tx_id) { + self.selections = selections.clone(); + } + self.request_autoscroll(Autoscroll::Fit, cx); + } } pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext) { - self.buffer.update(cx, |buffer, cx| buffer.redo(cx)); - self.request_autoscroll(Autoscroll::Fit, cx); + if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { + if let Some((_, Some(selections))) = self.selection_history.get(&tx_id) { + self.selections = selections.clone(); + } + self.request_autoscroll(Autoscroll::Fit, cx); + } } pub fn move_left(&mut self, _: &MoveLeft, cx: &mut ViewContext) { @@ -3029,9 +3037,6 @@ impl Editor { ) }; - dbg!(&start, &end); - dbg!(&self.selections); - let start_ix = match self .selections .binary_search_by(|probe| probe.end.cmp(&start, &buffer).unwrap()) @@ -3046,8 +3051,6 @@ impl Editor { Err(ix) => ix, }; - dbg!(start_ix, end_ix); - fn display_selection( selection: &Selection, display_map: &DisplaySnapshot, @@ -3166,31 +3169,6 @@ impl Editor { } } - fn resolve_selections< - 'a, - D: TextDimension + Ord + Sub, - I: 'a + Iterator>, - >( - &self, - selections: I, - buffer: &MultiBufferSnapshot, - ) -> impl 'a + Iterator> { - use itertools::Itertools as _; - - let (to_map, to_summarize) = selections.tee(); - let mut summaries = buffer - .summaries_for_anchors::(to_summarize.flat_map(|s| [&s.start, &s.end])) - .into_iter(); - - to_map.map(move |s| Selection { - id: s.id, - start: summaries.next().unwrap(), - end: summaries.next().unwrap(), - reversed: s.reversed, - goal: s.goal, - }) - } - fn selection_count<'a>(&self) -> usize { let mut count = self.selections.len(); if self.pending_selection.is_some() { @@ -3296,15 +3274,22 @@ impl Editor { fn start_transaction(&mut self, cx: &mut ViewContext) { self.end_selection(cx); - self.buffer.update(cx, |buffer, cx| { - buffer.start_transaction(cx); - }); + if let Some(tx_id) = self + .buffer + .update(cx, |buffer, cx| buffer.start_transaction(cx)) + { + self.selection_history + .insert(tx_id, (self.selections.clone(), None)); + } } - fn end_transaction(&self, cx: &mut ViewContext) { - self.buffer.update(cx, |buffer, cx| { - buffer.end_transaction(cx); - }); + fn end_transaction(&mut self, cx: &mut ViewContext) { + if let Some(tx_id) = self + .buffer + .update(cx, |buffer, cx| buffer.end_transaction(cx)) + { + self.selection_history.get_mut(&tx_id).unwrap().1 = Some(self.selections.clone()); + } } pub fn page_up(&mut self, _: &PageUp, _: &mut ViewContext) { From cdbcbdfe6d577892fe839278e18baa1ce2920940 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Sun, 12 Dec 2021 15:04:19 -0700 Subject: [PATCH 059/196] Test undo/redo at editor layer, including selection restoration --- crates/editor/src/editor.rs | 410 ++++++++++++++------ crates/editor/src/multi_buffer.rs | 43 
+- crates/editor/src/multi_buffer/selection.rs | 42 -- crates/language/src/buffer.rs | 6 +- crates/text/src/text.rs | 4 + 5 files changed, 325 insertions(+), 180 deletions(-) delete mode 100644 crates/editor/src/multi_buffer/selection.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 064d504f43cdbad8413b5fdb0044c44400767a41..a4f2ccd22d1a3ea0ce1720c831af78034de33201 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -41,7 +41,7 @@ use std::{ ops::{Deref, Range, RangeInclusive, Sub}, rc::Rc, sync::Arc, - time::Duration, + time::{Duration, Instant}, }; use sum_tree::Bias; use text::rope::TextDimension; @@ -3091,8 +3091,6 @@ impl Editor { ); } - dbg!(&result); - result } @@ -3273,10 +3271,14 @@ impl Editor { } fn start_transaction(&mut self, cx: &mut ViewContext) { + self.start_transaction_at(Instant::now(), cx); + } + + fn start_transaction_at(&mut self, now: Instant, cx: &mut ViewContext) { self.end_selection(cx); if let Some(tx_id) = self .buffer - .update(cx, |buffer, cx| buffer.start_transaction(cx)) + .update(cx, |buffer, cx| buffer.start_transaction_at(now, cx)) { self.selection_history .insert(tx_id, (self.selections.clone(), None)); @@ -3284,9 +3286,13 @@ impl Editor { } fn end_transaction(&mut self, cx: &mut ViewContext) { + self.end_transaction_at(Instant::now(), cx); + } + + fn end_transaction_at(&mut self, now: Instant, cx: &mut ViewContext) { if let Some(tx_id) = self .buffer - .update(cx, |buffer, cx| buffer.end_transaction(cx)) + .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) { self.selection_history.get_mut(&tx_id).unwrap().1 = Some(self.selections.clone()); } @@ -3770,10 +3776,80 @@ pub fn diagnostic_style( mod tests { use super::*; use language::LanguageConfig; + use std::time::Instant; use text::Point; use unindent::Unindent; use util::test::sample_text; + #[gpui::test] + fn test_undo_redo_with_selection_restoration(cx: &mut MutableAppContext) { + let mut now = Instant::now(); + let buffer = MultiBuffer::build_simple("123456", cx); + let settings = EditorSettings::test(cx); + let (_, editor) = cx.add_window(Default::default(), |cx| { + build_editor(buffer.clone(), settings, cx) + }); + + editor.update(cx, |editor, cx| { + editor.start_transaction_at(now, cx); + editor.select_ranges([2..4], None, cx); + editor.insert("cd", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cd56"); + assert_eq!(editor.selected_ranges(cx), vec![4..4]); + + editor.start_transaction_at(now, cx); + editor.select_ranges([4..5], None, cx); + editor.insert("e", cx); + editor.end_transaction_at(now, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selected_ranges(cx), vec![5..5]); + + now += buffer.read(cx).transaction_group_interval(cx) + Duration::from_millis(1); + editor.select_ranges([2..2], None, cx); + + // Simulate an edit in another editor + buffer.update(cx, |buffer, cx| { + buffer.start_transaction_at(now, cx); + buffer.edit([0..1], "a", cx); + buffer.edit([1..1], "b", cx); + buffer.end_transaction_at(now, cx); + }); + + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selected_ranges(cx), vec![3..3]); + + // Last transaction happened past the group interval in a different editor. + // Undo it individually and don't restore selections. + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selected_ranges(cx), vec![2..2]); + + // First two transactions happened within the group interval in this editor. 
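// Editor's note: a self-contained sketch of the per-transaction selection
// history that this test exercises: the "before" selections are recorded when
// a transaction starts, the "after" selections when it ends, and undo/redo
// restore the matching set. `TxId` and `Sel` are simplified stand-ins for the
// editor's TransactionId and Selection<Anchor> types, not the patch's code.
use std::collections::HashMap;
use std::ops::Range;

type TxId = usize;
type Sel = Range<usize>;

#[derive(Default)]
struct SelectionHistory {
    entries: HashMap<TxId, (Vec<Sel>, Option<Vec<Sel>>)>,
}

impl SelectionHistory {
    // Called when a transaction is opened: remember the selections as they
    // were before any edits in this transaction (restored on undo).
    fn transaction_started(&mut self, tx: TxId, selections: Vec<Sel>) {
        self.entries.insert(tx, (selections, None));
    }

    // Called when the transaction is closed: remember the selections as they
    // are after the edits (restored on redo).
    fn transaction_ended(&mut self, tx: TxId, selections: Vec<Sel>) {
        if let Some(entry) = self.entries.get_mut(&tx) {
            entry.1 = Some(selections);
        }
    }

    fn selections_for_undo(&self, tx: TxId) -> Option<&Vec<Sel>> {
        self.entries.get(&tx).map(|(before, _)| before)
    }

    fn selections_for_redo(&self, tx: TxId) -> Option<&Vec<Sel>> {
        self.entries.get(&tx).and_then(|(_, after)| after.as_ref())
    }
}

fn main() {
    let mut history = SelectionHistory::default();
    history.transaction_started(1, vec![2..4]);
    history.transaction_ended(1, vec![4..4]);
    assert_eq!(history.selections_for_undo(1), Some(&vec![2..4]));
    assert_eq!(history.selections_for_redo(1), Some(&vec![4..4]));
}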
+ // Undo them together and restore selections. + editor.undo(&Undo, cx); + editor.undo(&Undo, cx); // Undo stack is empty here, so this is a no-op. + assert_eq!(editor.text(cx), "123456"); + assert_eq!(editor.selected_ranges(cx), vec![0..0]); + + // Redo the first two transactions together. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "12cde6"); + assert_eq!(editor.selected_ranges(cx), vec![4..4]); + + // Redo the last transaction on its own. + editor.redo(&Redo, cx); + assert_eq!(editor.text(cx), "ab2cde6"); + assert_eq!(editor.selected_ranges(cx), vec![5..5]); + + // Test empty transactions. + editor.start_transaction_at(now, cx); + editor.end_transaction_at(now, cx); + editor.undo(&Undo, cx); + assert_eq!(editor.text(cx), "12cde6"); + }); + } + #[gpui::test] fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) { let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx); @@ -3786,7 +3862,7 @@ mod tests { }); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] ); @@ -3795,7 +3871,7 @@ mod tests { }); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] ); @@ -3804,7 +3880,7 @@ mod tests { }); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] ); @@ -3814,7 +3890,7 @@ mod tests { }); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)] ); @@ -3825,7 +3901,7 @@ mod tests { eprintln!(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [ DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1), DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0) @@ -3837,7 +3913,7 @@ mod tests { }); assert_eq!( - editor.update(cx, |view, cx| view.selection_ranges(cx)), + editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0)] ); } @@ -3851,7 +3927,7 @@ mod tests { view.update(cx, |view, cx| { view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)] ); }); @@ -3859,7 +3935,7 @@ mod tests { view.update(cx, |view, cx| { view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] ); }); @@ -3868,7 +3944,7 @@ mod tests { view.cancel(&Cancel, cx); view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)] ); }); @@ -3889,7 +3965,7 @@ mod tests { view.update_selection(DisplayPoint::new(0, 3), 0, Vector2F::zero(), cx); view.end_selection(cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1), @@ -3900,7 +3976,7 @@ mod tests { view.update(cx, 
|view, cx| { view.cancel(&Cancel, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1)] ); }); @@ -3908,7 +3984,7 @@ mod tests { view.update(cx, |view, cx| { view.cancel(&Cancel, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1)] ); }); @@ -4022,46 +4098,44 @@ mod tests { }); view.update(cx, |view, cx| { - dbg!(&view.selections); - assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] ); view.move_down(&MoveDown, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] ); view.move_right(&MoveRight, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4)] ); view.move_left(&MoveLeft, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)] ); view.move_up(&MoveUp, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] ); view.move_to_end(&MoveToEnd, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(5, 6)..DisplayPoint::new(5, 6)] ); view.move_to_beginning(&MoveToBeginning, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)] ); @@ -4069,13 +4143,13 @@ mod tests { .unwrap(); view.select_to_beginning(&SelectToBeginning, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 1)..DisplayPoint::new(0, 0)] ); view.select_to_end(&SelectToEnd, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 1)..DisplayPoint::new(5, 6)] ); }); @@ -4104,38 +4178,83 @@ mod tests { assert_eq!(view.display_text(cx), "ⓐⓑ…ⓔ\nab…e\nαβ…ε\n"); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐⓑ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐⓑ…".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐⓑ…".len())] + ); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(1, "ab…".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(1, "ab…".len())] + ); view.move_left(&MoveLeft, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(1, "ab".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(1, "ab".len())] + ); view.move_left(&MoveLeft, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(1, "a".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(1, "a".len())] + ); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, "α".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "α".len())] + ); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, 
"αβ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "αβ".len())] + ); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, "αβ…".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "αβ…".len())] + ); view.move_right(&MoveRight, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, "αβ…ε".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "αβ…ε".len())] + ); view.move_up(&MoveUp, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(1, "ab…e".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(1, "ab…e".len())] + ); view.move_up(&MoveUp, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐⓑ…ⓔ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐⓑ…ⓔ".len())] + ); view.move_left(&MoveLeft, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐⓑ…".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐⓑ…".len())] + ); view.move_left(&MoveLeft, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐⓑ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐⓑ".len())] + ); view.move_left(&MoveLeft, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(0, "ⓐ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(0, "ⓐ".len())] + ); }); } @@ -4151,22 +4270,40 @@ mod tests { .unwrap(); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(1, "abcd".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(1, "abcd".len())] + ); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, "αβγ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(3, "abcd".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); view.move_down(&MoveDown, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())] + ); view.move_up(&MoveUp, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(3, "abcd".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(3, "abcd".len())] + ); view.move_up(&MoveUp, cx); - assert_eq!(view.selection_ranges(cx), &[empty_range(2, "αβγ".len())]); + assert_eq!( + view.selected_display_ranges(cx), + &[empty_range(2, "αβγ".len())] + ); }); } @@ -4189,7 +4326,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), @@ -4200,7 +4337,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), @@ -4211,7 +4348,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 
2)..DisplayPoint::new(1, 2), @@ -4222,7 +4359,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_end_of_line(&MoveToEndOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), @@ -4234,7 +4371,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_end_of_line(&MoveToEndOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), @@ -4246,7 +4383,7 @@ mod tests { view.move_left(&MoveLeft, cx); view.select_to_beginning_of_line(&SelectToBeginningOfLine(true), cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), @@ -4257,7 +4394,7 @@ mod tests { view.update(cx, |view, cx| { view.select_to_beginning_of_line(&SelectToBeginningOfLine(true), cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 4)..DisplayPoint::new(1, 0), @@ -4268,7 +4405,7 @@ mod tests { view.update(cx, |view, cx| { view.select_to_beginning_of_line(&SelectToBeginningOfLine(true), cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2), @@ -4279,7 +4416,7 @@ mod tests { view.update(cx, |view, cx| { view.select_to_end_of_line(&SelectToEndOfLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 4)..DisplayPoint::new(1, 5), @@ -4291,7 +4428,7 @@ mod tests { view.delete_to_end_of_line(&DeleteToEndOfLine, cx); assert_eq!(view.display_text(cx), "ab\n de"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4), @@ -4303,7 +4440,7 @@ mod tests { view.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx); assert_eq!(view.display_text(cx), "\n"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), @@ -4331,7 +4468,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 9), DisplayPoint::new(2, 3)..DisplayPoint::new(2, 3), @@ -4342,7 +4479,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 7)..DisplayPoint::new(0, 7), DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2), @@ -4353,7 +4490,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 4)..DisplayPoint::new(0, 4), DisplayPoint::new(2, 0)..DisplayPoint::new(2, 0), @@ -4364,7 +4501,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); 
assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), @@ -4375,7 +4512,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(0, 23)..DisplayPoint::new(0, 23), @@ -4386,7 +4523,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), DisplayPoint::new(0, 24)..DisplayPoint::new(0, 24), @@ -4397,7 +4534,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 7)..DisplayPoint::new(0, 7), DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), @@ -4408,7 +4545,7 @@ mod tests { view.update(cx, |view, cx| { view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 9), DisplayPoint::new(2, 3)..DisplayPoint::new(2, 3), @@ -4420,7 +4557,7 @@ mod tests { view.move_right(&MoveRight, cx); view.select_to_previous_word_boundary(&SelectToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 10)..DisplayPoint::new(0, 9), DisplayPoint::new(2, 4)..DisplayPoint::new(2, 3), @@ -4431,7 +4568,7 @@ mod tests { view.update(cx, |view, cx| { view.select_to_previous_word_boundary(&SelectToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 10)..DisplayPoint::new(0, 7), DisplayPoint::new(2, 4)..DisplayPoint::new(2, 2), @@ -4442,7 +4579,7 @@ mod tests { view.update(cx, |view, cx| { view.select_to_next_word_boundary(&SelectToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 10)..DisplayPoint::new(0, 9), DisplayPoint::new(2, 4)..DisplayPoint::new(2, 3), @@ -4469,37 +4606,37 @@ mod tests { view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)] ); view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)] ); view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] ); view.move_to_next_word_boundary(&MoveToNextWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)] ); view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)] ); view.move_to_previous_word_boundary(&MoveToPreviousWordBoundary, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 
14)..DisplayPoint::new(1, 14)] ); }); @@ -4593,7 +4730,7 @@ mod tests { view.tab(&Tab, cx); assert_eq!(view.text(cx), " one two\nthree\n four"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 4)..DisplayPoint::new(0, 7), DisplayPoint::new(0, 8)..DisplayPoint::new(0, 11), @@ -4604,7 +4741,7 @@ mod tests { view.outdent(&Outdent, cx); assert_eq!(view.text(cx), "one two\nthree\n four"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 3), DisplayPoint::new(0, 4)..DisplayPoint::new(0, 7), @@ -4619,13 +4756,13 @@ mod tests { view.tab(&Tab, cx); assert_eq!(view.text(cx), "one two\n three\n four"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 5)..DisplayPoint::new(2, 0)] ); view.outdent(&Outdent, cx); assert_eq!(view.text(cx), "one two\nthree\n four"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(1, 1)..DisplayPoint::new(2, 0)] ); }); @@ -4711,7 +4848,7 @@ mod tests { view.delete_line(&DeleteLine, cx); assert_eq!(view.display_text(cx), "ghi"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1) @@ -4728,7 +4865,7 @@ mod tests { view.delete_line(&DeleteLine, cx); assert_eq!(view.display_text(cx), "ghi\n"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)] ); }); @@ -4753,7 +4890,7 @@ mod tests { view.duplicate_line(&DuplicateLine, cx); assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1), DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), @@ -4778,7 +4915,7 @@ mod tests { view.duplicate_line(&DuplicateLine, cx); assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n"); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(3, 1)..DisplayPoint::new(4, 1), DisplayPoint::new(4, 2)..DisplayPoint::new(5, 1), @@ -4822,7 +4959,7 @@ mod tests { "aa…bbb\nccc…eeee\nggggg\n…i\njjjjj\nfffff" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), @@ -4839,7 +4976,7 @@ mod tests { "ccc…eeee\naa…bbb\nfffff\nggggg\n…i\njjjjj" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), @@ -4856,7 +4993,7 @@ mod tests { "ccc…eeee\nfffff\naa…bbb\nggggg\n…i\njjjjj" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1), @@ -4873,7 +5010,7 @@ mod tests { "ccc…eeee\naa…bbb\nggggg\n…i\njjjjj\nfffff" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), @@ -4907,7 +5044,7 @@ mod tests { view.paste(&Paste, cx); assert_eq!(view.display_text(cx), "two one✅ four three six five "); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ 
DisplayPoint::new(0, 11)..DisplayPoint::new(0, 11), DisplayPoint::new(0, 22)..DisplayPoint::new(0, 22), @@ -4975,7 +5112,7 @@ mod tests { "123\n4567\n9\n( 8ne✅ three five ) two one✅ four three six five ( one✅ three five ) " ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1), @@ -5009,7 +5146,7 @@ mod tests { "123\n123\n123\n67\n123\n9\n( 8ne✅ three five ) two one✅ four three six five ( one✅ three five ) " ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0), @@ -5027,7 +5164,7 @@ mod tests { view.update(cx, |view, cx| { view.select_all(&SelectAll, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), &[DisplayPoint::new(0, 0)..DisplayPoint::new(2, 3)] ); }); @@ -5051,7 +5188,7 @@ mod tests { .unwrap(); view.select_line(&SelectLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 0)..DisplayPoint::new(2, 0), DisplayPoint::new(4, 0)..DisplayPoint::new(5, 0), @@ -5062,7 +5199,7 @@ mod tests { view.update(cx, |view, cx| { view.select_line(&SelectLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 0)..DisplayPoint::new(3, 0), DisplayPoint::new(4, 0)..DisplayPoint::new(5, 5), @@ -5073,7 +5210,7 @@ mod tests { view.update(cx, |view, cx| { view.select_line(&SelectLine, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![DisplayPoint::new(0, 0)..DisplayPoint::new(5, 5)] ); }); @@ -5113,7 +5250,7 @@ mod tests { "aaaaa\nbbbbb\nccc…eeee\nfffff\nggggg\n…i" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2), @@ -5132,7 +5269,7 @@ mod tests { "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii" ); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), [ DisplayPoint::new(0, 5)..DisplayPoint::new(0, 5), DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5), @@ -5160,7 +5297,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) @@ -5171,7 +5308,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3) @@ -5182,7 +5319,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)] ); }); @@ -5190,7 +5327,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) @@ -5201,7 +5338,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - 
view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3), DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3) @@ -5216,7 +5353,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) @@ -5227,7 +5364,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3), DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3) @@ -5238,7 +5375,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] ); }); @@ -5246,7 +5383,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)] ); }); @@ -5256,7 +5393,7 @@ mod tests { .unwrap(); view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), @@ -5268,7 +5405,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), @@ -5281,7 +5418,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3), DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4), @@ -5297,7 +5434,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_above(&AddSelectionAbove, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 1), DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), @@ -5310,7 +5447,7 @@ mod tests { view.update(cx, |view, cx| { view.add_selection_below(&AddSelectionBelow, cx); assert_eq!( - view.selection_ranges(cx), + view.selected_display_ranges(cx), vec![ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1), DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1), @@ -5356,7 +5493,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -5368,7 +5505,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), @@ -5379,7 +5516,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, 
|view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] ); @@ -5388,7 +5525,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)] ); @@ -5396,7 +5533,7 @@ mod tests { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0), @@ -5407,7 +5544,7 @@ mod tests { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -5419,7 +5556,7 @@ mod tests { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), @@ -5432,7 +5569,7 @@ mod tests { view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25), DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12), @@ -5453,7 +5590,7 @@ mod tests { view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx); }); assert_eq!( - view.update(&mut cx, |view, cx| view.selection_ranges(cx)), + view.update(&mut cx, |view, cx| view.selected_display_ranges(cx)), &[ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28), DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7), @@ -5730,16 +5867,33 @@ mod tests { } impl Editor { - fn selection_ranges(&self, cx: &mut MutableAppContext) -> Vec> { - self.visible_selections(0..self.max_point(cx).row() + 1, cx) - .get(&self.replica_id(cx)) - .unwrap() - .into_iter() + fn selected_ranges>( + &self, + cx: &mut MutableAppContext, + ) -> Vec> { + self.local_selections::(cx) + .iter() + .map(|s| { + if s.reversed { + s.end.clone()..s.start.clone() + } else { + s.start.clone()..s.end.clone() + } + }) + .collect() + } + + fn selected_display_ranges(&self, cx: &mut MutableAppContext) -> Vec> { + let display_map = self + .display_map + .update(cx, |display_map, cx| display_map.snapshot(cx)); + self.selections + .iter() .map(|s| { if s.reversed { - s.end..s.start + s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map) } else { - s.start..s.end + s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map) } }) .collect() diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index de81bce210a60c84c14a798d466680f3e8b2208b..1ba453b0d012fda18ac954f9af2fa5cbc888a6b6 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1,5 +1,4 @@ mod anchor; -mod selection; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; @@ -7,8 +6,8 @@ use clock::ReplicaId; use 
collections::HashMap; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ - Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, FromAnchor, - Language, Selection, ToOffset as _, ToPoint as _, TransactionId, + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, + ToOffset as _, ToPoint as _, TransactionId, }; use std::{ cell::{Ref, RefCell}, @@ -16,7 +15,7 @@ use std::{ iter::Peekable, ops::{Range, Sub}, sync::Arc, - time::SystemTime, + time::{Duration, Instant, SystemTime}, }; use sum_tree::{Bias, Cursor, SumTree}; use text::{ @@ -40,11 +39,15 @@ pub struct MultiBuffer { } pub trait ToOffset: 'static { - fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize; + fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize; } pub trait ToPoint: 'static { - fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point; + fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; +} + +pub trait FromAnchor: 'static { + fn from_anchor(anchor: &Anchor, snapshot: &MultiBufferSnapshot) -> Self; } #[derive(Debug)] @@ -130,6 +133,13 @@ impl MultiBuffer { self.replica_id } + pub fn transaction_group_interval(&self, cx: &AppContext) -> Duration { + self.as_singleton() + .unwrap() + .read(cx) + .transaction_group_interval() + } + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); self.snapshot.borrow().clone() @@ -200,10 +210,18 @@ impl MultiBuffer { } pub fn start_transaction(&mut self, cx: &mut ModelContext) -> Option { + self.start_transaction_at(Instant::now(), cx) + } + + pub(crate) fn start_transaction_at( + &mut self, + now: Instant, + cx: &mut ModelContext, + ) -> Option { // TODO self.as_singleton() .unwrap() - .update(cx, |buffer, _| buffer.start_transaction()) + .update(cx, |buffer, _| buffer.start_transaction_at(now)) } pub fn end_transaction(&mut self, cx: &mut ModelContext) -> Option { @@ -213,6 +231,17 @@ impl MultiBuffer { .update(cx, |buffer, cx| buffer.end_transaction(cx)) } + pub(crate) fn end_transaction_at( + &mut self, + now: Instant, + cx: &mut ModelContext, + ) -> Option { + // TODO + self.as_singleton() + .unwrap() + .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + } + pub fn undo(&mut self, cx: &mut ModelContext) -> Option { // TODO self.as_singleton() diff --git a/crates/editor/src/multi_buffer/selection.rs b/crates/editor/src/multi_buffer/selection.rs deleted file mode 100644 index 86d36ae16b690f1700468d13da4169a47d264c76..0000000000000000000000000000000000000000 --- a/crates/editor/src/multi_buffer/selection.rs +++ /dev/null @@ -1,42 +0,0 @@ -use super::{Anchor, MultiBufferSnapshot}; -use std::ops::Sub; -use text::{rope::TextDimension, Selection}; - -fn resolve_selection<'a, D>( - selection: &'a Selection, - snapshot: &'a MultiBufferSnapshot, -) -> Selection -where - D: TextDimension + Ord + Sub, -{ - Selection { - id: selection.id, - start: selection.start.summary::(snapshot), - end: selection.end.summary::(snapshot), - reversed: selection.reversed, - goal: selection.goal, - } -} - -fn resolve_selections<'a, D>( - selections: &'a [Selection], - snapshot: &'a MultiBufferSnapshot, -) -> impl 'a + Iterator> -where - D: TextDimension + Ord + Sub, -{ - let mut summaries = snapshot - .summaries_for_anchors::( - selections - .iter() - .flat_map(|selection| [&selection.start, &selection.end]), - ) - .into_iter(); - selections.iter().map(move |selection| Selection { - id: selection.id, - start: 
summaries.next().unwrap(), - end: summaries.next().unwrap(), - reversed: selection.reversed, - goal: selection.goal, - }) -} diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index f3eb3dc97403b92a11a046fd0b010d0e744e36b0..9ed8d784f3bd4324951943508425ea712ed48602 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -31,7 +31,7 @@ use std::{ vec, }; use sum_tree::TreeMap; -use text::{operation_queue::OperationQueue, rope::TextDimension}; +use text::operation_queue::OperationQueue; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -1062,7 +1062,7 @@ impl Buffer { self.start_transaction_at(Instant::now()) } - pub(crate) fn start_transaction_at(&mut self, now: Instant) -> Option { + pub fn start_transaction_at(&mut self, now: Instant) -> Option { self.text.start_transaction_at(now) } @@ -1070,7 +1070,7 @@ impl Buffer { self.end_transaction_at(Instant::now(), cx) } - pub(crate) fn end_transaction_at( + pub fn end_transaction_at( &mut self, now: Instant, cx: &mut ModelContext, diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index d715a3fbadeea7d79ca99895ce5b743f53bb04c8..2c2de7dfe5608f31c272dbadfc1ff7a39f48009c 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -511,6 +511,10 @@ impl Buffer { self.deferred_ops.len() } + pub fn transaction_group_interval(&self) -> Duration { + self.history.group_interval + } + pub fn edit(&mut self, ranges: R, new_text: T) -> EditOperation where R: IntoIterator, From 2adf11e20482d4d1f2d90a532bcb31a4751024a0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 13 Dec 2021 11:20:10 +0100 Subject: [PATCH 060/196] Write a simple unit test for TreeMap and fix bug in `remove` --- crates/sum_tree/src/tree_map.rs | 42 +++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 3619d7870e60028d3f132ff120850206cecbef2c..76eb78476c37c407d9fd290139d81b9f20e39ee6 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -5,7 +5,7 @@ use crate::{Bias, Dimension, Item, KeyedItem, SeekTarget, SumTree, Summary}; #[derive(Clone)] pub struct TreeMap(SumTree>) where - K: Clone + Debug + Default, + K: Clone + Debug + Default + Ord, V: Clone + Debug; #[derive(Clone)] @@ -41,7 +41,7 @@ impl TreeMap { let mut cursor = self.0.cursor::>(); let key = MapKeyRef(Some(key)); let mut new_tree = cursor.slice(&key, Bias::Left, &()); - if key.cmp(cursor.start(), &()) == Ordering::Equal { + if key.cmp(&cursor.end(&()), &()) == Ordering::Equal { removed = Some(cursor.item().unwrap().value.clone()); cursor.next(&()); } @@ -58,7 +58,7 @@ impl TreeMap { impl Default for TreeMap where - K: Clone + Debug + Default, + K: Clone + Debug + Default + Ord, V: Clone + Debug, { fn default() -> Self { @@ -68,13 +68,13 @@ where impl Item for MapEntry where - K: Clone + Debug + Default + Clone, + K: Clone + Debug + Default + Ord, V: Clone, { type Summary = MapKey; fn summary(&self) -> Self::Summary { - todo!() + self.key() } } @@ -118,3 +118,35 @@ where self.0.cmp(&cursor_location.0) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_basic() { + let mut map = TreeMap::default(); + assert_eq!(map.iter().collect::>(), vec![]); + + map.insert(3, "c"); + assert_eq!(map.iter().collect::>(), vec![(&3, &"c")]); + + map.insert(1, "a"); + assert_eq!(map.iter().collect::>(), vec![(&1, 
&"a"), (&3, &"c")]); + + map.insert(2, "b"); + assert_eq!( + map.iter().collect::>(), + vec![(&1, &"a"), (&2, &"b"), (&3, &"c")] + ); + + map.remove(&2); + assert_eq!(map.iter().collect::>(), vec![(&1, &"a"), (&3, &"c")]); + + map.remove(&3); + assert_eq!(map.iter().collect::>(), vec![(&1, &"a")]); + + map.remove(&1); + assert_eq!(map.iter().collect::>(), vec![]); + } +} From c984b39aaa707122097cb44e31d93cd176697e9d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 13 Dec 2021 11:38:38 +0100 Subject: [PATCH 061/196] Show remote collaborators' active selections --- crates/editor/src/editor.rs | 37 ++++++++++++++------- crates/editor/src/multi_buffer.rs | 30 ++++++++++++++++- crates/language/src/buffer.rs | 55 +++++++++++++++++++++++++++---- crates/language/src/proto.rs | 15 ++------- crates/rpc/proto/zed.proto | 4 +-- crates/rpc/src/rpc.rs | 2 +- crates/text/src/text.rs | 16 ++++----- script/seed-db | 3 +- 8 files changed, 117 insertions(+), 45 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a4f2ccd22d1a3ea0ce1720c831af78034de33201..211585b9ad07089dacd032fbb7cee44c6c8d9c49 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1991,8 +1991,8 @@ impl Editor { pub fn undo(&mut self, _: &Undo, cx: &mut ViewContext) { if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.undo(cx)) { - if let Some((selections, _)) = self.selection_history.get(&tx_id) { - self.selections = selections.clone(); + if let Some((selections, _)) = self.selection_history.get(&tx_id).cloned() { + self.set_selections(selections, cx); } self.request_autoscroll(Autoscroll::Fit, cx); } @@ -2000,8 +2000,8 @@ impl Editor { pub fn redo(&mut self, _: &Redo, cx: &mut ViewContext) { if let Some(tx_id) = self.buffer.update(cx, |buffer, cx| buffer.redo(cx)) { - if let Some((_, Some(selections))) = self.selection_history.get(&tx_id) { - self.selections = selections.clone(); + if let Some((_, Some(selections))) = self.selection_history.get(&tx_id).cloned() { + self.set_selections(selections, cx); } self.request_autoscroll(Autoscroll::Fit, cx); } @@ -3256,13 +3256,23 @@ impl Editor { } self.pause_cursor_blinking(cx); - self.selections = Arc::from_iter(selections.into_iter().map(|selection| Selection { - id: selection.id, - start: buffer.anchor_before(selection.start), - end: buffer.anchor_before(selection.end), - reversed: selection.reversed, - goal: selection.goal, - })); + self.set_selections( + Arc::from_iter(selections.into_iter().map(|selection| Selection { + id: selection.id, + start: buffer.anchor_before(selection.start), + end: buffer.anchor_before(selection.end), + reversed: selection.reversed, + goal: selection.goal, + })), + cx, + ); + } + + fn set_selections(&mut self, selections: Arc<[Selection]>, cx: &mut ViewContext) { + self.selections = selections; + self.buffer.update(cx, |buffer, cx| { + buffer.set_active_selections(&self.selections, cx) + }); } fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext) { @@ -3651,11 +3661,16 @@ impl View for Editor { fn on_focus(&mut self, cx: &mut ViewContext) { self.focused = true; self.blink_cursors(self.blink_epoch, cx); + self.buffer.update(cx, |buffer, cx| { + buffer.set_active_selections(&self.selections, cx) + }); } fn on_blur(&mut self, cx: &mut ViewContext) { self.focused = false; self.show_local_cursors = false; + self.buffer + .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); cx.emit(Event::Blurred); cx.notify(); } diff --git 
a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1ba453b0d012fda18ac954f9af2fa5cbc888a6b6..58bd41ffcf17d52d058b85caad9bb795cfb2e8c4 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -12,7 +12,7 @@ use language::{ use std::{ cell::{Ref, RefCell}, cmp, io, - iter::Peekable, + iter::{FromIterator, Peekable}, ops::{Range, Sub}, sync::Arc, time::{Duration, Instant, SystemTime}, @@ -242,6 +242,34 @@ impl MultiBuffer { .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) } + pub fn set_active_selections( + &mut self, + selections: &[Selection], + cx: &mut ModelContext, + ) { + // TODO + let this = self.read(cx); + self.as_singleton().unwrap().update(cx, |buffer, cx| { + let buffer_snapshot = buffer.snapshot(); + let selections = selections.iter().map(|selection| Selection { + id: selection.id, + start: buffer_snapshot.anchor_before(selection.start.to_offset(&this)), + end: buffer_snapshot.anchor_before(selection.end.to_offset(&this)), + reversed: selection.reversed, + goal: selection.goal, + }); + buffer.set_active_selections(Arc::from_iter(selections), cx); + }); + } + + pub fn remove_active_selections(&mut self, cx: &mut ModelContext) { + for buffer in self.buffers.values() { + buffer + .buffer + .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); + } + } + pub fn undo(&mut self, cx: &mut ModelContext) -> Option { // TODO self.as_singleton() diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 9ed8d784f3bd4324951943508425ea712ed48602..120f1b710fe712728565b52440daad4fe4d77174 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -289,6 +289,12 @@ impl Buffer { .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op))); buffer.apply_ops(ops)?; let mut this = Self::build(buffer, file); + for selection_set in message.selections { + this.remote_selections.insert( + selection_set.replica_id as ReplicaId, + proto::deserialize_selections(selection_set.selections), + ); + } this.apply_diagnostic_update( Arc::from(proto::deserialize_diagnostics(message.diagnostics)), cx, @@ -306,7 +312,14 @@ impl Buffer { .history() .map(proto::serialize_edit_operation) .collect(), - selections: Vec::new(), + selections: self + .remote_selections + .iter() + .map(|(replica_id, selections)| proto::SelectionSet { + replica_id: *replica_id as u32, + selections: proto::serialize_selections(selections), + }) + .collect(), diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), } } @@ -835,7 +848,7 @@ impl Buffer { cx.emit(Event::DiagnosticsUpdated); Ok(Operation::UpdateDiagnostics { diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), - lamport_timestamp: self.lamport_timestamp(), + lamport_timestamp: self.text.lamport_clock.tick(), }) } @@ -1084,6 +1097,35 @@ impl Buffer { } } + pub fn set_active_selections( + &mut self, + selections: Arc<[Selection]>, + cx: &mut ModelContext, + ) { + let lamport_timestamp = self.text.lamport_clock.tick(); + self.remote_selections + .insert(self.text.replica_id(), selections.clone()); + self.send_operation( + Operation::UpdateSelections { + replica_id: self.text.replica_id(), + selections, + lamport_timestamp, + }, + cx, + ); + } + + pub fn remove_active_selections(&mut self, cx: &mut ModelContext) { + let lamport_timestamp = self.text.lamport_clock.tick(); + self.send_operation( + Operation::RemoveSelections { + replica_id: self.text.replica_id(), + lamport_timestamp, + }, + cx, + ); + } + fn 
update_language_server(&mut self) { let language_server = if let Some(language_server) = self.language_server.as_mut() { language_server @@ -1321,14 +1363,14 @@ impl Buffer { lamport_timestamp, } => { self.remote_selections.insert(replica_id, selections); - self.text.observe_lamport_timestamp(lamport_timestamp); + self.text.lamport_clock.observe(lamport_timestamp); } Operation::RemoveSelections { - replica_id: set_id, + replica_id, lamport_timestamp, } => { - self.remote_selections.remove(&set_id); - self.text.observe_lamport_timestamp(lamport_timestamp); + self.remote_selections.remove(&replica_id); + self.text.lamport_clock.observe(lamport_timestamp); } } } @@ -1655,6 +1697,7 @@ impl BufferSnapshot { { self.remote_selections .iter() + .filter(|(replica_id, _)| **replica_id != self.text.replica_id()) .map(move |(replica_id, selections)| { let start_ix = match selections .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap()) diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 8921ffa42154586964f918bda081f6e36e9062db..25e2dfe223cd11287ba48f5d3232e3e37403cfaa 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,13 +1,12 @@ -use std::sync::Arc; - use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; use rpc::proto; +use std::sync::Arc; use text::*; -pub use proto::Buffer; +pub use proto::{Buffer, SelectionSet}; pub fn serialize_operation(operation: &Operation) -> proto::Operation { proto::Operation { @@ -48,15 +47,7 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { } => proto::operation::Variant::UpdateSelections(proto::operation::UpdateSelections { replica_id: *replica_id as u32, lamport_timestamp: lamport_timestamp.value, - selections: selections - .iter() - .map(|selection| proto::Selection { - id: selection.id as u64, - start: Some(serialize_anchor(&selection.start)), - end: Some(serialize_anchor(&selection.end)), - reversed: selection.reversed, - }) - .collect(), + selections: serialize_selections(selections), }), Operation::RemoveSelections { replica_id, diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 7a06abb1226f8f51bc80404f0b1cf3ed51ce589c..5ea70b7d8a4c1f23ada14b47e0001fd3065a50cd 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -234,9 +234,7 @@ message Buffer { message SelectionSet { uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - bool is_active = 3; - repeated Selection selections = 4; + repeated Selection selections = 2; } message Selection { diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index 110b6a6659ec1db3e43dab649e2afc3c9ae72934..3263ad8063f1b7b4f6947a3e0139128a66a91722 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -5,4 +5,4 @@ pub mod proto; pub use conn::Connection; pub use peer::*; -pub const PROTOCOL_VERSION: u32 = 3; +pub const PROTOCOL_VERSION: u32 = 4; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 2c2de7dfe5608f31c272dbadfc1ff7a39f48009c..30fd985319243c094a6e2a29995376b9fc16e8c4 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -49,12 +49,13 @@ pub struct Buffer { replica_id: ReplicaId, remote_id: u64, local_clock: clock::Local, - lamport_clock: clock::Lamport, + pub lamport_clock: clock::Lamport, subscriptions: Topic, } #[derive(Clone, Debug)] pub struct BufferSnapshot { + replica_id: ReplicaId, visible_text: Rope, deleted_text: Rope, 
undo_map: UndoMap, @@ -464,6 +465,7 @@ impl Buffer { Buffer { snapshot: BufferSnapshot { + replica_id, visible_text, deleted_text: Rope::new(), fragments, @@ -495,14 +497,6 @@ impl Buffer { self.local_clock.replica_id } - pub fn lamport_timestamp(&self) -> clock::Lamport { - self.lamport_clock - } - - pub fn observe_lamport_timestamp(&mut self, timestamp: clock::Lamport) { - self.lamport_clock.observe(timestamp); - } - pub fn remote_id(&self) -> u64 { self.remote_id } @@ -1219,6 +1213,10 @@ impl BufferSnapshot { &self.visible_text } + pub fn replica_id(&self) -> ReplicaId { + self.replica_id + } + pub fn row_count(&self) -> u32 { self.max_point().row + 1 } diff --git a/script/seed-db b/script/seed-db index 195dc5fb8d49c40bb6abf8306def6211111ac56f..f42a33ba002411298a759608fabff52f12841616 100755 --- a/script/seed-db +++ b/script/seed-db @@ -1,5 +1,4 @@ #!/bin/bash set -e -cd server -cargo run --features seed-support --bin seed +cargo run --package=zed-server --features seed-support --bin seed From 6645e2820ce40757de7910f389328a64fd474a3d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 13 Dec 2021 15:43:26 +0100 Subject: [PATCH 062/196] First attempt at implementing `MultiBuffer::edit_internal` Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 105 +++++++++++++++++++++++++++--- 1 file changed, 96 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 58bd41ffcf17d52d058b85caad9bb795cfb2e8c4..503707e848f51168cc41555edc22aeb312d171ef 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -73,6 +73,7 @@ pub struct ExcerptProperties<'a, T> { #[derive(Clone)] struct Excerpt { id: ExcerptId, + buffer_id: usize, buffer: BufferSnapshot, range: Range, text_summary: TextSummary, @@ -195,18 +196,100 @@ impl MultiBuffer { S: ToOffset, T: Into, { - // TODO let snapshot = self.read(cx); - let ranges_iter = ranges_iter - .into_iter() - .map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot)); - self.as_singleton().unwrap().update(cx, |buffer, cx| { - if autoindent { - buffer.edit_with_autoindent(ranges_iter, new_text, cx); + let mut buffer_edits: HashMap, bool)>> = Default::default(); + let mut cursor = snapshot.excerpts.cursor::(); + for range in ranges_iter { + let start = range.start.to_offset(&snapshot); + let end = range.end.to_offset(&snapshot); + cursor.seek(&start, Bias::Right, &()); + let start_excerpt = cursor.item().expect("start offset out of bounds"); + let start_overshoot = + (start - cursor.start()).saturating_sub(start_excerpt.header_height as usize); + let buffer_start = + start_excerpt.range.start.to_offset(&start_excerpt.buffer) + start_overshoot; + + cursor.seek(&end, Bias::Right, &()); + let end_excerpt = cursor.item().expect("end offset out of bounds"); + let end_overshoot = + (end - cursor.start()).saturating_sub(end_excerpt.header_height as usize); + let buffer_end = end_excerpt.range.start.to_offset(&end_excerpt.buffer) + end_overshoot; + + if start_excerpt.id == end_excerpt.id { + buffer_edits + .entry(start_excerpt.buffer_id) + .or_insert(Vec::new()) + .push((buffer_start..buffer_end, true)); } else { - buffer.edit(ranges_iter, new_text, cx); + let start_excerpt_range = + buffer_start..start_excerpt.range.end.to_offset(&start_excerpt.buffer); + let end_excerpt_range = + end_excerpt.range.start.to_offset(&end_excerpt.buffer)..buffer_end; + buffer_edits + .entry(start_excerpt.buffer_id) + .or_insert(Vec::new()) + 
.push((start_excerpt_range, true)); + buffer_edits + .entry(end_excerpt.buffer_id) + .or_insert(Vec::new()) + .push((end_excerpt_range, false)); + + cursor.seek(&start, Bias::Right, &()); + cursor.next(&()); + while let Some(excerpt) = cursor.item() { + if excerpt.id == end_excerpt.id { + break; + } + + let excerpt_range = start_excerpt.range.end.to_offset(&start_excerpt.buffer) + ..start_excerpt.range.end.to_offset(&start_excerpt.buffer); + buffer_edits + .entry(excerpt.buffer_id) + .or_insert(Vec::new()) + .push((excerpt_range, false)); + cursor.next(&()); + } } - }); + } + + let new_text = new_text.into(); + for (buffer_id, mut edits) in buffer_edits { + edits.sort_unstable_by_key(|(range, _)| range.start); + self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { + let mut edits = edits.into_iter().peekable(); + let mut insertions = Vec::new(); + let mut deletions = Vec::new(); + while let Some((mut range, mut is_insertion)) = edits.next() { + while let Some((next_range, next_is_insertion)) = edits.peek() { + if range.end >= next_range.start { + range.end = cmp::max(next_range.end, range.end); + is_insertion |= *next_is_insertion; + edits.next(); + } else { + break; + } + } + + if is_insertion { + insertions.push( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + ); + } else { + deletions.push( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + ); + } + } + + if autoindent { + buffer.edit_with_autoindent(deletions, "", cx); + buffer.edit_with_autoindent(insertions, new_text.clone(), cx); + } else { + buffer.edit(deletions, "", cx); + buffer.edit(insertions, new_text.clone(), cx); + } + }) + } } pub fn start_transaction(&mut self, cx: &mut ModelContext) -> Option { @@ -306,6 +389,7 @@ impl MultiBuffer { let edit_start = snapshot.excerpts.summary().text.bytes; let excerpt = Excerpt::new( id.clone(), + props.buffer.id(), buffer.snapshot(), range, props.header_height, @@ -424,6 +508,7 @@ impl MultiBuffer { new_excerpt = Excerpt::new( id.clone(), + buffer_state.buffer.id(), buffer.snapshot(), old_excerpt.range.clone(), old_excerpt.header_height, @@ -1025,6 +1110,7 @@ impl MultiBufferSnapshot { impl Excerpt { fn new( id: ExcerptId, + buffer_id: usize, buffer: BufferSnapshot, range: Range, header_height: u8, @@ -1050,6 +1136,7 @@ impl Excerpt { Excerpt { id, + buffer_id, buffer, range, text_summary, From 85674ba50691015adf4a50413b03c63e15707669 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 13 Dec 2021 16:46:35 +0100 Subject: [PATCH 063/196] WIP Co-Authored-By: Nathan Sobo --- Cargo.lock | 11 + crates/diagnostics/Cargo.toml | 14 ++ crates/diagnostics/src/diagnostics.rs | 45 ++++ crates/editor/src/editor.rs | 46 +++- crates/editor/src/items.rs | 47 +--- crates/language/src/buffer.rs | 119 ++------- crates/language/src/proto.rs | 4 + crates/language/src/tests.rs | 346 ++++++-------------------- crates/project/src/project.rs | 15 +- crates/project/src/worktree.rs | 322 +++++++++++++++++++++++- crates/rpc/proto/zed.proto | 2 + 11 files changed, 554 insertions(+), 417 deletions(-) create mode 100644 crates/diagnostics/Cargo.toml create mode 100644 crates/diagnostics/src/diagnostics.rs diff --git a/Cargo.lock b/Cargo.lock index 75e353144de2b4e89c48646b85086b79eda60103..5c9408f013cae611cde76165c9cc59a927c84fc3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1410,6 +1410,17 @@ dependencies = [ "const-oid", ] +[[package]] +name = "diagnostics" +version = "0.1.0" +dependencies = [ + "editor", + "gpui", + "postage", + "project", + "workspace", +] + 
[[package]] name = "digest" version = "0.8.1" diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..6f9979a22e9ac92965a8fb2242d53a087742f456 --- /dev/null +++ b/crates/diagnostics/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "diagnostics" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/diagnostics.rs" + +[dependencies] +editor = { path = "../editor" } +gpui = { path = "../gpui" } +project = { path = "../project" } +workspace = { path = "../workspace" } +postage = { version = "0.4", features = ["futures-traits"] } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs new file mode 100644 index 0000000000000000000000000000000000000000..83bc074f1888f8637ac89ea3525a519fe8c53fe0 --- /dev/null +++ b/crates/diagnostics/src/diagnostics.rs @@ -0,0 +1,45 @@ +use editor::{Editor, MultiBuffer}; +use gpui::{elements::*, Entity, ModelHandle, RenderContext, View, ViewContext, ViewHandle}; +use postage::watch; +use project::Project; + +struct ProjectDiagnostics { + editor: ViewHandle, + project: ModelHandle, +} + +impl ProjectDiagnostics { + fn new( + project: ModelHandle, + settings: watch::Receiver, + cx: &mut ViewContext, + ) -> Self { + let mut buffer = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); + for (path, diagnostics) in project.read(cx).diagnostics(cx) {} + + Self { + editor: cx.add_view(|cx| { + Editor::for_buffer( + buffer.clone(), + editor::settings_builder(buffer.downgrade(), settings), + cx, + ) + }), + project, + } + } +} + +impl Entity for ProjectDiagnostics { + type Event = (); +} + +impl View for ProjectDiagnostics { + fn ui_name() -> &'static str { + "ProjectDiagnostics" + } + + fn render(&mut self, _: &mut RenderContext) -> ElementBox { + ChildView::new(self.editor.id()).boxed() + } +} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 211585b9ad07089dacd032fbb7cee44c6c8d9c49..52bc739d0709ca9b787b8c63e1235ea182ab78ca 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -15,10 +15,11 @@ pub use element::*; use gpui::{ action, elements::Text, + fonts::TextStyle, geometry::vector::{vec2f, Vector2F}, keymap::Binding, text_layout, AppContext, ClipboardItem, Element, ElementBox, Entity, ModelHandle, - MutableAppContext, RenderContext, View, ViewContext, WeakViewHandle, + MutableAppContext, RenderContext, View, ViewContext, WeakModelHandle, WeakViewHandle, }; use items::BufferItemHandle; use language::{ @@ -29,6 +30,7 @@ pub use multi_buffer::MultiBuffer; use multi_buffer::{ Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint, }; +use postage::watch; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; @@ -3787,6 +3789,48 @@ pub fn diagnostic_style( } } +pub fn settings_builder( + buffer: WeakModelHandle, + settings: watch::Receiver, +) -> impl Fn(&AppContext) -> EditorSettings { + move |cx| { + let settings = settings.borrow(); + let font_cache = cx.font_cache(); + let font_family_id = settings.buffer_font_family; + let font_family_name = cx.font_cache().family_name(font_family_id).unwrap(); + let font_properties = Default::default(); + let font_id = font_cache + .select_font(font_family_id, &font_properties) + .unwrap(); + let font_size = settings.buffer_font_size; + + let mut theme = settings.theme.editor.clone(); + theme.text = TextStyle { + color: theme.text.color, + font_family_name, + font_family_id, + font_id, + 
font_size, + font_properties, + underline: None, + }; + let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language(cx)); + let soft_wrap = match settings.soft_wrap(language) { + workspace::settings::SoftWrap::None => SoftWrap::None, + workspace::settings::SoftWrap::EditorWidth => SoftWrap::EditorWidth, + workspace::settings::SoftWrap::PreferredLineLength => { + SoftWrap::Column(settings.preferred_line_length(language).saturating_sub(1)) + } + }; + + EditorSettings { + tab_size: settings.tab_size, + soft_wrap, + style: theme, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 6989cc0fccb65d3ba0c5b768fc17b296f9467322..0a4ddc8af9590540e5a588541a6006aa747fb697 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,10 +1,9 @@ -use crate::{Editor, EditorSettings, Event}; +use crate::{Editor, Event}; use crate::{MultiBuffer, ToPoint as _}; use anyhow::Result; use gpui::{ - elements::*, fonts::TextStyle, AppContext, Entity, ModelContext, ModelHandle, - MutableAppContext, RenderContext, Subscription, Task, View, ViewContext, ViewHandle, - WeakModelHandle, + elements::*, AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, RenderContext, + Subscription, Task, View, ViewContext, ViewHandle, WeakModelHandle, }; use language::{Diagnostic, File as _}; use postage::watch; @@ -13,8 +12,7 @@ use std::fmt::Write; use std::path::Path; use text::{Point, Selection}; use workspace::{ - settings, EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView, - WeakItemHandle, + EntryOpener, ItemHandle, ItemView, ItemViewHandle, Settings, StatusItemView, WeakItemHandle, }; pub struct BufferOpener; @@ -53,42 +51,7 @@ impl ItemHandle for BufferItemHandle { Box::new(cx.add_view(window_id, |cx| { Editor::for_buffer( self.0.clone(), - move |cx| { - let settings = settings.borrow(); - let font_cache = cx.font_cache(); - let font_family_id = settings.buffer_font_family; - let font_family_name = cx.font_cache().family_name(font_family_id).unwrap(); - let font_properties = Default::default(); - let font_id = font_cache - .select_font(font_family_id, &font_properties) - .unwrap(); - let font_size = settings.buffer_font_size; - - let mut theme = settings.theme.editor.clone(); - theme.text = TextStyle { - color: theme.text.color, - font_family_name, - font_family_id, - font_id, - font_size, - font_properties, - underline: None, - }; - let language = buffer.upgrade(cx).and_then(|buf| buf.read(cx).language(cx)); - let soft_wrap = match settings.soft_wrap(language) { - settings::SoftWrap::None => crate::SoftWrap::None, - settings::SoftWrap::EditorWidth => crate::SoftWrap::EditorWidth, - settings::SoftWrap::PreferredLineLength => crate::SoftWrap::Column( - settings.preferred_line_length(language).saturating_sub(1), - ), - }; - - EditorSettings { - tab_size: settings.tab_size, - soft_wrap, - style: theme, - } - }, + crate::settings_builder(buffer, settings), cx, ) })) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 120f1b710fe712728565b52440daad4fe4d77174..4b518467aa11e78bced91dcda7fb839817db9931 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -87,6 +87,8 @@ pub struct BufferSnapshot { #[derive(Clone, Debug, PartialEq, Eq)] pub struct Diagnostic { + pub source: Option, + pub code: Option, pub severity: DiagnosticSeverity, pub message: String, pub group_id: usize, @@ -720,7 +722,7 @@ impl Buffer { pub fn update_diagnostics( &mut 
self, version: Option, - mut diagnostics: Vec, + mut diagnostics: Vec>, cx: &mut ModelContext, ) -> Result { diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); @@ -736,7 +738,6 @@ impl Buffer { } else { self.deref() }; - let abs_path = self.file.as_ref().and_then(|f| f.abs_path()); let empty_set = HashSet::new(); let disk_based_sources = self @@ -750,26 +751,11 @@ impl Buffer { .peekable(); let mut last_edit_old_end = PointUtf16::zero(); let mut last_edit_new_end = PointUtf16::zero(); - let mut group_ids_by_diagnostic_range = HashMap::new(); - let mut diagnostics_by_group_id = HashMap::new(); - let mut next_group_id = 0; - 'outer: for diagnostic in &diagnostics { - let mut start = diagnostic.range.start.to_point_utf16(); - let mut end = diagnostic.range.end.to_point_utf16(); - let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref(); - let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref()) - .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) - .copied() - .unwrap_or_else(|| { - let group_id = post_inc(&mut next_group_id); - for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) { - group_ids_by_diagnostic_range.insert((source, code, range), group_id); - } - group_id - }); - - if diagnostic + 'outer: for entry in &mut diagnostics { + let mut start = entry.range.start; + let mut end = entry.range.end; + if entry + .diagnostic .source .as_ref() .map_or(false, |source| disk_based_sources.contains(source)) @@ -790,46 +776,20 @@ impl Buffer { end = last_edit_new_end + (end - last_edit_old_end); } - let mut range = content.clip_point_utf16(start, Bias::Left) + entry.range = content.clip_point_utf16(start, Bias::Left) ..content.clip_point_utf16(end, Bias::Right); - if range.start == range.end { - range.end.column += 1; - range.end = content.clip_point_utf16(range.end, Bias::Right); - if range.start == range.end && range.end.column > 0 { - range.start.column -= 1; - range.start = content.clip_point_utf16(range.start, Bias::Left); + if entry.range.start == entry.range.end { + entry.range.end.column += 1; + entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right); + if entry.range.start == entry.range.end && entry.range.end.column > 0 { + entry.range.start.column -= 1; + entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left); } } - - diagnostics_by_group_id - .entry(group_id) - .or_insert(Vec::new()) - .push(DiagnosticEntry { - range, - diagnostic: Diagnostic { - severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: diagnostic.message.clone(), - group_id, - is_primary: false, - }, - }); } drop(edits_since_save); - let new_diagnostics = DiagnosticSet::new( - diagnostics_by_group_id - .into_values() - .flat_map(|mut diagnostics| { - let primary = diagnostics - .iter_mut() - .min_by_key(|entry| entry.diagnostic.severity) - .unwrap(); - primary.diagnostic.is_primary = true; - diagnostics - }), - content, - ); - self.diagnostics = new_diagnostics; + self.diagnostics = DiagnosticSet::new(diagnostics, content); if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -1971,16 +1931,6 @@ impl ToTreeSitterPoint for Point { } } -trait ToPointUtf16 { - fn to_point_utf16(self) -> PointUtf16; -} - -impl ToPointUtf16 for lsp::Position { - fn to_point_utf16(self) -> PointUtf16 { - PointUtf16::new(self.line, self.character) - } -} - impl operation_queue::Operation for Operation { fn lamport_timestamp(&self) -> clock::Lamport { 
match self { @@ -2000,32 +1950,17 @@ impl operation_queue::Operation for Operation { } } -fn diagnostic_ranges<'a>( - diagnostic: &'a lsp::Diagnostic, - abs_path: Option<&'a Path>, -) -> impl 'a + Iterator> { - diagnostic - .related_information - .iter() - .flatten() - .filter_map(move |info| { - if info.location.uri.to_file_path().ok()? == abs_path? { - let info_start = PointUtf16::new( - info.location.range.start.line, - info.location.range.start.character, - ); - let info_end = PointUtf16::new( - info.location.range.end.line, - info.location.range.end.character, - ); - Some(info_start..info_end) - } else { - None - } - }) - .chain(Some( - diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(), - )) +impl Default for Diagnostic { + fn default() -> Self { + Self { + source: Default::default(), + code: Default::default(), + severity: DiagnosticSeverity::ERROR, + message: Default::default(), + group_id: Default::default(), + is_primary: Default::default(), + } + } } pub fn contiguous_ranges( diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 25e2dfe223cd11287ba48f5d3232e3e37403cfaa..304a296088d14725b3c09e2cd851a84924cb3c4a 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -117,6 +117,8 @@ pub fn serialize_diagnostics<'a>( } as i32, group_id: entry.diagnostic.group_id as u64, is_primary: entry.diagnostic.is_primary, + code: entry.diagnostic.code.clone(), + source: entry.diagnostic.source.clone(), }) .collect() } @@ -269,6 +271,8 @@ pub fn deserialize_diagnostics( message: diagnostic.message, group_id: diagnostic.group_id as usize, is_primary: diagnostic.is_primary, + code: diagnostic.code, + source: diagnostic.source, }, }) }) diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 6e2bc43dcd00e36a33cc0812e39a0668088a5763..07f21dabf8032c1d96800dbca67a064dccf17fa1 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -516,23 +516,29 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .update_diagnostics( Some(open_notification.text_document.version), vec![ - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'A'".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + ..Default::default() + }, }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'BB'".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string(), + ..Default::default() + }, }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'CCC'".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'CCC'".to_string(), + ..Default::default() + }, }, ], cx, @@ -553,6 +559,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { 
message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, + ..Default::default() }, }, DiagnosticEntry { @@ -562,6 +569,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, + ..Default::default() } } ] @@ -592,17 +600,21 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .update_diagnostics( Some(open_notification.text_document.version), vec![ - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'A'".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + ..Default::default() + }, }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)), - severity: Some(lsp::DiagnosticSeverity::WARNING), - message: "unreachable statement".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(0, 9)..PointUtf16::new(0, 12), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "unreachable statement".to_string(), + ..Default::default() + }, }, ], cx, @@ -621,6 +633,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "unreachable statement".to_string(), group_id: 1, is_primary: true, + ..Default::default() } }, DiagnosticEntry { @@ -630,6 +643,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, + ..Default::default() }, } ] @@ -670,19 +684,23 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .update_diagnostics( Some(change_notification_2.text_document.version), vec![ - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'BB'".to_string(), - source: Some("disk".to_string()), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(1, 9)..PointUtf16::new(1, 11), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string(), + source: Some("disk".to_string()), + ..Default::default() + }, }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(0, 9)..PointUtf16::new(0, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + source: Some("disk".to_string()), + ..Default::default() + }, }, ], cx, @@ -701,6 +719,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "undefined variable 'A'".to_string(), group_id: 0, is_primary: true, + ..Default::default() } }, DiagnosticEntry { @@ -710,6 +729,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { message: "undefined variable 'BB'".to_string(), group_id: 1, is_primary: true, + ..Default::default() }, } ] @@ -732,23 +752,21 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { .update_diagnostics( None, vec![ - lsp::Diagnostic { - range: lsp::Range::new( - lsp::Position::new(0, 10), - 
lsp::Position::new(0, 10), - ), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "syntax error 1".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "syntax error 1".to_string(), + ..Default::default() + }, }, - lsp::Diagnostic { - range: lsp::Range::new( - lsp::Position::new(1, 10), - lsp::Position::new(1, 10), - ), - severity: Some(lsp::DiagnosticSeverity::ERROR), - message: "syntax error 2".to_string(), - ..Default::default() + DiagnosticEntry { + range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "syntax error 2".to_string(), + ..Default::default() + }, }, ], cx, @@ -766,9 +784,9 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { .collect::>(), &[ ("let one = ", None), - (";", Some(lsp::DiagnosticSeverity::ERROR)), + (";", Some(DiagnosticSeverity::ERROR)), ("\nlet two =", None), - (" ", Some(lsp::DiagnosticSeverity::ERROR)), + (" ", Some(DiagnosticSeverity::ERROR)), ("\nlet three = 3;\n", None) ] ); @@ -776,224 +794,6 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { }); } -#[gpui::test] -async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { - cx.add_model(|cx| { - let text = " - fn foo(mut v: Vec) { - for x in &v { - v.push(1); - } - } - " - .unindent(); - - let file = FakeFile::new("/example.rs"); - let mut buffer = Buffer::from_file(0, text, Box::new(file.clone()), cx); - buffer.set_language(Some(Arc::new(rust_lang())), None, cx); - let diagnostics = vec![ - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), - severity: Some(DiagnosticSeverity::WARNING), - message: "error 1".to_string(), - related_information: Some(vec![lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), - }, - message: "error 1 hint 1".to_string(), - }]), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), - severity: Some(DiagnosticSeverity::HINT), - message: "error 1 hint 1".to_string(), - related_information: Some(vec![lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), - }, - message: "original diagnostic".to_string(), - }]), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)), - severity: Some(DiagnosticSeverity::ERROR), - message: "error 2".to_string(), - related_information: Some(vec![ - lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new( - lsp::Position::new(1, 13), - lsp::Position::new(1, 15), - ), - }, - message: "error 2 hint 1".to_string(), - }, - lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new( - lsp::Position::new(1, 13), - lsp::Position::new(1, 15), - ), - }, - message: "error 2 hint 2".to_string(), - }, - ]), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)), - 
severity: Some(DiagnosticSeverity::HINT), - message: "error 2 hint 1".to_string(), - related_information: Some(vec![lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)), - }, - message: "original diagnostic".to_string(), - }]), - ..Default::default() - }, - lsp::Diagnostic { - range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)), - severity: Some(DiagnosticSeverity::HINT), - message: "error 2 hint 2".to_string(), - related_information: Some(vec![lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)), - }, - message: "original diagnostic".to_string(), - }]), - ..Default::default() - }, - ]; - buffer.update_diagnostics(None, diagnostics, cx).unwrap(); - assert_eq!( - buffer - .snapshot() - .diagnostics_in_range::<_, Point>(0..buffer.len()) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "error 1".to_string(), - group_id: 0, - is_primary: true, - } - }, - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 1 hint 1".to_string(), - group_id: 0, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 1".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 2".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(2, 8)..Point::new(2, 17), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "error 2".to_string(), - group_id: 1, - is_primary: true, - } - } - ] - ); - - assert_eq!( - buffer - .snapshot() - .diagnostic_group::(0) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "error 1".to_string(), - group_id: 0, - is_primary: true, - } - }, - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 1 hint 1".to_string(), - group_id: 0, - is_primary: false, - } - }, - ] - ); - assert_eq!( - buffer - .snapshot() - .diagnostic_group::(1) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 1".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 2".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(2, 8)..Point::new(2, 17), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "error 2".to_string(), - group_id: 1, - is_primary: true, - } - } - ] - ); - - buffer - }); -} - fn chunks_with_diagnostics( buffer: &Buffer, range: Range, diff --git a/crates/project/src/project.rs 
b/crates/project/src/project.rs index b200db63dd7e9ea3961981f9d4a6a9fda723b443..10e92ae56a68bca86787fdc624629437971fa9d2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4,10 +4,11 @@ mod worktree; use anyhow::Result; use client::{Client, UserStore}; +use clock::ReplicaId; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; -use language::LanguageRegistry; +use language::{DiagnosticEntry, LanguageRegistry, PointUtf16}; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -62,6 +63,11 @@ impl Project { } } + pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { + // TODO + self.worktrees.first().unwrap().read(cx).replica_id() + } + pub fn worktrees(&self) -> &[ModelHandle] { &self.worktrees } @@ -159,6 +165,13 @@ impl Project { } } + pub fn diagnostics<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator])> { + std::iter::empty() + } + pub fn active_entry(&self) -> Option { self.active_entry } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index fe6cb2e39412b72f3183b2fe06e80cb7f5815f06..166de727a74c8f5965344af2cfb481fdeea45183 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -12,7 +12,10 @@ use gpui::{ executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle, }; -use language::{Buffer, Language, LanguageRegistry, Operation, Rope}; +use language::{ + Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry, Operation, + PointUtf16, Rope, +}; use lazy_static::lazy_static; use lsp::LanguageServer; use parking_lot::Mutex; @@ -30,7 +33,7 @@ use std::{ ffi::{OsStr, OsString}, fmt, future::Future, - ops::Deref, + ops::{Deref, Range}, path::{Path, PathBuf}, sync::{ atomic::{AtomicUsize, Ordering::SeqCst}, @@ -40,7 +43,7 @@ use std::{ }; use sum_tree::Bias; use sum_tree::{Edit, SeekTarget, SumTree}; -use util::{ResultExt, TryFutureExt}; +use util::{post_inc, ResultExt, TryFutureExt}; lazy_static! { static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); @@ -747,20 +750,67 @@ impl Worktree { cx: &mut ModelContext, ) -> Result<()> { let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; - let file_path = params + let abs_path = params .uri .to_file_path() - .map_err(|_| anyhow!("URI is not a file"))? + .map_err(|_| anyhow!("URI is not a file"))?; + let worktree_path = abs_path .strip_prefix(&this.abs_path) .context("path is not within worktree")? 
.to_owned(); + let mut group_ids_by_diagnostic_range = HashMap::new(); + let mut diagnostics_by_group_id = HashMap::new(); + let mut next_group_id = 0; + for diagnostic in ¶ms.diagnostics { + let source = diagnostic.source.as_ref(); + let code = diagnostic.code.as_ref(); + let group_id = diagnostic_ranges(&diagnostic, &abs_path) + .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) + .copied() + .unwrap_or_else(|| { + let group_id = post_inc(&mut next_group_id); + for range in diagnostic_ranges(&diagnostic, &abs_path) { + group_ids_by_diagnostic_range.insert((source, code, range), group_id); + } + group_id + }); + + diagnostics_by_group_id + .entry(group_id) + .or_insert(Vec::new()) + .push(DiagnosticEntry { + range: diagnostic.range.start.to_point_utf16() + ..diagnostic.range.end.to_point_utf16(), + diagnostic: Diagnostic { + source: diagnostic.source.clone(), + code: diagnostic.code.clone(), + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: diagnostic.message.clone(), + group_id, + is_primary: false, + }, + }); + } + + let diagnostics = diagnostics_by_group_id + .into_values() + .flat_map(|mut diagnostics| { + let primary = diagnostics + .iter_mut() + .min_by_key(|entry| entry.diagnostic.severity) + .unwrap(); + primary.diagnostic.is_primary = true; + diagnostics + }) + .collect::>(); + for buffer in this.open_buffers.values() { if let Some(buffer) = buffer.upgrade(cx) { if buffer .read(cx) .file() - .map_or(false, |file| file.path().as_ref() == file_path) + .map_or(false, |file| file.path().as_ref() == worktree_path) { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( @@ -774,7 +824,7 @@ impl Worktree { } } - this.diagnostics.insert(file_path, params.diagnostics); + this.diagnostics.insert(worktree_path, diagnostics); Ok(()) } @@ -838,7 +888,7 @@ pub struct LocalWorktree { share: Option, open_buffers: HashMap>, shared_buffers: HashMap>>, - diagnostics: HashMap>, + diagnostics: HashMap>>, collaborators: HashMap, queued_operations: Vec<(u64, Operation)>, languages: Arc, @@ -2998,6 +3048,44 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry { } } +trait ToPointUtf16 { + fn to_point_utf16(self) -> PointUtf16; +} + +impl ToPointUtf16 for lsp::Position { + fn to_point_utf16(self) -> PointUtf16 { + PointUtf16::new(self.line, self.character) + } +} + +fn diagnostic_ranges<'a>( + diagnostic: &'a lsp::Diagnostic, + abs_path: &'a Path, +) -> impl 'a + Iterator> { + diagnostic + .related_information + .iter() + .flatten() + .filter_map(move |info| { + if info.location.uri.to_file_path().ok()? 
== abs_path { + let info_start = PointUtf16::new( + info.location.range.start.line, + info.location.range.start.character, + ); + let info_end = PointUtf16::new( + info.location.range.end.line, + info.location.range.end.character, + ); + Some(info_start..info_end) + } else { + None + } + }) + .chain(Some( + diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(), + )) +} + #[cfg(test)] mod tests { use super::*; @@ -3740,6 +3828,224 @@ mod tests { }); } + #[gpui::test] + async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { + cx.add_model(|cx| { + let text = " + fn foo(mut v: Vec) { + for x in &v { + v.push(1); + } + } + " + .unindent(); + + let file = FakeFile::new("/example.rs"); + let mut buffer = Buffer::from_file(0, text, Box::new(file.clone()), cx); + buffer.set_language(Some(Arc::new(rust_lang())), None, cx); + let diagnostics = vec![ + DiagnosticEntry { + range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + diagnostic: Diagnostic { + severity: Some(DiagnosticSeverity::WARNING), + message: "error 1".to_string(), + related_information: Some(vec![lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + }, + message: "error 1 hint 1".to_string(), + }]), + ..Default::default() + }, + }, + DiagnosticEntry { + range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + diagnostic: Diagnostic {}, + severity: Some(DiagnosticSeverity::HINT), + message: "error 1 hint 1".to_string(), + related_information: Some(vec![lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + }, + message: "original diagnostic".to_string(), + }]), + ..Default::default() + }, + DiagnosticEntry { + range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), + diagnostic: Diagnostic {}, + severity: Some(DiagnosticSeverity::ERROR), + message: "error 2".to_string(), + related_information: Some(vec![ + lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + }, + message: "error 2 hint 1".to_string(), + }, + lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + }, + message: "error 2 hint 2".to_string(), + }, + ]), + ..Default::default() + }, + DiagnosticEntry { + range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + diagnostic: Diagnostic {}, + severity: Some(DiagnosticSeverity::HINT), + message: "error 2 hint 1".to_string(), + related_information: Some(vec![lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), + }, + message: "original diagnostic".to_string(), + }]), + ..Default::default() + }, + DiagnosticEntry { + range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + diagnostic: Diagnostic {}, + severity: Some(DiagnosticSeverity::HINT), + message: "error 2 hint 2".to_string(), + related_information: Some(vec![lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), + range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), + }, + message: "original diagnostic".to_string(), + }]), + ..Default::default() + }, + ]; + 
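Stepping outside the patch for a moment: the grouping pass that `Worktree::update_diagnostics` gains above is the interesting part. Diagnostics that share a source and code and whose ranges overlap via related information are assigned the same group id, and the most severe member of each group is marked as the primary entry. The standalone sketch below mirrors that strategy with simplified stand-in types (`RawDiagnostic`, `GroupedDiagnostic`, integer severities, byte ranges instead of `PointUtf16`); it is an illustration of the approach, not the worktree code itself:

// Standalone sketch of the grouping strategy. Lower `severity` values are
// more severe, matching lsp::DiagnosticSeverity (ERROR = 1 ... HINT = 4).
use std::collections::HashMap;
use std::ops::Range;

struct RawDiagnostic {
    source: Option<String>,
    code: Option<String>,
    severity: i32,
    // The diagnostic's own range plus the ranges of its related information.
    ranges: Vec<Range<usize>>,
}

struct GroupedDiagnostic {
    group_id: usize,
    is_primary: bool,
    severity: i32,
}

fn group_diagnostics(diagnostics: &[RawDiagnostic]) -> Vec<GroupedDiagnostic> {
    let mut group_ids_by_range = HashMap::new();
    let mut groups: HashMap<usize, Vec<GroupedDiagnostic>> = HashMap::new();
    let mut next_group_id = 0;

    for diagnostic in diagnostics {
        let key = |range: &Range<usize>| {
            (diagnostic.source.clone(), diagnostic.code.clone(), range.clone())
        };
        // Join the group of any previously seen diagnostic that already
        // claimed one of these keyed ranges; otherwise open a new group and
        // claim all of this diagnostic's ranges for it.
        let group_id = diagnostic
            .ranges
            .iter()
            .find_map(|range| group_ids_by_range.get(&key(range)).copied())
            .unwrap_or_else(|| {
                let group_id = next_group_id;
                next_group_id += 1;
                for range in &diagnostic.ranges {
                    group_ids_by_range.insert(key(range), group_id);
                }
                group_id
            });
        groups.entry(group_id).or_default().push(GroupedDiagnostic {
            group_id,
            is_primary: false,
            severity: diagnostic.severity,
        });
    }

    // Within each group, the most severe entry becomes the primary one.
    groups
        .into_values()
        .flat_map(|mut group| {
            let primary = group.iter_mut().min_by_key(|d| d.severity).unwrap();
            primary.is_primary = true;
            group
        })
        .collect()
}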
buffer.update_diagnostics(None, diagnostics, cx).unwrap(); + assert_eq!( + buffer + .snapshot() + .diagnostics_in_range::<_, Point>(0..buffer.len()) + .collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "error 1".to_string(), + group_id: 0, + is_primary: true, + } + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 1 hint 1".to_string(), + group_id: 0, + is_primary: false, + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 1".to_string(), + group_id: 1, + is_primary: false, + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 2".to_string(), + group_id: 1, + is_primary: false, + } + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "error 2".to_string(), + group_id: 1, + is_primary: true, + } + } + ] + ); + + assert_eq!( + buffer + .snapshot() + .diagnostic_group::(0) + .collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "error 1".to_string(), + group_id: 0, + is_primary: true, + } + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 1 hint 1".to_string(), + group_id: 0, + is_primary: false, + } + }, + ] + ); + assert_eq!( + buffer + .snapshot() + .diagnostic_group::(1) + .collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 1".to_string(), + group_id: 1, + is_primary: false, + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 2".to_string(), + group_id: 1, + is_primary: false, + } + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "error 2".to_string(), + group_id: 1, + is_primary: true, + } + } + ] + ); + + buffer + }); + } + #[gpui::test(iterations = 100)] fn test_random(mut rng: StdRng) { let operations = env::var("OPERATIONS") diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 5ea70b7d8a4c1f23ada14b47e0001fd3065a50cd..fa4efe695b48bfaa03b8cfbdc1fa640eefd42951 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -269,6 +269,8 @@ message Diagnostic { string message = 4; uint64 group_id = 5; bool is_primary = 6; + optional string code = 7; + optional string source = 8; enum Severity { None = 0; Error = 1; From 418a9a3d6607c2f0309fbf029bbfb7a94b18d5b5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 11:15:03 -0800 Subject: [PATCH 064/196] Get things compiling with diagnostics on worktree Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/diagnostics/src/diagnostics.rs | 4 +- crates/language/src/tests.rs | 84 +----- crates/project/src/project.rs | 14 +- crates/project/src/worktree.rs | 411 +++++++++++++++----------- crates/rpc/proto/zed.proto 
| 8 + crates/server/src/rpc.rs | 6 +- 6 files changed, 264 insertions(+), 263 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 83bc074f1888f8637ac89ea3525a519fe8c53fe0..d04c69611bd8c2be9d54ccd0c66460d58ddfd77b 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -15,7 +15,9 @@ impl ProjectDiagnostics { cx: &mut ViewContext, ) -> Self { let mut buffer = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); - for (path, diagnostics) in project.read(cx).diagnostics(cx) {} + for diagnostic_summary in project.read(cx).diagnostic_summaries(cx) { + // + } Self { editor: cx.add_view(|cx| { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 07f21dabf8032c1d96800dbca67a064dccf17fa1..93c2f99c98d8cdfbec1b38d4582e42594c71144a 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -1,14 +1,11 @@ use super::*; -use gpui::{ModelHandle, MutableAppContext, Task}; +use gpui::{ModelHandle, MutableAppContext}; use std::{ - any::Any, cell::RefCell, - ffi::OsString, iter::FromIterator, ops::Range, - path::PathBuf, rc::Rc, - time::{Duration, Instant, SystemTime}, + time::{Duration, Instant}, }; use unindent::Unindent as _; @@ -871,80 +868,3 @@ fn rust_lang() -> Language { fn empty(point: Point) -> Range { point..point } - -#[derive(Clone)] -struct FakeFile { - abs_path: PathBuf, -} - -impl FakeFile { - fn new(abs_path: impl Into) -> Self { - Self { - abs_path: abs_path.into(), - } - } -} - -impl File for FakeFile { - fn worktree_id(&self) -> usize { - todo!() - } - - fn entry_id(&self) -> Option { - todo!() - } - - fn mtime(&self) -> SystemTime { - SystemTime::now() - } - - fn path(&self) -> &Arc { - todo!() - } - - fn abs_path(&self) -> Option { - Some(self.abs_path.clone()) - } - - fn full_path(&self) -> PathBuf { - todo!() - } - - fn file_name(&self) -> Option { - todo!() - } - - fn is_deleted(&self) -> bool { - todo!() - } - - fn save( - &self, - _: u64, - _: Rope, - _: clock::Global, - _: &mut MutableAppContext, - ) -> Task> { - todo!() - } - - fn load_local(&self, _: &AppContext) -> Option>> { - todo!() - } - - fn buffer_updated(&self, _: u64, _: super::Operation, _: &mut MutableAppContext) { - todo!() - } - - fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) { - todo!() - } - - fn boxed_clone(&self) -> Box { - todo!() - } - - fn as_any(&self) -> &dyn Any { - todo!() - } -} diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 10e92ae56a68bca86787fdc624629437971fa9d2..ca15f673773e895f00c0b53aa90cd7e2e1cb0af5 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8,7 +8,7 @@ use clock::ReplicaId; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; -use language::{DiagnosticEntry, LanguageRegistry, PointUtf16}; +use language::LanguageRegistry; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -39,6 +39,14 @@ pub struct ProjectPath { pub path: Arc, } +pub struct DiagnosticSummary { + pub project_path: ProjectPath, + pub error_count: usize, + pub warning_count: usize, + pub info_count: usize, + pub hint_count: usize, +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct ProjectEntry { pub worktree_id: usize, @@ -165,10 +173,10 @@ impl Project { } } - pub fn diagnostics<'a>( + pub fn diagnostic_summaries<'a>( &'a self, cx: &'a AppContext, - ) -> impl 
Iterator])> { + ) -> impl Iterator { std::iter::empty() } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 166de727a74c8f5965344af2cfb481fdeea45183..fa6f6a0f396dd7797e3e082113937b5e39b54882 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1,6 +1,7 @@ use super::{ fs::{self, Fs}, ignore::IgnoreStack, + DiagnosticSummary, }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; @@ -306,6 +307,7 @@ impl Worktree { updates_tx, client: client.clone(), open_buffers: Default::default(), + diagnostics: Vec::new(), collaborators, queued_operations: Default::default(), languages, @@ -467,6 +469,13 @@ impl Worktree { } } + pub fn diagnostic_summaries<'a>( + &'a self, + cx: &'a AppContext, + ) -> impl Iterator { + std::iter::empty() + } + pub fn open_buffer( &mut self, path: impl AsRef, @@ -754,10 +763,11 @@ impl Worktree { .uri .to_file_path() .map_err(|_| anyhow!("URI is not a file"))?; - let worktree_path = abs_path - .strip_prefix(&this.abs_path) - .context("path is not within worktree")? - .to_owned(); + let worktree_path = Arc::from( + abs_path + .strip_prefix(&this.abs_path) + .context("path is not within worktree")?, + ); let mut group_ids_by_diagnostic_range = HashMap::new(); let mut diagnostics_by_group_id = HashMap::new(); @@ -784,7 +794,10 @@ impl Worktree { ..diagnostic.range.end.to_point_utf16(), diagnostic: Diagnostic { source: diagnostic.source.clone(), - code: diagnostic.code.clone(), + code: diagnostic.code.clone().map(|code| match code { + lsp::NumberOrString::Number(code) => code.to_string(), + lsp::NumberOrString::String(code) => code, + }), severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), message: diagnostic.message.clone(), group_id, @@ -810,12 +823,12 @@ impl Worktree { if buffer .read(cx) .file() - .map_or(false, |file| file.path().as_ref() == worktree_path) + .map_or(false, |file| *file.path() == worktree_path) { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( buffer.remote_id(), - buffer.update_diagnostics(params.version, params.diagnostics, cx), + buffer.update_diagnostics(params.version, diagnostics, cx), ) }); self.send_buffer_update(remote_id, operation?, cx); @@ -888,7 +901,7 @@ pub struct LocalWorktree { share: Option, open_buffers: HashMap>, shared_buffers: HashMap>>, - diagnostics: HashMap>>, + diagnostics: HashMap, Vec>>, collaborators: HashMap, queued_operations: Vec<(u64, Operation)>, languages: Arc, @@ -1488,6 +1501,7 @@ pub struct RemoteWorktree { replica_id: ReplicaId, open_buffers: HashMap, collaborators: HashMap, + diagnostics: Vec, languages: Arc, user_store: ModelHandle, queued_operations: Vec<(u64, Operation)>, @@ -3105,6 +3119,7 @@ mod tests { time::{SystemTime, UNIX_EPOCH}, }; use text::Point; + use unindent::Unindent as _; use util::test::temp_tree; #[gpui::test] @@ -3821,7 +3836,8 @@ mod tests { severity: lsp::DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), group_id: 0, - is_primary: true + is_primary: true, + ..Default::default() } }] ) @@ -3830,220 +3846,265 @@ mod tests { #[gpui::test] async fn test_grouped_diagnostics(mut cx: gpui::TestAppContext) { - cx.add_model(|cx| { - let text = " - fn foo(mut v: Vec) { - for x in &v { - v.push(1); - } - } - " - .unindent(); + let fs = Arc::new(FakeFs::new()); + let client = Client::new(); + let http_client = FakeHttpClient::with_404_response(); + let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); - let file 
= FakeFile::new("/example.rs"); - let mut buffer = Buffer::from_file(0, text, Box::new(file.clone()), cx); - buffer.set_language(Some(Arc::new(rust_lang())), None, cx); - let diagnostics = vec![ - DiagnosticEntry { - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), - diagnostic: Diagnostic { - severity: Some(DiagnosticSeverity::WARNING), - message: "error 1".to_string(), - related_information: Some(vec![lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), - }, - message: "error 1 hint 1".to_string(), - }]), - ..Default::default() - }, + fs.insert_tree( + "/the-dir", + json!({ + "a.rs": " + fn foo(mut v: Vec) { + for x in &v { + v.push(1); + } + } + " + .unindent(), + }), + ) + .await; + + let worktree = Worktree::open_local( + client.clone(), + user_store, + "/the-dir".as_ref(), + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + let buffer = worktree + .update(&mut cx, |tree, cx| tree.open_buffer("a.rs", cx)) + .await + .unwrap(); + + let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap(); + let message = lsp::PublishDiagnosticsParams { + uri: buffer_uri.clone(), + diagnostics: vec![ + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), + severity: Some(DiagnosticSeverity::WARNING), + message: "error 1".to_string(), + related_information: Some(vec![lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(1, 8), + lsp::Position::new(1, 9), + ), + }, + message: "error 1 hint 1".to_string(), + }]), + ..Default::default() }, - DiagnosticEntry { - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), - diagnostic: Diagnostic {}, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)), severity: Some(DiagnosticSeverity::HINT), message: "error 1 hint 1".to_string(), related_information: Some(vec![lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(1, 8), + lsp::Position::new(1, 9), + ), }, message: "original diagnostic".to_string(), }]), ..Default::default() }, - DiagnosticEntry { - range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), - diagnostic: Diagnostic {}, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)), severity: Some(DiagnosticSeverity::ERROR), message: "error 2".to_string(), related_information: Some(vec![ lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(1, 13), + lsp::Position::new(1, 15), + ), }, message: "error 2 hint 1".to_string(), }, lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(1, 13), + lsp::Position::new(1, 15), + ), }, message: "error 2 hint 2".to_string(), }, ]), ..Default::default() }, - DiagnosticEntry { - range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), - diagnostic: Diagnostic {}, + lsp::Diagnostic { 
+ range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)), severity: Some(DiagnosticSeverity::HINT), message: "error 2 hint 1".to_string(), related_information: Some(vec![lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(2, 8), + lsp::Position::new(2, 17), + ), }, message: "original diagnostic".to_string(), }]), ..Default::default() }, - DiagnosticEntry { - range: PointUtf16::new(1, 13)..PointUtf16::new(1, 15), - diagnostic: Diagnostic {}, + lsp::Diagnostic { + range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)), severity: Some(DiagnosticSeverity::HINT), message: "error 2 hint 2".to_string(), related_information: Some(vec![lsp::DiagnosticRelatedInformation { location: lsp::Location { - uri: lsp::Url::from_file_path(&file.abs_path).unwrap(), - range: PointUtf16::new(2, 8)..PointUtf16::new(2, 17), + uri: buffer_uri.clone(), + range: lsp::Range::new( + lsp::Position::new(2, 8), + lsp::Position::new(2, 17), + ), }, message: "original diagnostic".to_string(), }]), ..Default::default() }, - ]; - buffer.update_diagnostics(None, diagnostics, cx).unwrap(); - assert_eq!( - buffer - .snapshot() - .diagnostics_in_range::<_, Point>(0..buffer.len()) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "error 1".to_string(), - group_id: 0, - is_primary: true, - } - }, - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 1 hint 1".to_string(), - group_id: 0, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 1".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 2".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(2, 8)..Point::new(2, 17), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "error 2".to_string(), - group_id: 1, - is_primary: true, - } - } - ] - ); + ], + version: None, + }; - assert_eq!( - buffer - .snapshot() - .diagnostic_group::(0) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "error 1".to_string(), - group_id: 0, - is_primary: true, - } - }, - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 1 hint 1".to_string(), - group_id: 0, - is_primary: false, - } - }, - ] - ); - assert_eq!( - buffer - .snapshot() - .diagnostic_group::(1) - .collect::>(), - &[ - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 1".to_string(), - group_id: 1, - is_primary: false, - } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 2".to_string(), - group_id: 1, - is_primary: 
false, - } - }, - DiagnosticEntry { - range: Point::new(2, 8)..Point::new(2, 17), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "error 2".to_string(), - group_id: 1, - is_primary: true, - } - } - ] - ); + worktree + .update(&mut cx, |tree, cx| tree.update_diagnostics(message, cx)) + .unwrap(); + let buffer = buffer.read_with(&cx, |buffer, cx| buffer.snapshot()); + assert_eq!( buffer - }); + .diagnostics_in_range::<_, Point>(0..buffer.len()) + .collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "error 1".to_string(), + group_id: 0, + is_primary: true, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 1 hint 1".to_string(), + group_id: 0, + is_primary: false, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 1".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 2".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "error 2".to_string(), + group_id: 1, + is_primary: true, + ..Default::default() + } + } + ] + ); + + assert_eq!( + buffer.diagnostic_group::(0).collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "error 1".to_string(), + group_id: 0, + is_primary: true, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 1 hint 1".to_string(), + group_id: 0, + is_primary: false, + ..Default::default() + } + }, + ] + ); + assert_eq!( + buffer.diagnostic_group::(1).collect::>(), + &[ + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 1".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 2".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } + }, + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "error 2".to_string(), + group_id: 1, + is_primary: true, + ..Default::default() + } + } + ] + ); } #[gpui::test(iterations = 100)] diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index fa4efe695b48bfaa03b8cfbdc1fa640eefd42951..34c80437955cfa6604077278ebdfe86d6dc79658 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -40,6 +40,7 @@ message Envelope { UnshareWorktree unshare_worktree = 35; UpdateContacts update_contacts = 36; LeaveWorktree leave_worktree = 37; + UpdateDiagnosticSummary update_diagnostic_summary = 38; } } @@ -138,6 +139,13 @@ message BufferSaved { 
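The `UpdateDiagnosticSummary` message added in the hunk just below carries only aggregate counts per path over the wire (errors and warnings for now), while the `DiagnosticSummary` struct added to crates/project earlier in this commit also tracks info and hint counts. A hedged sketch of how such a summary could be tallied from a buffer's diagnostics; `Summary` and `summarize` are illustrative stand-ins, not the project's API, and the severity constants follow `lsp::DiagnosticSeverity`:

// Sketch: tally summary counts from a stream of diagnostic severities.
use lsp::DiagnosticSeverity;

#[derive(Default, Debug)]
struct Summary {
    error_count: usize,
    warning_count: usize,
    info_count: usize,
    hint_count: usize,
}

fn summarize(severities: impl IntoIterator<Item = DiagnosticSeverity>) -> Summary {
    let mut summary = Summary::default();
    for severity in severities {
        if severity == DiagnosticSeverity::ERROR {
            summary.error_count += 1;
        } else if severity == DiagnosticSeverity::WARNING {
            summary.warning_count += 1;
        } else if severity == DiagnosticSeverity::INFORMATION {
            summary.info_count += 1;
        } else if severity == DiagnosticSeverity::HINT {
            summary.hint_count += 1;
        }
    }
    summary
}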
Timestamp mtime = 4; } +message UpdateDiagnosticSummary { + uint64 worktree_id = 1; + string path = 2; + uint32 error_count = 3; + uint32 warning_count = 4; +} + message GetChannels {} message GetChannelsResponse { diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 7220740af47c2baab2d4bb47f4781237395563c9..e06db2a273e0cd0fbbf2680ea7d285a63dc11356 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1719,7 +1719,8 @@ mod tests { group_id: 0, message: "message 1".to_string(), severity: lsp::DiagnosticSeverity::ERROR, - is_primary: true + is_primary: true, + ..Default::default() } }, DiagnosticEntry { @@ -1728,7 +1729,8 @@ mod tests { group_id: 1, severity: lsp::DiagnosticSeverity::WARNING, message: "message 2".to_string(), - is_primary: true + is_primary: true, + ..Default::default() } } ] From 52b8e3d1a283ecdf4cd73911992461d9ba6eecec Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 11:34:56 -0800 Subject: [PATCH 065/196] Get tests passing after diagnostic + selection changes Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 5 +++++ crates/editor/src/multi_buffer.rs | 22 ++++++++++++++++++---- crates/language/src/tests.rs | 16 ++++++++++++++++ 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 52bc739d0709ca9b787b8c63e1235ea182ab78ca..0eecd17ce65eae20c873ef9dbef42f8c03b300e0 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5948,6 +5948,11 @@ mod tests { .update(cx, |display_map, cx| display_map.snapshot(cx)); self.selections .iter() + .chain( + self.pending_selection + .as_ref() + .map(|pending| &pending.selection), + ) .map(|s| { if s.reversed { s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 503707e848f51168cc41555edc22aeb312d171ef..d79c0749061291fd4a19f052948d1d865ee6b928 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -163,18 +163,18 @@ impl MultiBuffer { self.subscriptions.subscribe() } - pub fn edit(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext) + pub fn edit(&mut self, ranges: I, new_text: T, cx: &mut ModelContext) where I: IntoIterator>, S: ToOffset, T: Into, { - self.edit_internal(ranges_iter, new_text, false, cx) + self.edit_internal(ranges, new_text, false, cx) } pub fn edit_with_autoindent( &mut self, - ranges_iter: I, + ranges: I, new_text: T, cx: &mut ModelContext, ) where @@ -182,7 +182,7 @@ impl MultiBuffer { S: ToOffset, T: Into, { - self.edit_internal(ranges_iter, new_text, true, cx) + self.edit_internal(ranges, new_text, true, cx) } pub fn edit_internal( @@ -196,6 +196,20 @@ impl MultiBuffer { S: ToOffset, T: Into, { + if let Some(buffer) = self.as_singleton() { + let snapshot = self.read(cx); + let ranges = ranges_iter + .into_iter() + .map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot)); + return buffer.update(cx, |buffer, cx| { + if autoindent { + buffer.edit_with_autoindent(ranges, new_text, cx) + } else { + buffer.edit(ranges, new_text, cx) + } + }); + } + let snapshot = self.read(cx); let mut buffer_edits: HashMap, bool)>> = Default::default(); let mut cursor = snapshot.excerpts.cursor::(); diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 93c2f99c98d8cdfbec1b38d4582e42594c71144a..e18589cbc949c0b2e266b854b0fceee998e8bd2c 100644 --- a/crates/language/src/tests.rs +++ 
b/crates/language/src/tests.rs @@ -518,6 +518,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + group_id: 0, + is_primary: true, ..Default::default() }, }, @@ -526,6 +528,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), + group_id: 1, + is_primary: true, ..Default::default() }, }, @@ -534,6 +538,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), + group_id: 2, + is_primary: true, ..Default::default() }, }, @@ -602,6 +608,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + group_id: 0, + is_primary: true, ..Default::default() }, }, @@ -610,6 +618,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), + group_id: 1, + is_primary: true, ..Default::default() }, }, @@ -687,6 +697,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), source: Some("disk".to_string()), + group_id: 1, + is_primary: true, ..Default::default() }, }, @@ -696,6 +708,8 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), source: Some("disk".to_string()), + group_id: 0, + is_primary: true, ..Default::default() }, }, @@ -714,6 +728,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + source: Some("disk".to_string()), group_id: 0, is_primary: true, ..Default::default() @@ -724,6 +739,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), + source: Some("disk".to_string()), group_id: 1, is_primary: true, ..Default::default() From 6ab795c6293f709c7b766dc529b724a5f0a1bb9b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 16:35:32 -0800 Subject: [PATCH 066/196] Handle buffer deduping in the worktree instead of in workspace Previously, buffers were only deduped by file if they were opened through Workspace::open_entry --- Cargo.lock | 1 + crates/project/Cargo.toml | 2 + crates/project/src/worktree.rs | 434 +++++++++++++++++++----------- crates/server/src/rpc.rs | 2 +- crates/workspace/src/workspace.rs | 53 +--- 5 files changed, 294 insertions(+), 198 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c9408f013cae611cde76165c9cc59a927c84fc3..0a2cf5ad042a0e63c76bd047d69ec63b44c58e24 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3456,6 +3456,7 @@ dependencies = [ "async-trait", "client", "clock", + "collections", "fsevent", "futures", "fuzzy", diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index fce637c8e7a2b6c3ea4b68def5072ff1dd5b66ca..6d5e2790ef781ed04f0c261ee23911954509de8f 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -13,6 +13,7 @@ test-support = ["language/test-support", "text/test-support"] text = { path = "../text" } client = { path = 
"../client" } clock = { path = "../clock" } +collections = { path = "../collections" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } @@ -37,6 +38,7 @@ toml = "0.5" [dev-dependencies] client = { path = "../client", features = ["test-support"] } +collections = { path = "../collections", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } lsp = { path = "../lsp", features = ["test-support"] } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index fa6f6a0f396dd7797e3e082113937b5e39b54882..4a646cb17ba4dcad8fea012ba1dc31ca78156a4a 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -7,6 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; +use collections::{hash_map, HashMap}; use futures::{Stream, StreamExt}; use fuzzy::CharBag; use gpui::{ @@ -14,8 +15,8 @@ use gpui::{ Task, UpgradeModelHandle, WeakModelHandle, }; use language::{ - Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry, Operation, - PointUtf16, Rope, + Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, File as _, Language, LanguageRegistry, + Operation, PointUtf16, Rope, }; use lazy_static::lazy_static; use lsp::LanguageServer; @@ -29,7 +30,6 @@ use smol::channel::{self, Sender}; use std::{ any::Any, cmp::{self, Ordering}, - collections::HashMap, convert::{TryFrom, TryInto}, ffi::{OsStr, OsString}, fmt, @@ -240,7 +240,7 @@ impl Worktree { user_store .update(cx, |user_store, cx| user_store.load_users(user_ids, cx)) .await?; - let mut collaborators = HashMap::with_capacity(join_response.collaborators.len()); + let mut collaborators = HashMap::default(); for message in join_response.collaborators { let collaborator = Collaborator::from_proto(message, &user_store, cx).await?; collaborators.insert(collaborator.peer_id, collaborator); @@ -306,8 +306,9 @@ impl Worktree { snapshot_rx, updates_tx, client: client.clone(), + loading_buffers: Default::default(), open_buffers: Default::default(), - diagnostics: Vec::new(), + diagnostic_summaries: HashMap::default(), collaborators, queued_operations: Default::default(), languages, @@ -476,15 +477,65 @@ impl Worktree { std::iter::empty() } + pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers { + match self { + Worktree::Local(worktree) => &mut worktree.loading_buffers, + Worktree::Remote(worktree) => &mut worktree.loading_buffers, + } + } + pub fn open_buffer( &mut self, path: impl AsRef, cx: &mut ModelContext, ) -> Task>> { - match self { - Worktree::Local(worktree) => worktree.open_buffer(path.as_ref(), cx), - Worktree::Remote(worktree) => worktree.open_buffer(path.as_ref(), cx), + let path = path.as_ref(); + + // If there is already a buffer for the given path, then return it. 
+ let existing_buffer = match self { + Worktree::Local(worktree) => worktree.get_open_buffer(path, cx), + Worktree::Remote(worktree) => worktree.get_open_buffer(path, cx), + }; + if let Some(existing_buffer) = existing_buffer { + return cx.spawn(move |_, _| async move { Ok(existing_buffer) }); } + + let path: Arc = Arc::from(path); + let mut loading_watch = match self.loading_buffers().entry(path.clone()) { + // If the given path is already being loaded, then wait for that existing + // task to complete and return the same buffer. + hash_map::Entry::Occupied(e) => e.get().clone(), + + // Otherwise, record the fact that this path is now being loaded. + hash_map::Entry::Vacant(entry) => { + let (mut tx, rx) = postage::watch::channel(); + entry.insert(rx.clone()); + + let load_buffer = match self { + Worktree::Local(worktree) => worktree.open_buffer(&path, cx), + Worktree::Remote(worktree) => worktree.open_buffer(&path, cx), + }; + cx.spawn(move |this, mut cx| async move { + let result = load_buffer.await; + + // After the buffer loads, record the fact that it is no longer + // loading. + this.update(&mut cx, |this, _| this.loading_buffers().remove(&path)); + *tx.borrow_mut() = Some(result.map_err(|e| Arc::new(e))); + }) + .detach(); + rx + } + }; + + cx.spawn(|_, _| async move { + loop { + if let Some(result) = loading_watch.borrow().as_ref() { + return result.clone().map_err(|e| anyhow!("{}", e)); + } + loading_watch.recv().await; + } + }) } #[cfg(feature = "test-support")] @@ -769,8 +820,8 @@ impl Worktree { .context("path is not within worktree")?, ); - let mut group_ids_by_diagnostic_range = HashMap::new(); - let mut diagnostics_by_group_id = HashMap::new(); + let mut group_ids_by_diagnostic_range = HashMap::default(); + let mut diagnostics_by_group_id = HashMap::default(); let mut next_group_id = 0; for diagnostic in ¶ms.diagnostics { let source = diagnostic.source.as_ref(); @@ -878,15 +929,18 @@ impl Worktree { } } -impl Deref for Worktree { - type Target = Snapshot; - - fn deref(&self) -> &Self::Target { - match self { - Worktree::Local(worktree) => &worktree.snapshot, - Worktree::Remote(worktree) => &worktree.snapshot, - } - } +#[derive(Clone)] +pub struct Snapshot { + id: usize, + scan_id: usize, + abs_path: Arc, + root_name: String, + root_char_bag: CharBag, + ignores: HashMap, (Arc, usize)>, + entries_by_path: SumTree, + entries_by_id: SumTree, + removed_entry_ids: HashMap, + next_entry_id: Arc, } pub struct LocalWorktree { @@ -899,9 +953,11 @@ pub struct LocalWorktree { poll_task: Option>, remote_id: watch::Receiver>, share: Option, + loading_buffers: LoadingBuffers, open_buffers: HashMap>, shared_buffers: HashMap>>, diagnostics: HashMap, Vec>>, + diagnostic_summaries: HashMap, DiagnosticSummary>, collaborators: HashMap, queued_operations: Vec<(u64, Operation)>, languages: Arc, @@ -911,6 +967,33 @@ pub struct LocalWorktree { language_servers: HashMap>, } +struct ShareState { + snapshots_tx: Sender, + _subscriptions: Vec, +} + +pub struct RemoteWorktree { + remote_id: u64, + snapshot: Snapshot, + snapshot_rx: watch::Receiver, + client: Arc, + updates_tx: postage::mpsc::Sender, + replica_id: ReplicaId, + loading_buffers: LoadingBuffers, + open_buffers: HashMap, + collaborators: HashMap, + diagnostic_summaries: HashMap, DiagnosticSummary>, + languages: Arc, + user_store: ModelHandle, + queued_operations: Vec<(u64, Operation)>, + _subscriptions: Vec, +} + +type LoadingBuffers = HashMap< + Arc, + postage::watch::Receiver, Arc>>>, +>; + #[derive(Default, Deserialize)] struct 
WorktreeConfig { collaborators: Vec, @@ -1015,9 +1098,11 @@ impl LocalWorktree { _maintain_remote_id_task, share: None, poll_task: None, + loading_buffers: Default::default(), open_buffers: Default::default(), shared_buffers: Default::default(), diagnostics: Default::default(), + diagnostic_summaries: Default::default(), queued_operations: Default::default(), collaborators: Default::default(), languages, @@ -1105,20 +1190,18 @@ impl LocalWorktree { } } - pub fn open_buffer( + fn get_open_buffer( &mut self, path: &Path, cx: &mut ModelContext, - ) -> Task>> { + ) -> Option> { let handle = cx.handle(); - - // If there is already a buffer for the given path, then return it. - let mut existing_buffer = None; + let mut result = None; self.open_buffers.retain(|_buffer_id, buffer| { if let Some(buffer) = buffer.upgrade(cx.as_ref()) { if let Some(file) = buffer.read(cx.as_ref()).file() { if file.worktree_id() == handle.id() && file.path().as_ref() == path { - existing_buffer = Some(buffer); + result = Some(buffer); } } true @@ -1126,45 +1209,45 @@ impl LocalWorktree { false } }); + result + } + fn open_buffer( + &mut self, + path: &Path, + cx: &mut ModelContext, + ) -> Task>> { let path = Arc::from(path); - cx.spawn(|this, mut cx| async move { - if let Some(existing_buffer) = existing_buffer { - Ok(existing_buffer) - } else { - let (file, contents) = this - .update(&mut cx, |this, cx| this.as_local().unwrap().load(&path, cx)) - .await?; - let language = this.read_with(&cx, |this, _| { - use language::File; - this.languages().select_language(file.full_path()).cloned() - }); - let (diagnostics, language_server) = this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - ( - this.diagnostics.remove(path.as_ref()), - language - .as_ref() - .and_then(|language| this.ensure_language_server(language, cx)), - ) - }); - let buffer = cx.add_model(|cx| { - let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); - buffer.set_language(language, language_server, cx); - if let Some(diagnostics) = diagnostics { - buffer.update_diagnostics(None, diagnostics, cx).unwrap(); - } - buffer - }); - this.update(&mut cx, |this, _| { - let this = this - .as_local_mut() - .ok_or_else(|| anyhow!("must be a local worktree"))?; + cx.spawn(move |this, mut cx| async move { + let (file, contents) = this + .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) + .await?; - this.open_buffers.insert(buffer.id(), buffer.downgrade()); - Ok(buffer) - }) - } + let (diagnostics, language, language_server) = this.update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + let diagnostics = this.diagnostics.remove(&path); + let language = this.languages.select_language(file.full_path()).cloned(); + let server = language + .as_ref() + .and_then(|language| this.ensure_language_server(language, cx)); + (diagnostics, language, server) + }); + + let buffer = cx.add_model(|cx| { + let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); + buffer.set_language(language, language_server, cx); + if let Some(diagnostics) = diagnostics { + buffer.update_diagnostics(None, diagnostics, cx).unwrap(); + } + buffer + }); + + this.update(&mut cx, |this, _| { + let this = this.as_local_mut().unwrap(); + this.open_buffers.insert(buffer.id(), buffer.downgrade()); + }); + + Ok(buffer) }) } @@ -1173,13 +1256,12 @@ impl LocalWorktree { envelope: TypedEnvelope, cx: &mut ModelContext, ) -> Task> { - let peer_id = envelope.original_sender_id(); - let path = Path::new(&envelope.payload.path); - - let 
buffer = self.open_buffer(path, cx); - cx.spawn(|this, mut cx| async move { - let buffer = buffer.await?; + let peer_id = envelope.original_sender_id(); + let path = Path::new(&envelope.payload.path); + let buffer = this + .update(&mut cx, |this, cx| this.open_buffer(path, cx)) + .await?; this.update(&mut cx, |this, cx| { this.as_local_mut() .unwrap() @@ -1473,6 +1555,17 @@ fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { Ok(builder.build()?) } +impl Deref for Worktree { + type Target = Snapshot; + + fn deref(&self) -> &Self::Target { + match self { + Worktree::Local(worktree) => &worktree.snapshot, + Worktree::Remote(worktree) => &worktree.snapshot, + } + } +} + impl Deref for LocalWorktree { type Target = Snapshot; @@ -1487,38 +1580,18 @@ impl fmt::Debug for LocalWorktree { } } -struct ShareState { - snapshots_tx: Sender, - _subscriptions: Vec, -} - -pub struct RemoteWorktree { - remote_id: u64, - snapshot: Snapshot, - snapshot_rx: watch::Receiver, - client: Arc, - updates_tx: postage::mpsc::Sender, - replica_id: ReplicaId, - open_buffers: HashMap, - collaborators: HashMap, - diagnostics: Vec, - languages: Arc, - user_store: ModelHandle, - queued_operations: Vec<(u64, Operation)>, - _subscriptions: Vec, -} - impl RemoteWorktree { - pub fn open_buffer( + fn get_open_buffer( &mut self, path: &Path, cx: &mut ModelContext, - ) -> Task>> { + ) -> Option> { + let handle = cx.handle(); let mut existing_buffer = None; self.open_buffers.retain(|_buffer_id, buffer| { if let Some(buffer) = buffer.upgrade(cx.as_ref()) { if let Some(file) = buffer.read(cx.as_ref()).file() { - if file.worktree_id() == cx.model_id() && file.path().as_ref() == path { + if file.worktree_id() == handle.id() && file.path().as_ref() == path { existing_buffer = Some(buffer); } } @@ -1527,62 +1600,65 @@ impl RemoteWorktree { false } }); + existing_buffer + } + fn open_buffer( + &mut self, + path: &Path, + cx: &mut ModelContext, + ) -> Task>> { let rpc = self.client.clone(); let replica_id = self.replica_id; let remote_worktree_id = self.remote_id; let root_path = self.snapshot.abs_path.clone(); - let path = path.to_string_lossy().to_string(); - cx.spawn_weak(|this, mut cx| async move { - if let Some(existing_buffer) = existing_buffer { - Ok(existing_buffer) - } else { - let entry = this - .upgrade(&cx) - .ok_or_else(|| anyhow!("worktree was closed"))? 
- .read_with(&cx, |tree, _| tree.entry_for_path(&path).cloned()) - .ok_or_else(|| anyhow!("file does not exist"))?; - let response = rpc - .request(proto::OpenBuffer { - worktree_id: remote_worktree_id as u64, - path, - }) - .await?; - - let this = this - .upgrade(&cx) - .ok_or_else(|| anyhow!("worktree was closed"))?; - let file = File { - entry_id: Some(entry.id), - worktree: this.clone(), - worktree_path: root_path, - path: entry.path, - mtime: entry.mtime, - is_local: false, - }; - let language = this.read_with(&cx, |this, _| { - use language::File; - this.languages().select_language(file.full_path()).cloned() - }); - let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?; - let buffer_id = remote_buffer.id as usize; - let buffer = cx.add_model(|cx| { - Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx) - .unwrap() - .with_language(language, None, cx) - }); - this.update(&mut cx, |this, cx| { - let this = this.as_remote_mut().unwrap(); - if let Some(RemoteBuffer::Operations(pending_ops)) = this - .open_buffers - .insert(buffer_id, RemoteBuffer::Loaded(buffer.downgrade())) - { - buffer.update(cx, |buf, cx| buf.apply_ops(pending_ops, cx))?; - } - Result::<_, anyhow::Error>::Ok(()) - })?; - Ok(buffer) - } + let path: Arc = Arc::from(path); + let path_string = path.to_string_lossy().to_string(); + cx.spawn_weak(move |this, mut cx| async move { + let entry = this + .upgrade(&cx) + .ok_or_else(|| anyhow!("worktree was closed"))? + .read_with(&cx, |tree, _| tree.entry_for_path(&path).cloned()) + .ok_or_else(|| anyhow!("file does not exist"))?; + let response = rpc + .request(proto::OpenBuffer { + worktree_id: remote_worktree_id as u64, + path: path_string, + }) + .await?; + + let this = this + .upgrade(&cx) + .ok_or_else(|| anyhow!("worktree was closed"))?; + let file = File { + entry_id: Some(entry.id), + worktree: this.clone(), + worktree_path: root_path, + path: entry.path, + mtime: entry.mtime, + is_local: false, + }; + let language = this.read_with(&cx, |this, _| { + use language::File; + this.languages().select_language(file.full_path()).cloned() + }); + let remote_buffer = response.buffer.ok_or_else(|| anyhow!("empty buffer"))?; + let buffer_id = remote_buffer.id as usize; + let buffer = cx.add_model(|cx| { + Buffer::from_proto(replica_id, remote_buffer, Some(Box::new(file)), cx) + .unwrap() + .with_language(language, None, cx) + }); + this.update(&mut cx, move |this, cx| { + let this = this.as_remote_mut().unwrap(); + if let Some(RemoteBuffer::Operations(pending_ops)) = this + .open_buffers + .insert(buffer_id, RemoteBuffer::Loaded(buffer.downgrade())) + { + buffer.update(cx, |buf, cx| buf.apply_ops(pending_ops, cx))?; + } + Result::<_, anyhow::Error>::Ok(buffer) + }) }) } @@ -1665,20 +1741,6 @@ impl RemoteBuffer { } } -#[derive(Clone)] -pub struct Snapshot { - id: usize, - scan_id: usize, - abs_path: Arc, - root_name: String, - root_char_bag: CharBag, - ignores: HashMap, (Arc, usize)>, - entries_by_path: SumTree, - entries_by_id: SumTree, - removed_entry_ids: HashMap, - next_entry_id: Arc, -} - impl Snapshot { pub fn id(&self) -> usize { self.id @@ -3519,6 +3581,64 @@ mod tests { server.receive::().await.unwrap(); } + #[gpui::test] + async fn test_buffer_deduping(mut cx: gpui::TestAppContext) { + let user_id = 100; + let mut client = Client::new(); + let server = FakeServer::for_client(user_id, &mut client, &cx).await; + let user_store = server.build_user_store(client.clone(), &mut cx).await; + + let fs = Arc::new(FakeFs::new()); + 
fs.insert_tree( + "/the-dir", + json!({ + "a.txt": "a-contents", + "b.txt": "b-contents", + }), + ) + .await; + + let worktree = Worktree::open_local( + client.clone(), + user_store, + "/the-dir".as_ref(), + fs, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + // Spawn multiple tasks to open paths, repeating some paths. + let (buffer_a_1, buffer_b, buffer_a_2) = worktree.update(&mut cx, |worktree, cx| { + ( + worktree.open_buffer("a.txt", cx), + worktree.open_buffer("b.txt", cx), + worktree.open_buffer("a.txt", cx), + ) + }); + + let buffer_a_1 = buffer_a_1.await.unwrap(); + let buffer_a_2 = buffer_a_2.await.unwrap(); + let buffer_b = buffer_b.await.unwrap(); + assert_eq!(buffer_a_1.read_with(&cx, |b, _| b.text()), "a-contents"); + assert_eq!(buffer_b.read_with(&cx, |b, _| b.text()), "b-contents"); + + // There is only one buffer per path. + let buffer_a_id = buffer_a_1.id(); + assert_eq!(buffer_a_2.id(), buffer_a_id); + + // Open the same path again while it is still open. + drop(buffer_a_1); + let buffer_a_3 = worktree + .update(&mut cx, |worktree, cx| worktree.open_buffer("a.txt", cx)) + .await + .unwrap(); + + // There's still only one buffer per path. + assert_eq!(buffer_a_3.id(), buffer_a_id); + } + #[gpui::test] async fn test_buffer_is_dirty(mut cx: gpui::TestAppContext) { use std::fs; @@ -3985,7 +4105,7 @@ mod tests { worktree .update(&mut cx, |tree, cx| tree.update_diagnostics(message, cx)) .unwrap(); - let buffer = buffer.read_with(&cx, |buffer, cx| buffer.snapshot()); + let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); assert_eq!( buffer diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index e06db2a273e0cd0fbbf2680ea7d285a63dc11356..c46e631c8b37ed33b2d5ee46ccedea42ccc0db7e 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1064,7 +1064,7 @@ mod tests { // TODO // // Remove the selection set as client B, see those selections disappear as client A. 
- // cx_b.update(move |_| drop(editor_b)); + cx_b.update(move |_| drop(editor_b)); // buffer_a // .condition(&cx_a, |buffer, _| buffer.selection_sets().count() == 0) // .await; diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1f11f116936717acc9f5c17eb11c791b97b0b7c8..e49f81efa71d0915f421d0a9ea80fad0833e37a7 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4,7 +4,7 @@ pub mod settings; pub mod sidebar; mod status_bar; -use anyhow::{anyhow, Result}; +use anyhow::Result; use client::{Authenticate, ChannelList, Client, User, UserStore}; use gpui::{ action, @@ -28,7 +28,6 @@ use sidebar::{Side, Sidebar, SidebarItemId, ToggleSidebarItem, ToggleSidebarItem use status_bar::StatusBar; pub use status_bar::StatusItemView; use std::{ - collections::{hash_map::Entry, HashMap}, future::Future, path::{Path, PathBuf}, sync::Arc, @@ -342,10 +341,6 @@ pub struct Workspace { project: ModelHandle, entry_openers: Arc<[Box]>, items: Vec>, - loading_items: HashMap< - ProjectPath, - postage::watch::Receiver, Arc>>>, - >, _observe_current_user: Task<()>, } @@ -408,7 +403,6 @@ impl Workspace { project, entry_openers: params.entry_openers.clone(), items: Default::default(), - loading_items: Default::default(), _observe_current_user, } } @@ -606,43 +600,22 @@ impl Workspace { } }; - if let Entry::Vacant(entry) = self.loading_items.entry(project_path.clone()) { - let (mut tx, rx) = postage::watch::channel(); - entry.insert(rx); - - let project_path = project_path.clone(); - let entry_openers = self.entry_openers.clone(); - cx.as_mut() - .spawn(|mut cx| async move { - let item = worktree.update(&mut cx, move |worktree, cx| { - for opener in entry_openers.iter() { - if let Some(task) = opener.open(worktree, project_path.clone(), cx) { - return task; - } - } - - cx.spawn(|_, _| async move { - Err(anyhow!("no opener for path {:?} found", project_path)) - }) - }); - *tx.borrow_mut() = Some(item.await.map_err(Arc::new)); - }) - .detach(); - } + let project_path = project_path.clone(); + let entry_openers = self.entry_openers.clone(); + let task = worktree.update(cx, |worktree, cx| { + for opener in entry_openers.iter() { + if let Some(task) = opener.open(worktree, project_path.clone(), cx) { + return Some(task); + } + } + log::error!("no opener for path {:?} found", project_path); + None + })?; let pane = pane.downgrade(); - let mut watch = self.loading_items.get(&project_path).unwrap().clone(); - Some(cx.spawn(|this, mut cx| async move { - let load_result = loop { - if let Some(load_result) = watch.borrow().as_ref() { - break load_result.clone(); - } - watch.recv().await; - }; - + let load_result = task.await; this.update(&mut cx, |this, cx| { - this.loading_items.remove(&project_path); if let Some(pane) = pane.upgrade(&cx) { match load_result { Ok(item) => { From fe571f1d70602af24ccb74fd65a292f3ca3918f8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 16:36:53 -0800 Subject: [PATCH 067/196] Store diagnostic summaries on worktrees --- crates/diagnostics/src/diagnostics.rs | 2 +- crates/project/src/project.rs | 40 ++++++++++++++++++++++++--- crates/project/src/worktree.rs | 19 +++++++++---- 3 files changed, 50 insertions(+), 11 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index d04c69611bd8c2be9d54ccd0c66460d58ddfd77b..1162dccc7861f9abed86db83d41a7ea17924a266 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -15,7 +15,7 
@@ impl ProjectDiagnostics { cx: &mut ViewContext, ) -> Self { let mut buffer = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); - for diagnostic_summary in project.read(cx).diagnostic_summaries(cx) { + for (project_path, diagnostic_summary) in project.read(cx).diagnostic_summaries(cx) { // } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ca15f673773e895f00c0b53aa90cd7e2e1cb0af5..78c59404a6213b64dee3fe0e0f2eac0e3eb31254 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8,7 +8,8 @@ use clock::ReplicaId; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; -use language::LanguageRegistry; +use language::{DiagnosticEntry, LanguageRegistry}; +use lsp::DiagnosticSeverity; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, @@ -39,14 +40,39 @@ pub struct ProjectPath { pub path: Arc, } +#[derive(Clone)] pub struct DiagnosticSummary { - pub project_path: ProjectPath, pub error_count: usize, pub warning_count: usize, pub info_count: usize, pub hint_count: usize, } +impl DiagnosticSummary { + fn new(diagnostics: &[DiagnosticEntry]) -> Self { + let mut this = Self { + error_count: 0, + warning_count: 0, + info_count: 0, + hint_count: 0, + }; + + for entry in diagnostics { + if entry.diagnostic.is_primary { + match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => this.error_count += 1, + DiagnosticSeverity::WARNING => this.warning_count += 1, + DiagnosticSeverity::INFORMATION => this.info_count += 1, + DiagnosticSeverity::HINT => this.hint_count += 1, + _ => {} + } + } + } + + this + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct ProjectEntry { pub worktree_id: usize, @@ -176,8 +202,14 @@ impl Project { pub fn diagnostic_summaries<'a>( &'a self, cx: &'a AppContext, - ) -> impl Iterator { - std::iter::empty() + ) -> impl Iterator + 'a { + self.worktrees.iter().flat_map(move |worktree| { + let worktree_id = worktree.id(); + worktree + .read(cx) + .diagnostic_summaries() + .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary)) + }) } pub fn active_entry(&self) -> Option { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4a646cb17ba4dcad8fea012ba1dc31ca78156a4a..89eeb4d91749ccca4528490b306ae082ab50d6e6 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -472,9 +472,13 @@ impl Worktree { pub fn diagnostic_summaries<'a>( &'a self, - cx: &'a AppContext, - ) -> impl Iterator { - std::iter::empty() + ) -> impl Iterator, DiagnosticSummary)> + 'a { + match self { + Worktree::Local(worktree) => &worktree.diagnostic_summaries, + Worktree::Remote(worktree) => &worktree.diagnostic_summaries, + } + .iter() + .map(|(path, summary)| (path.clone(), summary.clone())) } pub fn loading_buffers<'a>(&'a mut self) -> &'a mut LoadingBuffers { @@ -879,16 +883,19 @@ impl Worktree { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( buffer.remote_id(), - buffer.update_diagnostics(params.version, diagnostics, cx), + buffer.update_diagnostics(params.version, diagnostics.clone(), cx), ) }); self.send_buffer_update(remote_id, operation?, cx); - return Ok(()); + break; } } } - this.diagnostics.insert(worktree_path, diagnostics); + let this = self.as_local_mut().unwrap(); + this.diagnostic_summaries + .insert(worktree_path.clone(), DiagnosticSummary::new(&diagnostics)); + this.diagnostics.insert(worktree_path.clone(), 
diagnostics); Ok(()) } From 0b1c27956bbca515bd4848f0308015e5454f4e6b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 17:44:15 -0800 Subject: [PATCH 068/196] Add Project::open_buffer method --- crates/project/src/project.rs | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 78c59404a6213b64dee3fe0e0f2eac0e3eb31254..cd3f09c98be1fafafb9819d672dc7ffc928e8895 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2,13 +2,13 @@ pub mod fs; mod ignore; mod worktree; -use anyhow::Result; +use anyhow::{anyhow, Result}; use client::{Client, UserStore}; use clock::ReplicaId; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; -use language::{DiagnosticEntry, LanguageRegistry}; +use language::{Buffer, DiagnosticEntry, LanguageRegistry}; use lsp::DiagnosticSeverity; use std::{ path::Path, @@ -113,6 +113,18 @@ impl Project { .cloned() } + pub fn open_buffer( + &self, + path: ProjectPath, + cx: &mut ModelContext, + ) -> Task>> { + if let Some(worktree) = self.worktree_for_id(path.worktree_id) { + worktree.update(cx, |worktree, cx| worktree.open_buffer(path.path, cx)) + } else { + cx.spawn(|_, _| async move { Err(anyhow!("no such worktree")) }) + } + } + pub fn add_local_worktree( &mut self, abs_path: &Path, From 4efdc53d9f5971b7ce0797a00cb5083a5f814c6a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 13 Dec 2021 17:44:20 -0800 Subject: [PATCH 069/196] WIP --- Cargo.lock | 3 ++ crates/diagnostics/Cargo.toml | 3 ++ crates/diagnostics/src/diagnostics.rs | 54 ++++++++++++++++++++++++--- crates/editor/src/editor.rs | 2 +- crates/editor/src/multi_buffer.rs | 6 +-- crates/language/src/buffer.rs | 9 +++++ 6 files changed, 68 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0a2cf5ad042a0e63c76bd047d69ec63b44c58e24..05e25f42b895dde584bc6b61837e3354f48d18f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1414,8 +1414,11 @@ dependencies = [ name = "diagnostics" version = "0.1.0" dependencies = [ + "anyhow", + "collections", "editor", "gpui", + "language", "postage", "project", "workspace", diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 6f9979a22e9ac92965a8fb2242d53a087742f456..eebe4b209159e0dd19b237659f3bf229758d7998 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -7,7 +7,10 @@ edition = "2021" path = "src/diagnostics.rs" [dependencies] +anyhow = "1.0" +collections = { path = "../collections" } editor = { path = "../editor" } +language = { path = "../language" } gpui = { path = "../gpui" } project = { path = "../project" } workspace = { path = "../workspace" } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 1162dccc7861f9abed86db83d41a7ea17924a266..e076f34dd690f30318ea0b9640168cc69c0ed422 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,5 +1,7 @@ -use editor::{Editor, MultiBuffer}; +use collections::HashMap; +use editor::{Editor, ExcerptProperties, MultiBuffer}; use gpui::{elements::*, Entity, ModelHandle, RenderContext, View, ViewContext, ViewHandle}; +use language::Point; use postage::watch; use project::Project; @@ -14,10 +16,52 @@ impl ProjectDiagnostics { settings: watch::Receiver, cx: &mut ViewContext, ) -> Self { - let mut buffer = cx.add_model(|cx| 
MultiBuffer::new(project.read(cx).replica_id(cx))); - for (project_path, diagnostic_summary) in project.read(cx).diagnostic_summaries(cx) { - // - } + let buffer = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); + + let project_paths = project + .read(cx) + .diagnostic_summaries(cx) + .map(|e| e.0) + .collect::>(); + + cx.spawn(|this, mut cx| { + let project = project.clone(); + async move { + let mut excerpts = Vec::new(); + for project_path in project_paths { + let buffer = project + .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) + .await?; + let snapshot = buffer.read_with(&cx, |b, _| b.snapshot()); + + let mut grouped_diagnostics = HashMap::default(); + for entry in snapshot.all_diagnostics() { + let mut group = grouped_diagnostics + .entry(entry.diagnostic.group_id) + .or_insert((Point::zero(), Vec::new())); + if entry.diagnostic.is_primary { + group.0 = entry.range.start; + } + group.1.push(entry); + } + let mut sorted_diagnostic_groups = + grouped_diagnostics.into_values().collect::>(); + sorted_diagnostic_groups.sort_by_key(|group| group.0); + + let mut prev_end_row = None; + let mut pending_excerpt = None; + for diagnostic in snapshot.all_diagnostics::() { + excerpts.push(ExcerptProperties { + buffer: &buffer, + range: todo!(), + header_height: todo!(), + }); + } + } + Result::Ok::<_, anyhow::Error>(()) + } + }) + .detach(); Self { editor: cx.add_view(|cx| { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0eecd17ce65eae20c873ef9dbef42f8c03b300e0..4bb119550406c0384668edb5e47cd904805adb22 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -26,10 +26,10 @@ use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId, }; -pub use multi_buffer::MultiBuffer; use multi_buffer::{ Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint, }; +pub use multi_buffer::{ExcerptProperties, MultiBuffer}; use postage::watch; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index d79c0749061291fd4a19f052948d1d865ee6b928..c7e8eeacf5e7e56bd7a7b6fadcf72410bf3d90a2 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -65,9 +65,9 @@ pub struct MultiBufferSnapshot { } pub struct ExcerptProperties<'a, T> { - buffer: &'a ModelHandle, - range: Range, - header_height: u8, + pub buffer: &'a ModelHandle, + pub range: Range, + pub header_height: u8, } #[derive(Clone)] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 4b518467aa11e78bced91dcda7fb839817db9931..92539c49ba0c89d1259ec4953a211cb9c0a4e942 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1674,6 +1674,15 @@ impl BufferSnapshot { }) } + pub fn all_diagnostics<'a, O>(&'a self) -> impl 'a + Iterator> + where + O: 'a + FromAnchor, + { + self.diagnostics + .iter() + .map(|diagnostic| diagnostic.resolve(self)) + } + pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, From 9e15c57f912ea27672aec1e6ffae334fd4f2af24 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 11:32:05 +0100 Subject: [PATCH 070/196] Display a rudimentary project diagnostic view on `alt-shift-d` --- Cargo.lock | 1 + crates/diagnostics/src/diagnostics.rs | 125 ++++++++++++++++++++------ crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + 4 files changed, 101 
insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 05e25f42b895dde584bc6b61837e3354f48d18f1..987bd265728de9fdf8aa3207f7114cc8b4275200 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5716,6 +5716,7 @@ dependencies = [ "contacts_panel", "crossbeam-channel", "ctor", + "diagnostics", "dirs", "easy-parallel", "editor", diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index e076f34dd690f30318ea0b9640168cc69c0ed422..a562a33c35e135c0c40201b39363184f7ee17f19 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,23 +1,32 @@ use collections::HashMap; use editor::{Editor, ExcerptProperties, MultiBuffer}; -use gpui::{elements::*, Entity, ModelHandle, RenderContext, View, ViewContext, ViewHandle}; +use gpui::{ + action, elements::*, keymap::Binding, AppContext, Entity, ModelContext, ModelHandle, + MutableAppContext, RenderContext, View, ViewContext, ViewHandle, +}; use language::Point; use postage::watch; use project::Project; +use workspace::Workspace; + +action!(Toggle); + +pub fn init(cx: &mut MutableAppContext) { + cx.add_bindings([Binding::new("alt-shift-D", Toggle, None)]); + cx.add_action(ProjectDiagnosticsEditor::toggle); +} struct ProjectDiagnostics { - editor: ViewHandle, + excerpts: ModelHandle, project: ModelHandle, } -impl ProjectDiagnostics { - fn new( - project: ModelHandle, - settings: watch::Receiver, - cx: &mut ViewContext, - ) -> Self { - let buffer = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); +struct ProjectDiagnosticsEditor { + editor: ViewHandle, +} +impl ProjectDiagnostics { + fn new(project: ModelHandle, cx: &mut ModelContext) -> Self { let project_paths = project .read(cx) .diagnostic_summaries(cx) @@ -27,7 +36,6 @@ impl ProjectDiagnostics { cx.spawn(|this, mut cx| { let project = project.clone(); async move { - let mut excerpts = Vec::new(); for project_path in project_paths { let buffer = project .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) @@ -48,14 +56,20 @@ impl ProjectDiagnostics { grouped_diagnostics.into_values().collect::>(); sorted_diagnostic_groups.sort_by_key(|group| group.0); - let mut prev_end_row = None; - let mut pending_excerpt = None; for diagnostic in snapshot.all_diagnostics::() { - excerpts.push(ExcerptProperties { - buffer: &buffer, - range: todo!(), - header_height: todo!(), - }); + this.update(&mut cx, |this, cx| { + this.excerpts.update(cx, |excerpts, cx| { + excerpts.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: diagnostic.range, + header_height: 1, + }, + cx, + ); + cx.notify(); + }); + }) } } Result::Ok::<_, anyhow::Error>(()) @@ -64,13 +78,7 @@ impl ProjectDiagnostics { .detach(); Self { - editor: cx.add_view(|cx| { - Editor::for_buffer( - buffer.clone(), - editor::settings_builder(buffer.downgrade(), settings), - cx, - ) - }), + excerpts: cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))), project, } } @@ -80,12 +88,75 @@ impl Entity for ProjectDiagnostics { type Event = (); } -impl View for ProjectDiagnostics { +impl Entity for ProjectDiagnosticsEditor { + type Event = (); +} + +impl View for ProjectDiagnosticsEditor { fn ui_name() -> &'static str { - "ProjectDiagnostics" + "ProjectDiagnosticsEditor" } - fn render(&mut self, _: &mut RenderContext) -> ElementBox { + fn render(&mut self, cx: &mut RenderContext) -> ElementBox { ChildView::new(self.editor.id()).boxed() } } + +impl ProjectDiagnosticsEditor { + fn toggle(workspace: &mut Workspace, _: &Toggle, 
cx: &mut ViewContext) { + dbg!("HEY!!!!"); + let diagnostics = + cx.add_model(|cx| ProjectDiagnostics::new(workspace.project().clone(), cx)); + workspace.add_item(diagnostics, cx); + } +} + +impl workspace::Item for ProjectDiagnostics { + type View = ProjectDiagnosticsEditor; + + fn build_view( + handle: ModelHandle, + settings: watch::Receiver, + cx: &mut ViewContext, + ) -> Self::View { + let excerpts = handle.read(cx).excerpts.clone(); + let editor = cx.add_view(|cx| { + Editor::for_buffer( + excerpts.clone(), + editor::settings_builder(excerpts.downgrade(), settings), + cx, + ) + }); + ProjectDiagnosticsEditor { editor } + } + + fn project_path(&self) -> Option { + None + } +} + +impl workspace::ItemView for ProjectDiagnosticsEditor { + fn title(&self, _: &AppContext) -> String { + "Project Diagnostics".to_string() + } + + fn project_path(&self, cx: &AppContext) -> Option { + None + } + + fn save( + &mut self, + cx: &mut ViewContext, + ) -> anyhow::Result>> { + todo!() + } + + fn save_as( + &mut self, + worktree: ModelHandle, + path: &std::path::Path, + cx: &mut ViewContext, + ) -> gpui::Task> { + todo!() + } +} diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index e03ef6dcf9ccd02989c294f94cc495f2f97986be..275f8b0deb841ac001e77b94b061b8dc145d4e41 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -32,6 +32,7 @@ text = { path = "../text" } chat_panel = { path = "../chat_panel" } client = { path = "../client" } clock = { path = "../clock" } +diagnostics = { path = "../diagnostics" } fsevent = { path = "../fsevent" } fuzzy = { path = "../fuzzy" } editor = { path = "../editor" } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index c6374c66e71f25dd5fc8015504086eb0dff916d0..3847244ba63239bc985750c6c64536d26afe73c8 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -59,6 +59,7 @@ fn main() { contacts_panel::init(cx); chat_panel::init(cx); project_panel::init(cx); + diagnostics::init(cx); let app_state = Arc::new(AppState { languages: languages.clone(), From 04ffca95c66909d9c4dc6019b53e2de9b43c0480 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 11:32:49 +0100 Subject: [PATCH 071/196] Keep a separate diagnostic update count and parse count in MultiBuffer --- crates/editor/src/multi_buffer.rs | 33 +++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index c7e8eeacf5e7e56bd7a7b6fadcf72410bf3d90a2..264bd3f562d09e5618896b703369515c89606cec 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -62,6 +62,8 @@ struct BufferState { #[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, + parse_count: usize, + diagnostics_update_count: usize, } pub struct ExcerptProperties<'a, T> { @@ -473,20 +475,31 @@ impl MultiBuffer { fn sync(&self, cx: &AppContext) { let mut snapshot = self.snapshot.borrow_mut(); let mut excerpts_to_edit = Vec::new(); + let mut reparsed = false; + let mut diagnostics_updated = false; for buffer_state in self.buffers.values() { let buffer = buffer_state.buffer.read(cx); - let buffer_changed = buffer.version().gt(&buffer_state.last_version); - if buffer_changed - || buffer.parse_count() > buffer_state.last_parse_count - || buffer.diagnostics_update_count() > buffer_state.last_diagnostics_update_count - { + let buffer_edited = buffer.version().gt(&buffer_state.last_version); + let buffer_reparsed = buffer.parse_count() > 
buffer_state.last_parse_count; + let buffer_diagnostics_updated = + buffer.diagnostics_update_count() > buffer_state.last_diagnostics_update_count; + if buffer_edited || buffer_reparsed || buffer_diagnostics_updated { excerpts_to_edit.extend( buffer_state .excerpts .iter() - .map(|excerpt_id| (excerpt_id, buffer_state, buffer_changed)), + .map(|excerpt_id| (excerpt_id, buffer_state, buffer_edited)), ); } + + reparsed |= buffer_reparsed; + diagnostics_updated |= buffer_diagnostics_updated; + } + if reparsed { + snapshot.parse_count += 1; + } + if diagnostics_updated { + snapshot.diagnostics_update_count += 1; } excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _, _)| *excerpt_id); @@ -494,13 +507,13 @@ impl MultiBuffer { let mut new_excerpts = SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); - for (id, buffer_state, buffer_changed) in excerpts_to_edit { + for (id, buffer_state, buffer_edited) in excerpts_to_edit { new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer_state.buffer.read(cx); let mut new_excerpt; - if buffer_changed { + if buffer_edited { edits.extend( buffer .edits_since_in_range::( @@ -1028,7 +1041,7 @@ impl MultiBufferSnapshot { } pub fn parse_count(&self) -> usize { - self.as_singleton().unwrap().parse_count() + self.parse_count } pub fn enclosing_bracket_ranges( @@ -1040,7 +1053,7 @@ impl MultiBufferSnapshot { } pub fn diagnostics_update_count(&self) -> usize { - self.as_singleton().unwrap().diagnostics_update_count() + self.diagnostics_update_count } pub fn language(&self) -> Option<&Arc> { From 174b37cdf0213f5894b245c2fe496873ecf51295 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 11:33:53 +0100 Subject: [PATCH 072/196] Assume all excerpts in the multi buffer have the same language for now --- crates/editor/src/multi_buffer.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 264bd3f562d09e5618896b703369515c89606cec..6047c1caf556b92e85351472966fceb82c6e63c9 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -452,12 +452,14 @@ impl MultiBuffer { } pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { - self.as_singleton().unwrap().read(cx).language() + self.buffers + .values() + .next() + .and_then(|state| state.buffer.read(cx).language()) } pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> { - self.as_singleton() - .and_then(|buffer| buffer.read(cx).file()) + self.as_singleton().unwrap().read(cx).file() } pub fn is_dirty(&self, cx: &AppContext) -> bool { @@ -1057,7 +1059,10 @@ impl MultiBufferSnapshot { } pub fn language(&self) -> Option<&Arc> { - self.as_singleton().unwrap().language() + self.excerpts + .iter() + .next() + .and_then(|excerpt| excerpt.buffer.language()) } pub fn diagnostic_group<'a, O>( From 163ce95171da166763f538e0da8877f7e8b7d2f3 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 11:34:26 +0100 Subject: [PATCH 073/196] Implement `MultiBufferSnapshot::remote_selections_in_range` --- crates/editor/src/editor.rs | 14 +++--- crates/editor/src/multi_buffer.rs | 72 ++++++++++++++++++++----------- 2 files changed, 53 insertions(+), 33 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4bb119550406c0384668edb5e47cd904805adb22..c01a365d7f3dacd11033181649131b67798aded9 100644 --- 
a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3081,16 +3081,14 @@ impl Editor { .collect(), ); - for (replica_id, selections) in display_map + for (replica_id, selection) in display_map .buffer_snapshot - .remote_selections_in_range(start..end) + .remote_selections_in_range(&(start..end)) { - result.insert( - replica_id, - selections - .map(|s| display_selection(&s, &display_map)) - .collect(), - ); + result + .entry(replica_id) + .or_insert(Vec::new()) + .push(display_selection(&selection, &display_map)); } result diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 6047c1caf556b92e85351472966fceb82c6e63c9..a2d8558f905e46a2a95ea44f3ee3aa2de1994a6c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1110,31 +1110,53 @@ impl MultiBufferSnapshot { pub fn remote_selections_in_range<'a>( &'a self, - range: Range, - ) -> impl 'a + Iterator>)> { - // TODO - let excerpt_id = self.excerpts.first().unwrap().id.clone(); - self.as_singleton() - .unwrap() - .remote_selections_in_range(range.start.text_anchor..range.end.text_anchor) - .map(move |(replica_id, selections)| { - let excerpt_id = excerpt_id.clone(); - ( - replica_id, - selections.map(move |s| Selection { - id: s.id, - start: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: s.start.clone(), - }, - end: Anchor { - excerpt_id: excerpt_id.clone(), - text_anchor: s.end.clone(), - }, - reversed: s.reversed, - goal: s.goal, - }), - ) + range: &'a Range, + ) -> impl 'a + Iterator)> { + let mut cursor = self.excerpts.cursor::>(); + cursor.seek(&Some(&range.start.excerpt_id), Bias::Left, &()); + cursor + .take_while(move |excerpt| excerpt.id <= range.end.excerpt_id) + .flat_map(move |excerpt| { + let mut query_range = excerpt.range.start.clone()..excerpt.range.end.clone(); + if excerpt.id == range.start.excerpt_id { + query_range.start = range.start.text_anchor.clone(); + } + if excerpt.id == range.end.excerpt_id { + query_range.end = range.end.text_anchor.clone(); + } + + excerpt + .buffer + .remote_selections_in_range(query_range) + .flat_map(move |(replica_id, selections)| { + selections.map(move |selection| { + let mut start = Anchor { + excerpt_id: excerpt.id.clone(), + text_anchor: selection.start.clone(), + }; + let mut end = Anchor { + excerpt_id: excerpt.id.clone(), + text_anchor: selection.end.clone(), + }; + if range.start.cmp(&start, self).unwrap().is_gt() { + start = range.start.clone(); + } + if range.end.cmp(&end, self).unwrap().is_lt() { + end = range.end.clone(); + } + + ( + replica_id, + Selection { + id: selection.id, + start, + end, + reversed: selection.reversed, + goal: selection.goal, + }, + ) + }) + }) }) } } From 920daa8a8f969c719ece2f759b9ba54a20862437 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 12:01:19 +0100 Subject: [PATCH 074/196] Remove stray log statement --- crates/diagnostics/src/diagnostics.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index a562a33c35e135c0c40201b39363184f7ee17f19..71b138c4e9485e72bdd70af77386cb8fd889d465 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -104,7 +104,6 @@ impl View for ProjectDiagnosticsEditor { impl ProjectDiagnosticsEditor { fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { - dbg!("HEY!!!!"); let diagnostics = cx.add_model(|cx| ProjectDiagnostics::new(workspace.project().clone(), cx)); 
workspace.add_item(diagnostics, cx); From 1b67f19edca3903d497f4565192b383d7a09adef Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 12:13:19 +0100 Subject: [PATCH 075/196] Implement `MultiBuffer::set_active_selections` --- crates/editor/src/multi_buffer.rs | 80 +++++++++++++++++++++++++------ 1 file changed, 66 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index a2d8558f905e46a2a95ea44f3ee3aa2de1994a6c..57b6bcc0969e298230967305a4ffb701a24c38b2 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -12,7 +12,7 @@ use language::{ use std::{ cell::{Ref, RefCell}, cmp, io, - iter::{FromIterator, Peekable}, + iter::{self, FromIterator, Peekable}, ops::{Range, Sub}, sync::Arc, time::{Duration, Instant, SystemTime}, @@ -346,19 +346,71 @@ impl MultiBuffer { selections: &[Selection], cx: &mut ModelContext, ) { - // TODO - let this = self.read(cx); - self.as_singleton().unwrap().update(cx, |buffer, cx| { - let buffer_snapshot = buffer.snapshot(); - let selections = selections.iter().map(|selection| Selection { - id: selection.id, - start: buffer_snapshot.anchor_before(selection.start.to_offset(&this)), - end: buffer_snapshot.anchor_before(selection.end.to_offset(&this)), - reversed: selection.reversed, - goal: selection.goal, + let mut selections_by_buffer: HashMap>> = + Default::default(); + let snapshot = self.read(cx); + let mut cursor = snapshot.excerpts.cursor::>(); + for selection in selections { + cursor.seek(&Some(&selection.start.excerpt_id), Bias::Left, &()); + while let Some(excerpt) = cursor.item() { + if excerpt.id > selection.end.excerpt_id { + break; + } + + let mut start = excerpt.range.start.clone(); + let mut end = excerpt.range.end.clone(); + if excerpt.id == selection.start.excerpt_id { + start = selection.start.text_anchor.clone(); + } + if excerpt.id == selection.end.excerpt_id { + end = selection.end.text_anchor.clone(); + } + selections_by_buffer + .entry(excerpt.buffer_id) + .or_default() + .push(Selection { + id: selection.id, + start, + end, + reversed: selection.reversed, + goal: selection.goal, + }); + + cursor.next(&()); + } + } + + for (buffer_id, mut selections) in selections_by_buffer { + self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { + selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer).unwrap()); + let mut selections = selections.into_iter().peekable(); + let merged_selections = Arc::from_iter(iter::from_fn(|| { + let mut selection = selections.next()?; + while let Some(next_selection) = selections.peek() { + if selection + .end + .cmp(&next_selection.start, buffer) + .unwrap() + .is_ge() + { + let next_selection = selections.next().unwrap(); + if next_selection + .end + .cmp(&selection.end, buffer) + .unwrap() + .is_ge() + { + selection.end = next_selection.end; + } + } else { + break; + } + } + Some(selection) + })); + buffer.set_active_selections(merged_selections, cx); }); - buffer.set_active_selections(Arc::from_iter(selections), cx); - }); + } } pub fn remove_active_selections(&mut self, cx: &mut ModelContext) { @@ -976,7 +1028,7 @@ impl MultiBufferSnapshot { let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { let excerpt_id = &anchor.excerpt_id; - let excerpt_anchors = std::iter::from_fn(|| { + let excerpt_anchors = iter::from_fn(|| { let anchor = anchors.peek()?; if anchor.excerpt_id == *excerpt_id { Some(&anchors.next().unwrap().text_anchor) From 2d1ff8f606daa6ca8cf1b047f571df4ea5ec2119 
Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 12:28:27 +0100 Subject: [PATCH 076/196] Clip anchors created on MultiBuffer's trailing newlines or headers --- crates/editor/src/multi_buffer.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 57b6bcc0969e298230967305a4ffb701a24c38b2..c0fc0b349669810f0ec09e7fce0c028e305a3e0f 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1080,9 +1080,14 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); cursor.seek(&offset, bias, &()); if let Some(excerpt) = cursor.item() { - let overshoot = - (offset - cursor.start().0).saturating_sub(excerpt.header_height as usize); + let start_after_header = cursor.start().0 + excerpt.header_height as usize; + let mut end_before_newline = cursor.end(&()).0; + if excerpt.has_trailing_newline { + end_before_newline -= 1; + } + let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); + let overshoot = cmp::min(offset, end_before_newline).saturating_sub(start_after_header); Anchor { excerpt_id: excerpt.id.clone(), text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias), From 119d44caf7e47e5a45773d30778f9fbf7d986e5f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 14:17:33 +0100 Subject: [PATCH 077/196] Remove test-only `transaction_group_interval` method from `MultiBuffer` --- crates/editor/src/editor.rs | 6 ++++-- crates/editor/src/multi_buffer.rs | 9 +-------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c01a365d7f3dacd11033181649131b67798aded9..c82a3c9c984950de3cb73921efb1eecc75526817 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3841,7 +3841,9 @@ mod tests { #[gpui::test] fn test_undo_redo_with_selection_restoration(cx: &mut MutableAppContext) { let mut now = Instant::now(); - let buffer = MultiBuffer::build_simple("123456", cx); + let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); + let group_interval = buffer.read(cx).transaction_group_interval(); + let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let settings = EditorSettings::test(cx); let (_, editor) = cx.add_window(Default::default(), |cx| { build_editor(buffer.clone(), settings, cx) @@ -3862,7 +3864,7 @@ mod tests { assert_eq!(editor.text(cx), "12cde6"); assert_eq!(editor.selected_ranges(cx), vec![5..5]); - now += buffer.read(cx).transaction_group_interval(cx) + Duration::from_millis(1); + now += group_interval + Duration::from_millis(1); editor.select_ranges([2..2], None, cx); // Simulate an edit in another editor diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index c0fc0b349669810f0ec09e7fce0c028e305a3e0f..655002f8b9909ed8ab31b071653db372f2798b65 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -15,7 +15,7 @@ use std::{ iter::{self, FromIterator, Peekable}, ops::{Range, Sub}, sync::Arc, - time::{Duration, Instant, SystemTime}, + time::{Instant, SystemTime}, }; use sum_tree::{Bias, Cursor, SumTree}; use text::{ @@ -136,13 +136,6 @@ impl MultiBuffer { self.replica_id } - pub fn transaction_group_interval(&self, cx: &AppContext) -> Duration { - self.as_singleton() - .unwrap() - .read(cx) - .transaction_group_interval() - } - pub fn snapshot(&self, cx: &AppContext) -> 
MultiBufferSnapshot { self.sync(cx); self.snapshot.borrow().clone() From 523cbe781b94803c9d9a869e82c19cc25d6f8561 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 17:39:40 +0100 Subject: [PATCH 078/196] Return the transaction id after grouping in `end_transaction_at` Co-Authored-By: Nathan Sobo --- crates/text/src/text.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 30fd985319243c094a6e2a29995376b9fc16e8c4..2b2282474a81291a17ab9828c36df33270290b44 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -189,7 +189,7 @@ impl History { } } - fn group(&mut self) { + fn group(&mut self) -> Option { let mut new_len = self.undo_stack.len(); let mut transactions = self.undo_stack.iter_mut(); @@ -221,6 +221,7 @@ impl History { } self.undo_stack.truncate(new_len); + self.undo_stack.last().map(|t| t.id) } fn push_undo(&mut self, edit_id: clock::Local) { @@ -1081,9 +1082,8 @@ impl Buffer { pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> { if let Some(transaction) = self.history.end_transaction(now) { - let id = transaction.id; let since = transaction.start.clone(); - self.history.group(); + let id = self.history.group().unwrap(); Some((id, since)) } else { None From 08e9f3e1e3fe69c46aa25401a3c4da875a6c3c0d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 17:43:41 +0100 Subject: [PATCH 079/196] Maintain a different undo/redo stack in `MultiBuffer` This only applies to singleton mode. --- crates/editor/src/multi_buffer.rs | 293 +++++++++++++++++++++++++++--- crates/language/src/buffer.rs | 34 ++++ crates/text/src/text.rs | 40 ++++ 3 files changed, 345 insertions(+), 22 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 655002f8b9909ed8ab31b071653db372f2798b65..beca029a9fab0928145709a05c6c0cb6300b9553 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -3,7 +3,7 @@ mod anchor; pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; -use collections::HashMap; +use collections::{HashMap, HashSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, @@ -15,7 +15,7 @@ use std::{ iter::{self, FromIterator, Peekable}, ops::{Range, Sub}, sync::Arc, - time::{Instant, SystemTime}, + time::{Duration, Instant, SystemTime}, }; use sum_tree::{Bias, Cursor, SumTree}; use text::{ @@ -25,6 +25,7 @@ use text::{ AnchorRangeExt as _, Edit, Point, PointUtf16, TextSummary, }; use theme::SyntaxTheme; +use util::post_inc; const NEWLINES: &'static [u8] = &[b'\n'; u8::MAX as usize]; @@ -36,6 +37,22 @@ pub struct MultiBuffer { subscriptions: Topic, singleton: bool, replica_id: ReplicaId, + history: History, +} + +struct History { + next_transaction_id: usize, + undo_stack: Vec, + redo_stack: Vec, + transaction_depth: usize, + group_interval: Duration, +} + +struct Transaction { + id: usize, + buffer_transactions: HashSet<(usize, text::TransactionId)>, + first_edit_at: Instant, + last_edit_at: Instant, } pub trait ToOffset: 'static { @@ -110,6 +127,13 @@ impl MultiBuffer { subscriptions: Default::default(), singleton: false, replica_id, + history: History { + next_transaction_id: Default::default(), + undo_stack: Default::default(), + redo_stack: Default::default(), + transaction_depth: 0, 
+ group_interval: Duration::from_millis(300), + }, } } @@ -310,17 +334,18 @@ impl MultiBuffer { now: Instant, cx: &mut ModelContext, ) -> Option { - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, _| buffer.start_transaction_at(now)) + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + + for BufferState { buffer, .. } in self.buffers.values() { + buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); + } + self.history.start_transaction(now) } pub fn end_transaction(&mut self, cx: &mut ModelContext) -> Option { - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.end_transaction(cx)) + self.end_transaction_at(Instant::now(), cx) } pub(crate) fn end_transaction_at( @@ -328,10 +353,25 @@ impl MultiBuffer { now: Instant, cx: &mut ModelContext, ) -> Option { - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)); + } + + let mut buffer_transactions = HashSet::default(); + for BufferState { buffer, .. } in self.buffers.values() { + if let Some(transaction_id) = + buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) + { + buffer_transactions.insert((buffer.id(), transaction_id)); + } + } + + if self.history.end_transaction(now, buffer_transactions) { + let transaction_id = self.history.group().unwrap(); + Some(transaction_id) + } else { + None + } } pub fn set_active_selections( @@ -415,17 +455,49 @@ impl MultiBuffer { } pub fn undo(&mut self, cx: &mut ModelContext) -> Option { - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.undo(cx)) + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.undo(cx)); + } + + while let Some(transaction) = self.history.pop_undo() { + let mut undone = false; + for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { + if let Some(BufferState { buffer, .. }) = self.buffers.get(&buffer_id) { + undone |= buffer.update(cx, |buf, cx| { + buf.undo_transaction(*buffer_transaction_id, cx) + }); + } + } + + if undone { + return Some(transaction.id); + } + } + + None } pub fn redo(&mut self, cx: &mut ModelContext) -> Option { - // TODO - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.redo(cx)) + if let Some(buffer) = self.as_singleton() { + return buffer.update(cx, |buffer, cx| buffer.redo(cx)); + } + + while let Some(transaction) = self.history.pop_redo() { + let mut redone = false; + for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { + if let Some(BufferState { buffer, .. 
}) = self.buffers.get(&buffer_id) { + redone |= buffer.update(cx, |buf, cx| { + buf.redo_transaction(*buffer_transaction_id, cx) + }); + } + } + + if redone { + return Some(transaction.id); + } + } + + None } pub fn push_excerpt( @@ -436,6 +508,7 @@ impl MultiBuffer { where O: text::ToOffset, { + assert_eq!(self.history.transaction_depth, 0); self.sync(cx); let buffer = &props.buffer; @@ -1211,6 +1284,93 @@ impl MultiBufferSnapshot { } } +impl History { + fn start_transaction(&mut self, now: Instant) -> Option { + self.transaction_depth += 1; + if self.transaction_depth == 1 { + let id = post_inc(&mut self.next_transaction_id); + self.undo_stack.push(Transaction { + id, + buffer_transactions: Default::default(), + first_edit_at: now, + last_edit_at: now, + }); + Some(id) + } else { + None + } + } + + fn end_transaction( + &mut self, + now: Instant, + buffer_transactions: HashSet<(usize, TransactionId)>, + ) -> bool { + assert_ne!(self.transaction_depth, 0); + self.transaction_depth -= 1; + if self.transaction_depth == 0 { + if buffer_transactions.is_empty() { + self.undo_stack.pop(); + false + } else { + let transaction = self.undo_stack.last_mut().unwrap(); + transaction.last_edit_at = now; + transaction.buffer_transactions.extend(buffer_transactions); + true + } + } else { + false + } + } + + fn pop_undo(&mut self) -> Option<&Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction) = self.undo_stack.pop() { + self.redo_stack.push(transaction); + self.redo_stack.last() + } else { + None + } + } + + fn pop_redo(&mut self) -> Option<&Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction) = self.redo_stack.pop() { + self.undo_stack.push(transaction); + self.undo_stack.last() + } else { + None + } + } + + fn group(&mut self) -> Option { + let mut new_len = self.undo_stack.len(); + let mut transactions = self.undo_stack.iter_mut(); + + if let Some(mut transaction) = transactions.next_back() { + while let Some(prev_transaction) = transactions.next_back() { + if transaction.first_edit_at - prev_transaction.last_edit_at <= self.group_interval + { + transaction = prev_transaction; + new_len -= 1; + } else { + break; + } + } + } + + let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len); + if let Some(last_transaction) = transactions_to_keep.last_mut() { + if let Some(transaction) = transactions_to_merge.last() { + last_transaction.last_edit_at = transaction.last_edit_at; + } + } + + self.undo_stack.truncate(new_len); + self.undo_stack.last().map(|t| t.id) + } +} + impl Excerpt { fn new( id: ExcerptId, @@ -1848,4 +2008,93 @@ mod tests { assert_eq!(text.to_string(), snapshot.text()); } } + + #[gpui::test] + fn test_history(cx: &mut MutableAppContext) { + let buffer_1 = cx.add_model(|cx| Buffer::new(0, "1234", cx)); + let buffer_2 = cx.add_model(|cx| Buffer::new(0, "5678", cx)); + let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); + let group_interval = multibuffer.read(cx).history.group_interval; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer_1, + range: 0..buffer_1.read(cx).len(), + header_height: 0, + }, + cx, + ); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer_2, + range: 0..buffer_2.read(cx).len(), + header_height: 0, + }, + cx, + ); + }); + + let mut now = Instant::now(); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.start_transaction_at(now, cx); + multibuffer.edit( + [ + Point::new(0, 
0)..Point::new(0, 0), + Point::new(1, 0)..Point::new(1, 0), + ], + "A", + cx, + ); + multibuffer.edit( + [ + Point::new(0, 1)..Point::new(0, 1), + Point::new(1, 1)..Point::new(1, 1), + ], + "B", + cx, + ); + multibuffer.end_transaction_at(now, cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + now += 2 * group_interval; + multibuffer.start_transaction_at(now, cx); + multibuffer.edit([2..2], "C", cx); + multibuffer.end_transaction_at(now, cx); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678\n"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + + buffer_1.update(cx, |buffer_1, cx| buffer_1.undo(cx)); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678\n"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + multibuffer.redo(cx); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + + buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx)); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + + multibuffer.undo(cx); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + }); + } } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 92539c49ba0c89d1259ec4953a211cb9c0a4e942..d1683241b3ed4458b4d2ff6db7dc071864bc8f29 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1375,6 +1375,23 @@ impl Buffer { } } + pub fn undo_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut ModelContext, + ) -> bool { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + if let Some(operation) = self.text.undo_transaction(transaction_id) { + self.send_operation(Operation::Buffer(operation), cx); + self.did_edit(&old_version, was_dirty, cx); + true + } else { + false + } + } + pub fn redo(&mut self, cx: &mut ModelContext) -> Option { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); @@ -1387,6 +1404,23 @@ impl Buffer { None } } + + pub fn redo_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut ModelContext, + ) -> bool { + let was_dirty = self.is_dirty(); + let old_version = self.version.clone(); + + if let Some(operation) = self.text.redo_transaction(transaction_id) { + self.send_operation(Operation::Buffer(operation), cx); + self.did_edit(&old_version, was_dirty, cx); + true + } else { + false + } + } } #[cfg(any(test, feature = "test-support"))] diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 2b2282474a81291a17ab9828c36df33270290b44..8114835b8219d83397d846802258f6302a73b5ec 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -240,6 +240,17 @@ impl History { } } + fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction_ix) = self.undo_stack.iter().rposition(|t| t.id == transaction_id) { + let transaction = self.undo_stack.remove(transaction_ix); + self.redo_stack.push(transaction); + self.redo_stack.last() + } else { + None + } + } + fn pop_redo(&mut self) 
-> Option<&Transaction> { assert_eq!(self.transaction_depth, 0); if let Some(transaction) = self.redo_stack.pop() { @@ -249,6 +260,17 @@ impl History { None } } + + fn remove_from_redo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> { + assert_eq!(self.transaction_depth, 0); + if let Some(transaction_ix) = self.redo_stack.iter().rposition(|t| t.id == transaction_id) { + let transaction = self.redo_stack.remove(transaction_ix); + self.undo_stack.push(transaction); + self.undo_stack.last() + } else { + None + } + } } #[derive(Clone, Default, Debug)] @@ -1108,6 +1130,15 @@ impl Buffer { } } + pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option { + if let Some(transaction) = self.history.remove_from_undo(transaction_id).cloned() { + let op = self.undo_or_redo(transaction).unwrap(); + Some(op) + } else { + None + } + } + pub fn redo(&mut self) -> Option<(TransactionId, Operation)> { if let Some(transaction) = self.history.pop_redo().cloned() { let transaction_id = transaction.id; @@ -1118,6 +1149,15 @@ impl Buffer { } } + pub fn redo_transaction(&mut self, transaction_id: TransactionId) -> Option { + if let Some(transaction) = self.history.remove_from_redo(transaction_id).cloned() { + let op = self.undo_or_redo(transaction).unwrap(); + Some(op) + } else { + None + } + } + fn undo_or_redo(&mut self, transaction: Transaction) -> Result { let mut counts = HashMap::default(); for edit_id in transaction.edits { From 358a6ff66c7526e19dee3b98df9734ae9d3520e2 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 14 Dec 2021 17:51:14 +0100 Subject: [PATCH 080/196] Implement `MultiBufferSnapshot::contains_str_at` --- crates/editor/src/multi_buffer.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index beca029a9fab0928145709a05c6c0cb6300b9553..cb714541dd3212d032ab2755af669b592c48f58c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -735,8 +735,14 @@ impl MultiBufferSnapshot { where T: ToOffset, { - let offset = position.to_offset(self); - self.as_singleton().unwrap().contains_str_at(offset, needle) + let position = position.to_offset(self); + position == self.clip_offset(position, Bias::Left) + && self + .bytes_in_range(position..self.len()) + .flatten() + .copied() + .take(needle.len()) + .eq(needle.bytes()) } fn as_singleton(&self) -> Option<&BufferSnapshot> { From bf9daf1529edbbc8baa17d811ee953ffe048ce6d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 14 Dec 2021 09:58:28 -0800 Subject: [PATCH 081/196] Allow left-biased anchors at the beginnings of excerpts Co-Authored-By: Antonio Scandurra --- crates/editor/src/editor.rs | 53 ++++++++++++++++++++++++++++++- crates/editor/src/multi_buffer.rs | 15 +++------ 2 files changed, 57 insertions(+), 11 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c82a3c9c984950de3cb73921efb1eecc75526817..5a1641b75b539d64616f17217696892354857781 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3958,7 +3958,6 @@ mod tests { view.update_selection(DisplayPoint::new(0, 0), 0, Vector2F::zero(), cx); }); - eprintln!(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); assert_eq!( editor.update(cx, |view, cx| view.selected_display_ranges(cx)), [ @@ -5857,6 +5856,58 @@ mod tests { }); } + #[gpui::test] + fn test_multi_buffer_editing(cx: &mut gpui::MutableAppContext) { + let settings = EditorSettings::test(cx); + let buffer = 
cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(0, 0)..Point::new(0, 4), + header_height: 0, + }, + cx, + ); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(1, 0)..Point::new(1, 4), + header_height: 0, + }, + cx, + ); + multibuffer + }); + + assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb\n"); + + let (_, view) = cx.add_window(Default::default(), |cx| { + build_editor(multibuffer, settings, cx) + }); + view.update(cx, |view, cx| { + view.select_display_ranges( + &[ + DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0), + DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0), + ], + cx, + ) + .unwrap(); + + view.handle_input(&Input("X".to_string()), cx); + assert_eq!(view.text(cx), "Xaaaa\nXbbbb\n"); + assert_eq!( + view.selected_display_ranges(cx), + &[ + DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1), + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), + ] + ) + }); + } + #[gpui::test] async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) { let settings = cx.read(EditorSettings::test); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index cb714541dd3212d032ab2755af669b592c48f58c..e99d4b59af2b64ffd0d2d9200d4c0583d2c8965e 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -55,11 +55,11 @@ struct Transaction { last_edit_at: Instant, } -pub trait ToOffset: 'static { +pub trait ToOffset: 'static + std::fmt::Debug { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize; } -pub trait ToPoint: 'static { +pub trait ToPoint: 'static + std::fmt::Debug { fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; } @@ -171,7 +171,7 @@ impl MultiBuffer { } pub fn as_singleton(&self) -> Option<&ModelHandle> { - if self.buffers.len() == 1 { + if self.singleton { return Some(&self.buffers.values().next().unwrap().buffer); } else { None @@ -1150,16 +1150,11 @@ impl MultiBufferSnapshot { pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { let offset = position.to_offset(self); let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); - cursor.seek(&offset, bias, &()); + cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { let start_after_header = cursor.start().0 + excerpt.header_height as usize; - let mut end_before_newline = cursor.end(&()).0; - if excerpt.has_trailing_newline { - end_before_newline -= 1; - } - let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); - let overshoot = cmp::min(offset, end_before_newline).saturating_sub(start_after_header); + let overshoot = offset.saturating_sub(start_after_header); Anchor { excerpt_id: excerpt.id.clone(), text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias), From 4ed96bb5a64029d60050e3072c90cdea964810df Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 14 Dec 2021 11:47:22 -0800 Subject: [PATCH 082/196] Fix assertion in multibuffer history test Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index e99d4b59af2b64ffd0d2d9200d4c0583d2c8965e..a3149d180fe664d0a40d5cafb046403213382a8e 100644 --- a/crates/editor/src/multi_buffer.rs +++ 
b/crates/editor/src/multi_buffer.rs @@ -2095,7 +2095,7 @@ mod tests { assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "C1234\n5678\n"); }); } } From e38c1814d593b666844c3da5e77435d66adce96e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 14 Dec 2021 12:15:26 -0800 Subject: [PATCH 083/196] Update selections on text insertion using anchors The delta-based approach doesn't work for multi-excerpt buffers. Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 134 ++++++++++++++++++++++++++---------- 1 file changed, 98 insertions(+), 36 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5a1641b75b539d64616f17217696892354857781..42bfd30c0962f2b78b6ed556c3c5654c222327fa 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -22,6 +22,7 @@ use gpui::{ MutableAppContext, RenderContext, View, ViewContext, WeakModelHandle, WeakViewHandle, }; use items::BufferItemHandle; +use itertools::Itertools as _; use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId, @@ -1267,29 +1268,26 @@ impl Editor { fn insert(&mut self, text: &str, cx: &mut ViewContext) { self.start_transaction(cx); let old_selections = self.local_selections::(cx); - let mut new_selections = Vec::new(); - self.buffer.update(cx, |buffer, cx| { + let new_selections = self.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.read(cx); + let new_selections = old_selections + .iter() + .map(|selection| Selection { + id: selection.id, + start: snapshot.anchor_after(selection.start), + end: snapshot.anchor_after(selection.end), + reversed: false, + goal: SelectionGoal::None, + }) + .collect::>(); + + drop(snapshot); let edit_ranges = old_selections.iter().map(|s| s.start..s.end); buffer.edit_with_autoindent(edit_ranges, text, cx); - let text_len = text.len() as isize; - let mut delta = 0_isize; - new_selections = old_selections - .into_iter() - .map(|selection| { - let start = selection.start as isize; - let end = selection.end as isize; - let cursor = (start + delta + text_len) as usize; - let deleted_count = end - start; - delta += text_len - deleted_count; - Selection { - id: selection.id, - start: cursor, - end: cursor, - reversed: false, - goal: SelectionGoal::None, - } - }) - .collect(); + + let snapshot = buffer.read(cx); + self.resolve_selections::(new_selections.iter(), &snapshot) + .collect() }); self.update_selections(new_selections, Some(Autoscroll::Fit), cx); @@ -3099,21 +3097,8 @@ impl Editor { D: 'a + TextDimension + Ord + Sub, { let buffer = self.buffer.read(cx).snapshot(cx); - - let mut summaries = buffer - .summaries_for_anchors::(self.selections.iter().flat_map(|s| [&s.start, &s.end])) - .into_iter(); - let mut selections = self - .selections - .iter() - .map(|s| Selection { - id: s.id, - start: summaries.next().unwrap(), - end: summaries.next().unwrap(), - reversed: s.reversed, - goal: s.goal, - }) + .resolve_selections::(self.selections.iter(), &buffer) .peekable(); let mut pending_selection = self.pending_selection::(&buffer); @@ -3144,6 +3129,28 @@ impl Editor { .collect() } + fn resolve_selections<'a, D, I>( + &self, + selections: I, + snapshot: &MultiBufferSnapshot, + ) -> impl 'a + Iterator> + where + D: TextDimension + Ord + Sub, + I: 'a + IntoIterator>, + { + let (to_summarize, selections) = selections.into_iter().tee(); + let mut 
summaries = snapshot + .summaries_for_anchors::(to_summarize.flat_map(|s| [&s.start, &s.end])) + .into_iter(); + selections.map(move |s| Selection { + id: s.id, + start: summaries.next().unwrap(), + end: summaries.next().unwrap(), + reversed: s.reversed, + goal: s.goal, + }) + } + fn pending_selection>( &self, snapshot: &MultiBufferSnapshot, @@ -5857,7 +5864,7 @@ mod tests { } #[gpui::test] - fn test_multi_buffer_editing(cx: &mut gpui::MutableAppContext) { + fn test_editing_disjoint_excerpts(cx: &mut gpui::MutableAppContext) { let settings = EditorSettings::test(cx); let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let multibuffer = cx.add_model(|cx| { @@ -5908,6 +5915,61 @@ mod tests { }); } + #[gpui::test] + fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) { + let settings = EditorSettings::test(cx); + let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(0, 0)..Point::new(1, 4), + header_height: 0, + }, + cx, + ); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(1, 0)..Point::new(2, 4), + header_height: 0, + }, + cx, + ); + multibuffer + }); + + assert_eq!( + multibuffer.read(cx).read(cx).text(), + "aaaa\nbbbb\nbbbb\ncccc\n" + ); + + let (_, view) = cx.add_window(Default::default(), |cx| { + build_editor(multibuffer, settings, cx) + }); + view.update(cx, |view, cx| { + view.select_display_ranges( + &[ + DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1), + DisplayPoint::new(2, 3)..DisplayPoint::new(2, 3), + ], + cx, + ) + .unwrap(); + + view.handle_input(&Input("X".to_string()), cx); + assert_eq!(view.text(cx), "aaaa\nbXbbXb\nbXbbXb\ncccc\n"); + assert_eq!( + view.selected_display_ranges(cx), + &[ + DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2), + DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5), + ] + ) + }); + } + #[gpui::test] async fn test_extra_newline_insertion(mut cx: gpui::TestAppContext) { let settings = cx.read(EditorSettings::test); From 06e241117c8884dde26ccb144ac23920afcf842d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 14 Dec 2021 13:04:51 -0800 Subject: [PATCH 084/196] Fix assertions in test for selection restoration after undo/redo Co-Authored-By: Nathan Sobo --- crates/editor/src/editor.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 42bfd30c0962f2b78b6ed556c3c5654c222327fa..880fe674983416e10a69b1650b93a416bde6bc47 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3901,12 +3901,12 @@ mod tests { // Redo the first two transactions together. editor.redo(&Redo, cx); assert_eq!(editor.text(cx), "12cde6"); - assert_eq!(editor.selected_ranges(cx), vec![4..4]); + assert_eq!(editor.selected_ranges(cx), vec![5..5]); // Redo the last transaction on its own. editor.redo(&Redo, cx); assert_eq!(editor.text(cx), "ab2cde6"); - assert_eq!(editor.selected_ranges(cx), vec![5..5]); + assert_eq!(editor.selected_ranges(cx), vec![6..6]); // Test empty transactions. 
editor.start_transaction_at(now, cx); From 60e2c6bc526d8ed49c0c31f7851ae946dc0eae45 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 14 Dec 2021 13:37:05 -0800 Subject: [PATCH 085/196] Fix multibuffer anchors before the ends of excerpts --- crates/editor/src/multi_buffer.rs | 71 ++++++++++++++++++++++++++++++- 1 file changed, 70 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index a3149d180fe664d0a40d5cafb046403213382a8e..1b42415e5cbe9ea191fe576d6949a4c18cef7820 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1151,6 +1151,9 @@ impl MultiBufferSnapshot { let offset = position.to_offset(self); let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); cursor.seek(&offset, Bias::Right, &()); + if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left { + cursor.prev(&()); + } if let Some(excerpt) = cursor.item() { let start_after_header = cursor.start().0 + excerpt.header_height as usize; let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); @@ -1662,7 +1665,6 @@ mod tests { fn test_excerpt_buffer(cx: &mut MutableAppContext) { let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let subscription = multibuffer.update(cx, |multibuffer, cx| { @@ -1765,6 +1767,73 @@ mod tests { ); } + #[gpui::test] + fn test_singleton_multibuffer_anchors(cx: &mut MutableAppContext) { + let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); + let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + let old_snapshot = multibuffer.read(cx).snapshot(cx); + buffer.update(cx, |buffer, cx| { + buffer.edit([0..0], "X", cx); + buffer.edit([5..5], "Y", cx); + }); + let new_snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(old_snapshot.text(), "abcd"); + assert_eq!(new_snapshot.text(), "XabcdY"); + + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(4).to_offset(&new_snapshot), 5); + assert_eq!(old_snapshot.anchor_after(4).to_offset(&new_snapshot), 6); + } + + #[gpui::test] + fn test_multibuffer_anchors(cx: &mut MutableAppContext) { + let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); + let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer_1, + range: 0..4, + header_height: 1, + }, + cx, + ); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer_2, + range: 0..5, + header_height: 1, + }, + cx, + ); + multibuffer + }); + let old_snapshot = multibuffer.read(cx).snapshot(cx); + + buffer_1.update(cx, |buffer, cx| { + buffer.edit([0..0], "W", cx); + buffer.edit([5..5], "X", cx); + }); + buffer_2.update(cx, |buffer, cx| { + buffer.edit([0..0], "Y", cx); + buffer.edit([6..0], "Z", cx); + }); + let new_snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(old_snapshot.text(), "\nabcd\n\nefghi\n"); + assert_eq!(new_snapshot.text(), "\nWabcdX\n\nYefghiZ\n"); + + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); + 
assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(7).to_offset(&new_snapshot), 9); + assert_eq!(old_snapshot.anchor_after(7).to_offset(&new_snapshot), 10); + } + #[gpui::test(iterations = 100)] fn test_random_excerpts(cx: &mut MutableAppContext, mut rng: StdRng) { let operations = env::var("OPERATIONS") From ad05c0cc7a15ed3f191cc566e2cf445c4713f7c3 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 14 Dec 2021 16:24:48 -0700 Subject: [PATCH 086/196] Implement MultiBufferSnapshot::excerpt_headers_in_range Co-Authored-By: Max Brunsfeld --- crates/diagnostics/src/diagnostics.rs | 9 +++ crates/editor/src/editor.rs | 4 + crates/editor/src/multi_buffer.rs | 109 +++++++++++++++++++++++--- 3 files changed, 110 insertions(+), 12 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 71b138c4e9485e72bdd70af77386cb8fd889d465..58efa50c5ecadb426cef87d49ad0d1c0b3caee0b 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use collections::HashMap; use editor::{Editor, ExcerptProperties, MultiBuffer}; use gpui::{ @@ -64,6 +66,13 @@ impl ProjectDiagnostics { buffer: &buffer, range: diagnostic.range, header_height: 1, + render_header: Some(Arc::new({ + let message = diagnostic.diagnostic.message.clone(); + move |_| { + Text::new(message.clone(), Default::default()) + .boxed() + } + })), }, cx, ); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 880fe674983416e10a69b1650b93a416bde6bc47..5a578115d5328e550c6388f1a884c0b74d43f2f9 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5874,6 +5874,7 @@ mod tests { buffer: &buffer, range: Point::new(0, 0)..Point::new(0, 4), header_height: 0, + render_header: None, }, cx, ); @@ -5882,6 +5883,7 @@ mod tests { buffer: &buffer, range: Point::new(1, 0)..Point::new(1, 4), header_height: 0, + render_header: None, }, cx, ); @@ -5926,6 +5928,7 @@ mod tests { buffer: &buffer, range: Point::new(0, 0)..Point::new(1, 4), header_height: 0, + render_header: None, }, cx, ); @@ -5934,6 +5937,7 @@ mod tests { buffer: &buffer, range: Point::new(1, 0)..Point::new(2, 4), header_height: 0, + render_header: None, }, cx, ); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1b42415e5cbe9ea191fe576d6949a4c18cef7820..dace217839c254b8dbaa8b6ad95da5a7cd7d5a25 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -4,7 +4,7 @@ pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::{HashMap, HashSet}; -use gpui::{AppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; +use gpui::{AppContext, ElementBox, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, ToOffset as _, ToPoint as _, TransactionId, @@ -83,10 +83,13 @@ pub struct MultiBufferSnapshot { diagnostics_update_count: usize, } +pub type RenderHeaderFn = Arc ElementBox>; + pub struct ExcerptProperties<'a, T> { pub buffer: &'a ModelHandle, pub range: Range, pub header_height: u8, + pub render_header: Option, } #[derive(Clone)] @@ -95,6 +98,7 @@ struct Excerpt { buffer_id: usize, buffer: BufferSnapshot, range: Range, + render_header: Option, text_summary: TextSummary, 
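    // Height of this excerpt's header, in rows; the header content itself is produced by `render_header`.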
header_height: u8, has_trailing_newline: bool, @@ -145,6 +149,7 @@ impl MultiBuffer { buffer: &buffer, range: text::Anchor::min()..text::Anchor::max(), header_height: 0, + render_header: None, }, cx, ); @@ -511,11 +516,16 @@ impl MultiBuffer { assert_eq!(self.history.transaction_depth, 0); self.sync(cx); - let buffer = &props.buffer; - cx.subscribe(buffer, Self::on_buffer_event).detach(); + let buffer = props.buffer.clone(); + cx.subscribe(&buffer, Self::on_buffer_event).detach(); + + let buffer_snapshot = buffer.read(cx).snapshot(); + let range = buffer_snapshot.anchor_before(&props.range.start) + ..buffer_snapshot.anchor_after(&props.range.end); + let last_version = buffer_snapshot.version().clone(); + let last_parse_count = buffer_snapshot.parse_count(); + let last_diagnostics_update_count = buffer_snapshot.diagnostics_update_count(); - let buffer = props.buffer.read(cx); - let range = buffer.anchor_before(&props.range.start)..buffer.anchor_after(&props.range.end); let mut snapshot = self.snapshot.borrow_mut(); let prev_id = snapshot.excerpts.last().map(|e| &e.id); let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); @@ -523,10 +533,11 @@ impl MultiBuffer { let edit_start = snapshot.excerpts.summary().text.bytes; let excerpt = Excerpt::new( id.clone(), - props.buffer.id(), - buffer.snapshot(), + buffer.id(), + buffer_snapshot, range, props.header_height, + props.render_header, !self.singleton, ); let edit = Edit { @@ -537,10 +548,10 @@ impl MultiBuffer { self.buffers .entry(props.buffer.id()) .or_insert_with(|| BufferState { - buffer: props.buffer.clone(), - last_version: buffer.version(), - last_parse_count: buffer.parse_count(), - last_diagnostics_update_count: buffer.diagnostics_update_count(), + buffer, + last_version, + last_parse_count, + last_diagnostics_update_count, excerpts: Default::default(), }) .excerpts @@ -659,6 +670,7 @@ impl MultiBuffer { buffer.snapshot(), old_excerpt.range.clone(), old_excerpt.header_height, + old_excerpt.render_header.clone(), !self.singleton, ); } else { @@ -704,6 +716,38 @@ impl MultiBufferSnapshot { .collect() } + pub fn excerpt_headers_in_range<'a>( + &'a self, + range: Range, + ) -> impl 'a + Iterator, RenderHeaderFn)> { + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&Point::new(range.start, 0), Bias::Right, &()); + + if let Some(excerpt) = cursor.item() { + if range.start >= cursor.start().row + excerpt.header_height as u32 { + cursor.next(&()); + } + } + + iter::from_fn(move || { + while let Some(excerpt) = cursor.item() { + if cursor.start().row >= range.end { + break; + } + + if let Some(render) = excerpt.render_header.clone() { + let start = cursor.start().row; + let end = start + excerpt.header_height as u32; + cursor.next(&()); + return Some((start..end, render)); + } else { + cursor.next(&()); + } + } + None + }) + } + pub fn reversed_chars_at<'a, T: ToOffset>( &'a self, position: T, @@ -1382,6 +1426,7 @@ impl Excerpt { buffer: BufferSnapshot, range: Range, header_height: u8, + render_header: Option, has_trailing_newline: bool, ) -> Self { let mut text_summary = @@ -1409,6 +1454,7 @@ impl Excerpt { range, text_summary, header_height, + render_header, has_trailing_newline, } } @@ -1638,7 +1684,7 @@ impl ToPoint for Point { #[cfg(test)] mod tests { use super::*; - use gpui::MutableAppContext; + use gpui::{elements::Empty, Element, MutableAppContext}; use language::Buffer; use rand::prelude::*; use std::env; @@ -1674,6 +1720,7 @@ mod tests { buffer: &buffer_1, range: Point::new(1, 
2)..Point::new(2, 5), header_height: 2, + render_header: Some(Arc::new(|_| Empty::new().named("header 1"))), }, cx, ); @@ -1690,6 +1737,7 @@ mod tests { buffer: &buffer_1, range: Point::new(3, 3)..Point::new(4, 4), header_height: 1, + render_header: Some(Arc::new(|_| Empty::new().named("header 2"))), }, cx, ); @@ -1698,6 +1746,7 @@ mod tests { buffer: &buffer_2, range: Point::new(3, 1)..Point::new(3, 3), header_height: 3, + render_header: Some(Arc::new(|_| Empty::new().named("header 3"))), }, cx, ); @@ -1729,6 +1778,37 @@ mod tests { ) ); + { + let snapshot = multibuffer.read(cx).read(cx); + assert_eq!( + snapshot + .excerpt_headers_in_range(0..snapshot.max_point().row + 1) + .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .collect::>(), + &[ + (0..2, "header 1".into()), + (4..5, "header 2".into()), + (7..10, "header 3".into()) + ] + ); + + assert_eq!( + snapshot + .excerpt_headers_in_range(1..5) + .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .collect::>(), + &[(0..2, "header 1".into()), (4..5, "header 2".into())] + ); + + assert_eq!( + snapshot + .excerpt_headers_in_range(2..8) + .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .collect::>(), + &[(4..5, "header 2".into()), (7..10, "header 3".into())] + ); + } + buffer_1.update(cx, |buffer, cx| { buffer.edit( [ @@ -1798,6 +1878,7 @@ mod tests { buffer: &buffer_1, range: 0..4, header_height: 1, + render_header: None, }, cx, ); @@ -1806,6 +1887,7 @@ mod tests { buffer: &buffer_2, range: 0..5, header_height: 1, + render_header: None, }, cx, ); @@ -1881,6 +1963,7 @@ mod tests { buffer: &buffer_handle, range: start_ix..end_ix, header_height, + render_header: None, }, cx, ) @@ -2091,6 +2174,7 @@ mod tests { buffer: &buffer_1, range: 0..buffer_1.read(cx).len(), header_height: 0, + render_header: None, }, cx, ); @@ -2099,6 +2183,7 @@ mod tests { buffer: &buffer_2, range: 0..buffer_2.read(cx).len(), header_height: 0, + render_header: None, }, cx, ); From e1a2897d534d9e3c9e5c1748e259aa8d30355a08 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 14 Dec 2021 17:00:57 -0700 Subject: [PATCH 087/196] Render basic diagnostic messages in project diagnostics view Co-Authored-By: Max Brunsfeld --- crates/diagnostics/src/diagnostics.rs | 146 ++++++++++++++------------ crates/editor/src/display_map.rs | 19 +++- crates/editor/src/editor.rs | 5 - crates/editor/src/element.rs | 77 +++++++++----- 4 files changed, 147 insertions(+), 100 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 58efa50c5ecadb426cef87d49ad0d1c0b3caee0b..59f3acd3683eefe1706149f37f40043952adef8b 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,10 +1,10 @@ use std::sync::Arc; use collections::HashMap; -use editor::{Editor, ExcerptProperties, MultiBuffer}; +use editor::{diagnostic_style, Editor, ExcerptProperties, MultiBuffer}; use gpui::{ - action, elements::*, keymap::Binding, AppContext, Entity, ModelContext, ModelHandle, - MutableAppContext, RenderContext, View, ViewContext, ViewHandle, + action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, + RenderContext, View, ViewContext, ViewHandle, }; use language::Point; use postage::watch; @@ -19,16 +19,63 @@ pub fn init(cx: &mut MutableAppContext) { } struct ProjectDiagnostics { - excerpts: ModelHandle, project: ModelHandle, } struct ProjectDiagnosticsEditor { editor: ViewHandle, + excerpts: ModelHandle, } impl 
ProjectDiagnostics { - fn new(project: ModelHandle, cx: &mut ModelContext) -> Self { + fn new(project: ModelHandle) -> Self { + Self { project } + } +} + +impl Entity for ProjectDiagnostics { + type Event = (); +} + +impl Entity for ProjectDiagnosticsEditor { + type Event = (); +} + +impl View for ProjectDiagnosticsEditor { + fn ui_name() -> &'static str { + "ProjectDiagnosticsEditor" + } + + fn render(&mut self, _: &mut RenderContext) -> ElementBox { + ChildView::new(self.editor.id()).boxed() + } +} + +impl ProjectDiagnosticsEditor { + fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { + let diagnostics = cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone())); + workspace.add_item(diagnostics, cx); + } +} + +impl workspace::Item for ProjectDiagnostics { + type View = ProjectDiagnosticsEditor; + + fn build_view( + handle: ModelHandle, + settings: watch::Receiver, + cx: &mut ViewContext, + ) -> Self::View { + let project = handle.read(cx).project.clone(); + let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); + let editor = cx.add_view(|cx| { + Editor::for_buffer( + excerpts.clone(), + editor::settings_builder(excerpts.downgrade(), settings.clone()), + cx, + ) + }); + let project_paths = project .read(cx) .diagnostic_summaries(cx) @@ -58,19 +105,35 @@ impl ProjectDiagnostics { grouped_diagnostics.into_values().collect::>(); sorted_diagnostic_groups.sort_by_key(|group| group.0); - for diagnostic in snapshot.all_diagnostics::() { + for entry in snapshot.all_diagnostics::() { this.update(&mut cx, |this, cx| { this.excerpts.update(cx, |excerpts, cx| { excerpts.push_excerpt( ExcerptProperties { buffer: &buffer, - range: diagnostic.range, - header_height: 1, + range: entry.range, + header_height: entry + .diagnostic + .message + .matches('\n') + .count() + as u8 + + 1, render_header: Some(Arc::new({ - let message = diagnostic.diagnostic.message.clone(); + let message = entry.diagnostic.message.clone(); + let settings = settings.clone(); + move |_| { - Text::new(message.clone(), Default::default()) - .boxed() + let editor_style = &settings.borrow().theme.editor; + let mut text_style = editor_style.text.clone(); + text_style.color = diagnostic_style( + entry.diagnostic.severity, + true, + &editor_style, + ) + .text; + + Text::new(message.clone(), text_style).boxed() } })), }, @@ -86,56 +149,7 @@ impl ProjectDiagnostics { }) .detach(); - Self { - excerpts: cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))), - project, - } - } -} - -impl Entity for ProjectDiagnostics { - type Event = (); -} - -impl Entity for ProjectDiagnosticsEditor { - type Event = (); -} - -impl View for ProjectDiagnosticsEditor { - fn ui_name() -> &'static str { - "ProjectDiagnosticsEditor" - } - - fn render(&mut self, cx: &mut RenderContext) -> ElementBox { - ChildView::new(self.editor.id()).boxed() - } -} - -impl ProjectDiagnosticsEditor { - fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { - let diagnostics = - cx.add_model(|cx| ProjectDiagnostics::new(workspace.project().clone(), cx)); - workspace.add_item(diagnostics, cx); - } -} - -impl workspace::Item for ProjectDiagnostics { - type View = ProjectDiagnosticsEditor; - - fn build_view( - handle: ModelHandle, - settings: watch::Receiver, - cx: &mut ViewContext, - ) -> Self::View { - let excerpts = handle.read(cx).excerpts.clone(); - let editor = cx.add_view(|cx| { - Editor::for_buffer( - excerpts.clone(), - editor::settings_builder(excerpts.downgrade(), settings), - 
cx, - ) - }); - ProjectDiagnosticsEditor { editor } + ProjectDiagnosticsEditor { editor, excerpts } } fn project_path(&self) -> Option { @@ -148,22 +162,22 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { "Project Diagnostics".to_string() } - fn project_path(&self, cx: &AppContext) -> Option { + fn project_path(&self, _: &AppContext) -> Option { None } fn save( &mut self, - cx: &mut ViewContext, + _: &mut ViewContext, ) -> anyhow::Result>> { todo!() } fn save_as( &mut self, - worktree: ModelHandle, - path: &std::path::Path, - cx: &mut ViewContext, + _: ModelHandle, + _: &std::path::Path, + _: &mut ViewContext, ) -> gpui::Task> { todo!() } diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 1f420681a109279ff7a867137335011ae4085203..838b136f75f1920b4c6ec6cfdd558fe5f65457bf 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -3,7 +3,9 @@ mod fold_map; mod tab_map; mod wrap_map; -use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; +use crate::{ + multi_buffer::RenderHeaderFn, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, +}; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle}; @@ -327,6 +329,21 @@ impl DisplaySnapshot { self.blocks_snapshot.blocks_in_range(rows) } + pub fn excerpt_headers_in_range<'a>( + &'a self, + rows: Range, + ) -> impl 'a + Iterator, RenderHeaderFn)> { + let start_row = DisplayPoint::new(rows.start, 0).to_point(self).row; + let end_row = DisplayPoint::new(rows.end, 0).to_point(self).row; + self.buffer_snapshot + .excerpt_headers_in_range(start_row..end_row) + .map(move |(rows, render)| { + let start_row = Point::new(rows.start, 0).to_display_point(self).row(); + let end_row = Point::new(rows.end, 0).to_display_point(self).row(); + (start_row..end_row, render) + }) + } + pub fn intersects_fold(&self, offset: T) -> bool { self.folds_snapshot.intersects_fold(offset) } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5a578115d5328e550c6388f1a884c0b74d43f2f9..a82b02435d461ad01ce5dd3f7e2cb11b903461c9 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -590,11 +590,6 @@ impl Editor { scroll_position.y() - self.scroll_top_anchor.to_display_point(&map).row() as f32, ); - debug_assert_eq!( - compute_scroll_position(&map, self.scroll_position, &self.scroll_top_anchor), - scroll_position - ); - cx.notify(); } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3572d839b6ea10e01aa49f8d0984b2ce99a5db6c..768b591003de4edaed161998461b9dd9e6c16077 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -631,34 +631,55 @@ impl EditorElement { line_layouts: &[text_layout::Line], cx: &mut LayoutContext, ) -> Vec<(u32, ElementBox)> { - snapshot - .blocks_in_range(rows.clone()) - .map(|(start_row, block)| { - let anchor_row = block - .position() - .to_point(&snapshot.buffer_snapshot) - .to_display_point(snapshot) - .row(); - - let anchor_x = if rows.contains(&anchor_row) { - line_layouts[(anchor_row - rows.start) as usize] - .x_for_index(block.column() as usize) - } else { - layout_line(anchor_row, snapshot, style, cx.text_layout_cache) - .x_for_index(block.column() as usize) - }; - - let mut element = block.render(&BlockContext { cx, anchor_x }); - element.layout( - SizeConstraint { - min: Vector2F::zero(), - max: vec2f(text_width, block.height() as f32 * line_height), - }, 
- cx, - ); - (start_row, element) - }) - .collect() + let mut blocks = Vec::new(); + + blocks.extend( + snapshot + .blocks_in_range(rows.clone()) + .map(|(start_row, block)| { + let anchor_row = block + .position() + .to_point(&snapshot.buffer_snapshot) + .to_display_point(snapshot) + .row(); + + let anchor_x = if rows.contains(&anchor_row) { + line_layouts[(anchor_row - rows.start) as usize] + .x_for_index(block.column() as usize) + } else { + layout_line(anchor_row, snapshot, style, cx.text_layout_cache) + .x_for_index(block.column() as usize) + }; + + let mut element = block.render(&BlockContext { cx, anchor_x }); + element.layout( + SizeConstraint { + min: Vector2F::zero(), + max: vec2f(text_width, block.height() as f32 * line_height), + }, + cx, + ); + (start_row, element) + }), + ); + + blocks.extend( + snapshot + .excerpt_headers_in_range(rows.clone()) + .map(|(rows, render)| { + let mut element = render(cx); + element.layout( + SizeConstraint { + min: Vector2F::zero(), + max: vec2f(text_width, rows.len() as f32 * line_height), + }, + cx, + ); + (rows.start, element) + }), + ); + + blocks } } From 6c5b27af1d197dd4e32afe8da9010be3879749ec Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 14 Dec 2021 18:26:42 -0700 Subject: [PATCH 088/196] Group diagnostics by primary Render primary message above the excerpt and supporting messages as block decorations with a `Below` disposition. This is still super rough. Co-Authored-By: Max Brunsfeld --- crates/chat_panel/src/chat_panel.rs | 6 +- crates/diagnostics/src/diagnostics.rs | 104 ++++++++++++-------- crates/editor/src/display_map.rs | 9 +- crates/editor/src/display_map/block_map.rs | 21 ++-- crates/editor/src/editor.rs | 85 +++++++++------- crates/editor/src/element.rs | 3 +- crates/editor/src/multi_buffer.rs | 2 + crates/editor/src/multi_buffer/anchor.rs | 7 ++ crates/file_finder/src/file_finder.rs | 5 +- crates/go_to_line/src/go_to_line.rs | 6 +- crates/language/src/buffer.rs | 9 +- crates/language/src/diagnostic_set.rs | 40 ++++++++ crates/theme_selector/src/theme_selector.rs | 6 +- 13 files changed, 205 insertions(+), 98 deletions(-) diff --git a/crates/chat_panel/src/chat_panel.rs b/crates/chat_panel/src/chat_panel.rs index 44c4bd6295adb4e43025ee1e2e00854761ec97b0..7aca6daa956bc709f206e39e822696405345859a 100644 --- a/crates/chat_panel/src/chat_panel.rs +++ b/crates/chat_panel/src/chat_panel.rs @@ -13,7 +13,7 @@ use gpui::{ ViewContext, ViewHandle, }; use postage::{prelude::Stream, watch}; -use std::sync::Arc; +use std::{rc::Rc, sync::Arc}; use time::{OffsetDateTime, UtcOffset}; use util::{ResultExt, TryFutureExt}; use workspace::Settings; @@ -56,14 +56,14 @@ impl ChatPanel { 4, { let settings = settings.clone(); - move |_| { + Rc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, style: settings.theme.chat_panel.input_editor.as_editor(), soft_wrap: editor::SoftWrap::EditorWidth, } - } + }) }, cx, ) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 59f3acd3683eefe1706149f37f40043952adef8b..4067bb5894537a69ad6dd7f5eaa2fafb7114efd1 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,7 +1,10 @@ -use std::sync::Arc; +use std::{cmp, sync::Arc}; -use collections::HashMap; -use editor::{diagnostic_style, Editor, ExcerptProperties, MultiBuffer}; +use editor::{ + diagnostic_block_renderer, diagnostic_style, + display_map::{BlockDisposition, BlockProperties}, + Anchor, Editor, ExcerptProperties, 
MultiBuffer, +}; use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, RenderContext, View, ViewContext, ViewHandle, @@ -68,13 +71,9 @@ impl workspace::Item for ProjectDiagnostics { ) -> Self::View { let project = handle.read(cx).project.clone(); let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); - let editor = cx.add_view(|cx| { - Editor::for_buffer( - excerpts.clone(), - editor::settings_builder(excerpts.downgrade(), settings.clone()), - cx, - ) - }); + let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); + let editor = + cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx)); let project_paths = project .read(cx) @@ -91,58 +90,85 @@ impl workspace::Item for ProjectDiagnostics { .await?; let snapshot = buffer.read_with(&cx, |b, _| b.snapshot()); - let mut grouped_diagnostics = HashMap::default(); - for entry in snapshot.all_diagnostics() { - let mut group = grouped_diagnostics - .entry(entry.diagnostic.group_id) - .or_insert((Point::zero(), Vec::new())); - if entry.diagnostic.is_primary { - group.0 = entry.range.start; - } - group.1.push(entry); - } - let mut sorted_diagnostic_groups = - grouped_diagnostics.into_values().collect::>(); - sorted_diagnostic_groups.sort_by_key(|group| group.0); - - for entry in snapshot.all_diagnostics::() { - this.update(&mut cx, |this, cx| { - this.excerpts.update(cx, |excerpts, cx| { - excerpts.push_excerpt( + this.update(&mut cx, |this, cx| { + let mut blocks = Vec::new(); + this.excerpts.update(cx, |excerpts, excerpts_cx| { + for group in snapshot.diagnostic_groups::() { + let excerpt_start = cmp::min( + group.primary.range.start.row, + group + .supporting + .first() + .map_or(u32::MAX, |entry| entry.range.start.row), + ); + let excerpt_end = cmp::max( + group.primary.range.end.row, + group + .supporting + .last() + .map_or(0, |entry| entry.range.end.row), + ); + + let primary_diagnostic = group.primary.diagnostic; + let excerpt_id = excerpts.push_excerpt( ExcerptProperties { buffer: &buffer, - range: entry.range, - header_height: entry - .diagnostic + range: Point::new(excerpt_start, 0) + ..Point::new( + excerpt_end, + snapshot.line_len(excerpt_end), + ), + header_height: primary_diagnostic .message .matches('\n') .count() as u8 + 1, render_header: Some(Arc::new({ - let message = entry.diagnostic.message.clone(); let settings = settings.clone(); move |_| { let editor_style = &settings.borrow().theme.editor; let mut text_style = editor_style.text.clone(); text_style.color = diagnostic_style( - entry.diagnostic.severity, + primary_diagnostic.severity, true, &editor_style, ) .text; - Text::new(message.clone(), text_style).boxed() + Text::new( + primary_diagnostic.message.clone(), + text_style, + ) + .boxed() } })), }, - cx, + excerpts_cx, ); - cx.notify(); - }); - }) - } + + for entry in group.supporting { + let buffer_anchor = snapshot.anchor_before(entry.range.start); + blocks.push(BlockProperties { + position: Anchor::new(excerpt_id.clone(), buffer_anchor), + height: entry.diagnostic.message.matches('\n').count() + as u8 + + 1, + render: diagnostic_block_renderer( + entry.diagnostic, + true, + build_settings.clone(), + ), + disposition: BlockDisposition::Below, + }); + } + } + }); + this.editor.update(cx, |editor, cx| { + editor.insert_blocks(blocks, cx); + }); + }) } Result::Ok::<_, anyhow::Error>(()) } diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 
838b136f75f1920b4c6ec6cfdd558fe5f65457bf..2c55f0f988868dfff04856b15938182eeb0ea780 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -8,7 +8,7 @@ use crate::{ }; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; -use gpui::{fonts::FontId, ElementBox, Entity, ModelContext, ModelHandle}; +use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle}; use language::{Point, Subscription as BufferSubscription}; use std::{ collections::{HashMap, HashSet}, @@ -21,7 +21,7 @@ use wrap_map::WrapMap; pub use block_map::{ AlignedBlock, BlockBufferRows as DisplayBufferRows, BlockChunks as DisplayChunks, BlockContext, - BlockDisposition, BlockId, BlockProperties, + BlockDisposition, BlockId, BlockProperties, RenderBlock, }; pub trait ToDisplayPoint { @@ -146,10 +146,7 @@ impl DisplayMap { block_map.insert(blocks) } - pub fn replace_blocks(&mut self, styles: HashMap) - where - F: 'static + Fn(&BlockContext) -> ElementBox, - { + pub fn replace_blocks(&mut self, styles: HashMap) { self.block_map.replace(styles); } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index cd25e5889924733f06cc667e7613f190006c0fa9..44d6d95e0fda3a914324eeb5b1edf7a84a9209ac 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -45,11 +45,13 @@ struct BlockRow(u32); #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)] struct WrapRow(u32); +pub type RenderBlock = Arc ElementBox>; + pub struct Block { id: BlockId, position: Anchor, height: u8, - render: Mutex ElementBox>>, + render: Mutex, disposition: BlockDisposition, } @@ -306,13 +308,10 @@ impl BlockMap { *transforms = new_transforms; } - pub fn replace(&mut self, mut element_builders: HashMap) - where - F: 'static + Fn(&BlockContext) -> ElementBox, - { + pub fn replace(&mut self, mut renderers: HashMap) { for block in &self.blocks { - if let Some(build_element) = element_builders.remove(&block.id) { - *block.render.lock() = Arc::new(build_element); + if let Some(render) = renderers.remove(&block.id) { + *block.render.lock() = render; } } } @@ -832,6 +831,14 @@ impl Deref for AlignedBlock { } } +impl<'a> Deref for BlockContext<'a> { + type Target = AppContext; + + fn deref(&self) -> &Self::Target { + &self.cx + } +} + impl Debug for Block { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Block") diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a82b02435d461ad01ce5dd3f7e2cb11b903461c9..70c81d84a3411ab6b24f0c9d11e3ce7947f3d9da 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -27,16 +27,13 @@ use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId, }; -use multi_buffer::{ - Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint, -}; -pub use multi_buffer::{ExcerptProperties, MultiBuffer}; +pub use multi_buffer::{Anchor, ExcerptProperties, MultiBuffer}; +use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint}; use postage::watch; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; use smol::Timer; use std::{ - cell::RefCell, cmp, collections::HashMap, iter::{self, FromIterator}, @@ -359,6 +356,8 @@ pub enum SoftWrap { Column(u32), } +type BuildSettings = Rc EditorSettings>; + pub struct Editor { handle: WeakViewHandle, buffer: ModelHandle, @@ -377,7 +376,7 @@ 
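// `BuildSettings` wraps the settings-builder closure in an `Rc`, letting the editor and block renderers such as `diagnostic_block_renderer` share clones of it.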
pub struct Editor { scroll_position: Vector2F, scroll_top_anchor: Anchor, autoscroll_request: Option, - build_settings: Rc EditorSettings>>, + build_settings: BuildSettings, focused: bool, show_local_cursors: bool, blink_epoch: usize, @@ -433,10 +432,7 @@ struct ClipboardSelection { } impl Editor { - pub fn single_line( - build_settings: impl 'static + Fn(&AppContext) -> EditorSettings, - cx: &mut ViewContext, - ) -> Self { + pub fn single_line(build_settings: BuildSettings, cx: &mut ViewContext) -> Self { let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let mut view = Self::for_buffer(buffer, build_settings, cx); @@ -446,7 +442,7 @@ impl Editor { pub fn auto_height( max_lines: usize, - build_settings: impl 'static + Fn(&AppContext) -> EditorSettings, + build_settings: BuildSettings, cx: &mut ViewContext, ) -> Self { let buffer = cx.add_model(|cx| Buffer::new(0, String::new(), cx)); @@ -458,10 +454,10 @@ impl Editor { pub fn for_buffer( buffer: ModelHandle, - build_settings: impl 'static + Fn(&AppContext) -> EditorSettings, + build_settings: BuildSettings, cx: &mut ViewContext, ) -> Self { - Self::new(buffer, Rc::new(RefCell::new(build_settings)), cx) + Self::new(buffer, build_settings, cx) } pub fn clone(&self, cx: &mut ViewContext) -> Self { @@ -473,10 +469,10 @@ impl Editor { pub fn new( buffer: ModelHandle, - build_settings: Rc EditorSettings>>, + build_settings: BuildSettings, cx: &mut ViewContext, ) -> Self { - let settings = build_settings.borrow_mut()(cx); + let settings = build_settings(cx); let display_map = cx.add_model(|cx| { DisplayMap::new( buffer.clone(), @@ -1440,7 +1436,7 @@ impl Editor { pub fn tab(&mut self, _: &Tab, cx: &mut ViewContext) { self.start_transaction(cx); - let tab_size = self.build_settings.borrow()(cx).tab_size; + let tab_size = (self.build_settings)(cx).tab_size; let mut selections = self.local_selections::(cx); let mut last_indent = None; self.buffer.update(cx, |buffer, cx| { @@ -1512,7 +1508,7 @@ impl Editor { pub fn outdent(&mut self, _: &Outdent, cx: &mut ViewContext) { self.start_transaction(cx); - let tab_size = self.build_settings.borrow()(cx).tab_size; + let tab_size = (self.build_settings)(cx).tab_size; let selections = self.local_selections::(cx); let mut deletion_ranges = Vec::new(); let mut last_outdent = None; @@ -2900,13 +2896,14 @@ impl Editor { active_diagnostics.is_valid = is_valid; let mut new_styles = HashMap::new(); for (block_id, diagnostic) in &active_diagnostics.blocks { - let build_settings = self.build_settings.clone(); - let diagnostic = diagnostic.clone(); - new_styles.insert(*block_id, move |cx: &BlockContext| { - let diagnostic = diagnostic.clone(); - let settings = build_settings.borrow()(cx.cx); - render_diagnostic(diagnostic, &settings.style, is_valid, cx.anchor_x) - }); + new_styles.insert( + *block_id, + diagnostic_block_renderer( + diagnostic.clone(), + is_valid, + self.build_settings.clone(), + ), + ); } self.display_map .update(cx, |display_map, _| display_map.replace_blocks(new_styles)); @@ -2950,11 +2947,7 @@ impl Editor { BlockProperties { position: entry.range.start, height: message_height, - render: Arc::new(move |cx| { - let settings = build_settings.borrow()(cx.cx); - let diagnostic = diagnostic.clone(); - render_diagnostic(diagnostic, &settings.style, true, cx.anchor_x) - }), + render: diagnostic_block_renderer(diagnostic, true, build_settings), disposition: BlockDisposition::Below, } }), @@ -3431,6 +3424,18 @@ impl Editor { } 
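    // `insert_blocks`: forwards block decorations to the display map and returns the ids of the newly inserted blocks.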
} + pub fn insert_blocks

( + &mut self, + blocks: impl IntoIterator>, + cx: &mut ViewContext, + ) -> Vec + where + P: ToOffset + Clone, + { + self.display_map + .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)) + } + pub fn longest_row(&self, cx: &mut MutableAppContext) -> u32 { self.display_map .update(cx, |map, cx| map.snapshot(cx)) @@ -3645,7 +3650,7 @@ impl Entity for Editor { impl View for Editor { fn render(&mut self, cx: &mut RenderContext) -> ElementBox { - let settings = self.build_settings.borrow_mut()(cx); + let settings = (self.build_settings)(cx); self.display_map.update(cx, |map, cx| { map.set_font( settings.style.text.font_id, @@ -3757,6 +3762,18 @@ impl SelectionExt for Selection { } } +pub fn diagnostic_block_renderer( + diagnostic: Diagnostic, + is_valid: bool, + build_settings: BuildSettings, +) -> RenderBlock { + Arc::new(move |cx: &BlockContext| { + let diagnostic = diagnostic.clone(); + let settings = build_settings(cx); + render_diagnostic(diagnostic, &settings.style, is_valid, cx.anchor_x) + }) +} + fn render_diagnostic( diagnostic: Diagnostic, style: &EditorStyle, @@ -3792,8 +3809,8 @@ pub fn diagnostic_style( pub fn settings_builder( buffer: WeakModelHandle, settings: watch::Receiver, -) -> impl Fn(&AppContext) -> EditorSettings { - move |cx| { +) -> BuildSettings { + Rc::new(move |cx| { let settings = settings.borrow(); let font_cache = cx.font_cache(); let font_family_id = settings.buffer_font_family; @@ -3828,7 +3845,7 @@ pub fn settings_builder( soft_wrap, style: theme, } - } + }) } #[cfg(test)] @@ -6086,7 +6103,7 @@ mod tests { settings: EditorSettings, cx: &mut ViewContext, ) -> Editor { - Editor::for_buffer(buffer, move |_| settings.clone(), cx) + Editor::for_buffer(buffer, Rc::new(move |_| settings.clone()), cx) } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 768b591003de4edaed161998461b9dd9e6c16077..f0f07426fd78f858a843f50e63a3bd25cd47bd07 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1179,6 +1179,7 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 { mod tests { use super::*; use crate::{Editor, EditorSettings, MultiBuffer}; + use std::rc::Rc; use util::test::sample_text; #[gpui::test] @@ -1190,7 +1191,7 @@ mod tests { buffer, { let settings = settings.clone(); - move |_| settings.clone() + Rc::new(move |_| settings.clone()) }, cx, ) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index dace217839c254b8dbaa8b6ad95da5a7cd7d5a25..96e65ca484f7d63019d63dc409ad50b4adb50bcb 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -559,6 +559,8 @@ impl MultiBuffer { self.subscriptions.publish_mut([edit]); + cx.notify(); + id } diff --git a/crates/editor/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs index 2cc4817a92b9f8d22de3e2e3640ea14eec669061..8fea4799e826afbacbd6f8ec4b31ce749da5e3d8 100644 --- a/crates/editor/src/multi_buffer/anchor.rs +++ b/crates/editor/src/multi_buffer/anchor.rs @@ -14,6 +14,13 @@ pub struct Anchor { } impl Anchor { + pub fn new(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self { + Self { + excerpt_id, + text_anchor, + } + } + pub fn min() -> Self { Self { excerpt_id: ExcerptId::min(), diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 8fef0b6bdf7e29692a08a233b883350e63275b20..fd1a60041faaa510de1c88f1ec3d95ad2d4f0379 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -16,6 
+16,7 @@ use project::{Project, ProjectPath}; use std::{ cmp, path::Path, + rc::Rc, sync::{ atomic::{self, AtomicBool}, Arc, @@ -270,14 +271,14 @@ impl FileFinder { Editor::single_line( { let settings = settings.clone(); - move |_| { + Rc::new(move |_| { let settings = settings.borrow(); EditorSettings { style: settings.theme.selector.input_editor.as_editor(), tab_size: settings.tab_size, soft_wrap: editor::SoftWrap::None, } - } + }) }, cx, ) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index 85c5a6439b350807023e17d9b6e025aa54a2440f..a037b78d924804377bb8052389adb696d11db61d 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -1,3 +1,5 @@ +use std::rc::Rc; + use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings}; use gpui::{ action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity, @@ -49,14 +51,14 @@ impl GoToLine { Editor::single_line( { let settings = settings.clone(); - move |_| { + Rc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, style: settings.theme.selector.input_editor.as_editor(), soft_wrap: editor::SoftWrap::None, } - } + }) }, cx, ) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d1683241b3ed4458b4d2ff6db7dc071864bc8f29..f2964374b643c7dc323f20876534a56232727a54 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,4 +1,4 @@ -use crate::diagnostic_set::DiagnosticEntry; +use crate::diagnostic_set::{DiagnosticEntry, DiagnosticGroup}; pub use crate::{ diagnostic_set::DiagnosticSet, highlight_map::{HighlightId, HighlightMap}, @@ -1728,6 +1728,13 @@ impl BufferSnapshot { self.diagnostics.range(search_range, self, true) } + pub fn diagnostic_groups(&self) -> Vec> + where + O: FromAnchor + Ord + Copy, + { + self.diagnostics.groups(self) + } + pub fn diagnostic_group<'a, O>( &'a self, group_id: usize, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index caef7569c53cf1315530c5534747cfaf8b329548..58ef94a0d552a588a05b4892b89b2504f78fbdc8 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -1,4 +1,5 @@ use crate::Diagnostic; +use collections::HashMap; use std::{ cmp::{Ordering, Reverse}, iter, @@ -18,6 +19,11 @@ pub struct DiagnosticEntry { pub diagnostic: Diagnostic, } +pub struct DiagnosticGroup { + pub primary: DiagnosticEntry, + pub supporting: Vec>, +} + #[derive(Clone, Debug)] pub struct Summary { start: Anchor, @@ -98,6 +104,40 @@ impl DiagnosticSet { }) } + pub fn groups(&self, buffer: &text::BufferSnapshot) -> Vec> + where + O: FromAnchor + Ord + Copy, + { + let mut groups = + HashMap::>, Vec>)>::default(); + + for entry in self.diagnostics.iter() { + let entry = entry.resolve(buffer); + let (ref mut primary, ref mut supporting) = groups + .entry(entry.diagnostic.group_id) + .or_insert((None, Vec::new())); + if entry.diagnostic.is_primary { + *primary = Some(entry); + } else { + supporting.push(entry); + } + } + + let mut groups = groups + .into_values() + .map(|(primary, mut supporting)| { + supporting.sort_unstable_by_key(|entry| entry.range.start); + DiagnosticGroup { + primary: primary.unwrap(), + supporting, + } + }) + .collect::>(); + groups.sort_unstable_by_key(|group| group.primary.range.start); + + groups + } + pub fn group<'a, O: FromAnchor>( &'a self, group_id: usize, diff --git a/crates/theme_selector/src/theme_selector.rs 
b/crates/theme_selector/src/theme_selector.rs index 545b512a8bfd4ec5a6ba9d3c83e0113e4e9441bd..b611330afd93d2896c4b2a65b67c3ba86cb595d6 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -9,7 +9,7 @@ use gpui::{ }; use parking_lot::Mutex; use postage::watch; -use std::{cmp, sync::Arc}; +use std::{cmp, rc::Rc, sync::Arc}; use theme::ThemeRegistry; use workspace::{Settings, Workspace}; @@ -64,14 +64,14 @@ impl ThemeSelector { Editor::single_line( { let settings = settings.clone(); - move |_| { + Rc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, style: settings.theme.selector.input_editor.as_editor(), soft_wrap: editor::SoftWrap::None, } - } + }) }, cx, ) From 7bcce23dc9f69dba4e9fbaebfa21bd3fadfbfbf9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 08:48:50 +0100 Subject: [PATCH 089/196] Fix compile error in server integration tests --- crates/server/src/rpc.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index c46e631c8b37ed33b2d5ee46ccedea42ccc0db7e..4ca7b34717cea15f8ce55283a41528c5d55bd0c4 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -932,6 +932,7 @@ mod tests { use std::{ ops::Deref, path::Path, + rc::Rc, sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, @@ -1046,7 +1047,7 @@ mod tests { .unwrap(); let editor_b = cx_b.add_view(window_b, |cx| { - Editor::for_buffer(buffer_b, |cx| EditorSettings::test(cx), cx) + Editor::for_buffer(buffer_b, Rc::new(|cx| EditorSettings::test(cx)), cx) }); // TODO // // Create a selection set as client B and see that selection set as client A. From 9cbb680fb2c0221fe5ebb868a954dde0a5b3deb0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 08:58:47 +0100 Subject: [PATCH 090/196] Fix panic on creation of a left-biased anchor at the end of MultiBuffer --- crates/editor/src/multi_buffer.rs | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 96e65ca484f7d63019d63dc409ad50b4adb50bcb..96002cb6df22c5c530c6b6a111a537c84d3d6360 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1193,7 +1193,7 @@ impl MultiBufferSnapshot { self.anchor_at(position, Bias::Right) } - pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { + pub fn anchor_at(&self, position: T, mut bias: Bias) -> Anchor { let offset = position.to_offset(self); let mut cursor = self.excerpts.cursor::<(usize, Option<&ExcerptId>)>(); cursor.seek(&offset, Bias::Right, &()); @@ -1202,8 +1202,13 @@ impl MultiBufferSnapshot { } if let Some(excerpt) = cursor.item() { let start_after_header = cursor.start().0 + excerpt.header_height as usize; + let mut overshoot = offset.saturating_sub(start_after_header); + if excerpt.has_trailing_newline && offset == cursor.end(&()).0 { + overshoot -= 1; + bias = Bias::Right; + } + let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); - let overshoot = offset.saturating_sub(start_after_header); Anchor { excerpt_id: excerpt.id.clone(), text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias), @@ -1910,12 +1915,16 @@ mod tests { assert_eq!(old_snapshot.text(), "\nabcd\n\nefghi\n"); assert_eq!(new_snapshot.text(), "\nWabcdX\n\nYefghiZ\n"); - assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); - 
assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); - assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 0); - assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 2); + assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2); + assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3); + assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3); assert_eq!(old_snapshot.anchor_before(7).to_offset(&new_snapshot), 9); assert_eq!(old_snapshot.anchor_after(7).to_offset(&new_snapshot), 10); + assert_eq!(old_snapshot.anchor_before(13).to_offset(&new_snapshot), 16); + assert_eq!(old_snapshot.anchor_after(13).to_offset(&new_snapshot), 17); } #[gpui::test(iterations = 100)] @@ -1934,7 +1943,7 @@ mod tests { match rng.gen_range(0..100) { 0..=19 if !buffers.is_empty() => { let buffer = buffers.choose(&mut rng).unwrap(); - buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 1, cx)); + buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); } _ => { let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { From e23965e7c95fcb3a312ed0881523bb958c0fb425 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 10:00:50 +0100 Subject: [PATCH 091/196] Implement `MultiBuffer::reversed_chars_at` --- crates/editor/src/multi_buffer.rs | 55 ++++++++++++++++++++++++++++--- crates/text/src/text.rs | 5 +++ 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 96002cb6df22c5c530c6b6a111a537c84d3d6360..d28beda8d7de25e9f5e8f278803d3e5ed03f2f58 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -14,6 +14,7 @@ use std::{ cmp, io, iter::{self, FromIterator, Peekable}, ops::{Range, Sub}, + str, sync::Arc, time::{Duration, Instant, SystemTime}, }; @@ -754,9 +755,47 @@ impl MultiBufferSnapshot { &'a self, position: T, ) -> impl Iterator + 'a { - // TODO - let offset = position.to_offset(self); - self.as_singleton().unwrap().reversed_chars_at(offset) + let mut offset = position.to_offset(self); + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&offset, Bias::Left, &()); + let mut excerpt_chunks = cursor.item().map(|excerpt| { + let start_after_header = cursor.start() + excerpt.header_height as usize; + let mut end_before_footer = cursor.start() + excerpt.text_summary.bytes; + if excerpt.has_trailing_newline { + end_before_footer -= 1; + } + + let start = excerpt.range.start.to_offset(&excerpt.buffer); + let end = + start + (cmp::min(offset, end_before_footer).saturating_sub(start_after_header)); + excerpt.buffer.reversed_chunks_in_range(start..end) + }); + iter::from_fn(move || { + if offset == *cursor.start() { + cursor.prev(&()); + let excerpt = cursor.item()?; + excerpt_chunks = Some( + excerpt + .buffer + .reversed_chunks_in_range(excerpt.range.clone()), + ); + } + + let excerpt = cursor.item().unwrap(); + if offset <= cursor.start() + excerpt.header_height as usize { + let header_height = offset - cursor.start(); + offset -= header_height; + Some(unsafe { str::from_utf8_unchecked(&NEWLINES[..header_height]) }) + } else if offset == cursor.end(&()) && excerpt.has_trailing_newline { + offset -= 1; + Some("\n") + } else { + let chunk = 
excerpt_chunks.as_mut().unwrap().next().unwrap(); + offset -= chunk.len(); + Some(chunk) + } + }) + .flat_map(|c| c.chars().rev()) } pub fn chars_at<'a, T: ToOffset>(&'a self, position: T) -> impl Iterator + 'a { @@ -1593,7 +1632,7 @@ impl<'a> Iterator for MultiBufferChunks<'a> { if self.header_height > 0 { let chunk = Chunk { text: unsafe { - std::str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) + str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) }, ..Default::default() }; @@ -2152,6 +2191,14 @@ mod tests { start_ix..end_ix ); } + + for _ in 0..10 { + let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); + assert_eq!( + expected_text[..end_ix].chars().rev().collect::(), + snapshot.reversed_chars_at(end_ix).collect::() + ); + } } let snapshot = list.read(cx).snapshot(cx); diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8114835b8219d83397d846802258f6302a73b5ec..055e80b29a78fafa4eb67c726dc985d1cb97bcc2 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1332,6 +1332,11 @@ impl BufferSnapshot { self.visible_text.reversed_chars_at(offset) } + pub fn reversed_chunks_in_range(&self, range: Range) -> rope::Chunks { + let range = range.start.to_offset(self)..range.end.to_offset(self); + self.visible_text.reversed_chunks_in_range(range) + } + pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range) -> rope::Bytes<'a> { let start = range.start.to_offset(self); let end = range.end.to_offset(self); From 95137ecb2a39496ef6a88cf726b2277cff8f40e6 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 13:20:11 +0100 Subject: [PATCH 092/196] WIP --- crates/editor/src/multi_buffer.rs | 132 +++++++++++++++++++++++++++--- 1 file changed, 120 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index d28beda8d7de25e9f5e8f278803d3e5ed03f2f58..f2d846c38a1a48b2438d686ce6bd55b278f11f10 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -11,8 +11,8 @@ use language::{ }; use std::{ cell::{Ref, RefCell}, - cmp, io, - iter::{self, FromIterator, Peekable}, + cmp, fmt, io, + iter::{self, FromIterator}, ops::{Range, Sub}, str, sync::Arc, @@ -56,11 +56,11 @@ struct Transaction { last_edit_at: Instant, } -pub trait ToOffset: 'static + std::fmt::Debug { +pub trait ToOffset: 'static + fmt::Debug { fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize; } -pub trait ToPoint: 'static + std::fmt::Debug { +pub trait ToPoint: 'static + fmt::Debug { fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; } @@ -121,7 +121,10 @@ pub struct MultiBufferChunks<'a> { } pub struct MultiBufferBytes<'a> { - chunks: Peekable>, + range: Range, + excerpts: Cursor<'a, Excerpt, usize>, + excerpt_chunks: Option>, + excerpt_chunk: &'a [u8], } impl MultiBuffer { @@ -920,9 +923,17 @@ impl MultiBufferSnapshot { } pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range) -> MultiBufferBytes<'a> { - MultiBufferBytes { - chunks: self.chunks(range, None).peekable(), - } + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut excerpts = self.excerpts.cursor::(); + excerpts.seek(&range.start, Bias::Right, &()); + let mut bytes = MultiBufferBytes { + range, + excerpts, + excerpt_chunks: None, + excerpt_chunk: &[], + }; + bytes.reset_excerpt_bytes(); + bytes } pub fn chunks<'a, T: ToOffset>( @@ -1518,6 +1529,19 @@ impl Excerpt { } } +impl fmt::Debug for Excerpt { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Excerpt") + .field("id", &self.id) + .field("buffer_id", &self.buffer_id) + .field("range", &self.range) + .field("text_summary", &self.text_summary) + .field("header_height", &self.header_height) + .field("has_trailing_newline", &self.has_trailing_newline) + .finish() + } +} + impl sum_tree::Item for Excerpt { type Summary = ExcerptSummary; @@ -1682,17 +1706,85 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } } +impl<'a> MultiBufferBytes<'a> { + fn peek(&mut self) -> Option<&'a [u8]> { + if self.range.is_empty() { + return None; + } + + let excerpt = self.excerpts.item()?; + let header_end = self.excerpts.start() + excerpt.header_height as usize; + let mut footer_start = self.excerpts.start() + excerpt.text_summary.bytes; + if excerpt.has_trailing_newline { + footer_start -= 1; + } + + dbg!(self.range.start, header_end, footer_start); + if self.range.start < header_end { + let header_height = cmp::min(header_end - self.range.start, self.range.len()); + Some(&NEWLINES[..header_height]) + } else if self.range.start == footer_start { + Some(&NEWLINES[..1]) + } else { + Some(self.excerpt_chunk) + } + } + + fn consume(&mut self, len: usize) { + self.range.start += len; + if self.range.is_empty() { + return; + } + + if let Some(excerpt) = self.excerpts.item() { + let header_end = self.excerpts.start() + excerpt.header_height as usize; + + if self.range.start == self.excerpts.end(&()) { + self.excerpts.next(&()); + self.reset_excerpt_bytes(); + } else if self.range.start > header_end { + self.excerpt_chunks.as_mut().unwrap().next(); + } + } + } + + fn reset_excerpt_bytes(&mut self) { + self.excerpt_chunks = self.excerpts.item().map(|excerpt| { + let start_after_header = self.excerpts.start() + excerpt.header_height as usize; + let mut end_before_footer = self.excerpts.start() + excerpt.text_summary.bytes; + if excerpt.has_trailing_newline { + end_before_footer -= 1; + } + + let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); + let start = buffer_start + self.range.start.saturating_sub(start_after_header); + let end = buffer_start + + cmp::min(self.range.end, end_before_footer).saturating_sub(start_after_header); + excerpt.buffer.bytes_in_range(start..end) + }); + } +} + impl<'a> Iterator for MultiBufferBytes<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option { - self.chunks.next().map(|chunk| chunk.text.as_bytes()) + let result = self.peek()?; + self.consume(result.len()); + Some(result) } } impl<'a> io::Read for MultiBufferBytes<'a> { - fn read(&mut self, _: &mut [u8]) -> io::Result { - todo!() + fn read(&mut self, buf: &mut [u8]) -> io::Result { + if let Some(chunk) = self.peek() { + let len = cmp::min(buf.len(), chunk.len()); + buf[..len].copy_from_slice(&chunk[..len]); + self.consume(len); + Ok(len) + } else { + Ok(0) + } } } @@ -2195,8 +2287,24 @@ mod tests { for _ in 0..10 { let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); assert_eq!( + snapshot.reversed_chars_at(end_ix).collect::(), expected_text[..end_ix].chars().rev().collect::(), - snapshot.reversed_chars_at(end_ix).collect::() + ); + } + + for _ in 0..10 { + let end_ix = rng.gen_range(0..=snapshot.len()); + let start_ix = rng.gen_range(0..=end_ix); + dbg!(&expected_text); + assert_eq!( + snapshot + .bytes_in_range(start_ix..end_ix) + .flatten() + .copied() + .collect::>(), + expected_text.as_bytes()[start_ix..end_ix].to_vec(), + "bytes_in_range({:?})", + start_ix..end_ix, ); } } From fbba417f090795df0932c24c99db5d74e2adfb4b 
Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 15 Dec 2021 06:49:10 -0700 Subject: [PATCH 093/196] Implement MultiBuffer::bytes_in_range Co-Authored-By: Antonio Scandurra --- crates/editor/src/multi_buffer.rs | 174 ++++++++++++++++++------------ 1 file changed, 106 insertions(+), 68 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index f2d846c38a1a48b2438d686ce6bd55b278f11f10..c9412920b5821ad7c64d4b2053918745d8494db5 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -123,8 +123,14 @@ pub struct MultiBufferChunks<'a> { pub struct MultiBufferBytes<'a> { range: Range, excerpts: Cursor<'a, Excerpt, usize>, - excerpt_chunks: Option>, - excerpt_chunk: &'a [u8], + excerpt_bytes: Option>, + chunk: &'a [u8], +} + +struct ExcerptBytes<'a> { + header_height: usize, + content_bytes: language::rope::Bytes<'a>, + footer_height: usize, } impl MultiBuffer { @@ -926,14 +932,25 @@ impl MultiBufferSnapshot { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut excerpts = self.excerpts.cursor::(); excerpts.seek(&range.start, Bias::Right, &()); - let mut bytes = MultiBufferBytes { + + let mut chunk = &[][..]; + let excerpt_bytes = if let Some(excerpt) = excerpts.item() { + let mut excerpt_bytes = excerpt.bytes_in_range( + range.start - excerpts.start() + ..cmp::min(range.end - excerpts.start(), excerpt.text_summary.bytes), + ); + chunk = excerpt_bytes.next().unwrap_or(&[][..]); + Some(excerpt_bytes) + } else { + None + }; + + MultiBufferBytes { range, excerpts, - excerpt_chunks: None, - excerpt_chunk: &[], - }; - bytes.reset_excerpt_bytes(); - bytes + excerpt_bytes, + chunk, + } } pub fn chunks<'a, T: ToOffset>( @@ -1527,6 +1544,34 @@ impl Excerpt { longest_row_chars: 0, } } + + fn bytes_in_range(&self, range: Range) -> ExcerptBytes { + let content_start = self.range.start.to_offset(&self.buffer); + let bytes_start = content_start + range.start.saturating_sub(self.header_height as usize); + let mut bytes_end = content_start + + cmp::min(range.end, self.text_summary.bytes) + .saturating_sub(self.header_height as usize); + + let header_height = cmp::min( + (self.header_height as usize).saturating_sub(range.start), + range.len(), + ); + let mut footer_height = 0; + if self.has_trailing_newline && range.end == self.text_summary.bytes { + bytes_end -= 1; + if !range.is_empty() { + footer_height = 1; + } + } + + let content_bytes = self.buffer.bytes_in_range(bytes_start..bytes_end); + + ExcerptBytes { + header_height, + content_bytes, + footer_height, + } + } } impl fmt::Debug for Excerpt { @@ -1707,84 +1752,78 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } impl<'a> MultiBufferBytes<'a> { - fn peek(&mut self) -> Option<&'a [u8]> { - if self.range.is_empty() { - return None; - } - - let excerpt = self.excerpts.item()?; - let header_end = self.excerpts.start() + excerpt.header_height as usize; - let mut footer_start = self.excerpts.start() + excerpt.text_summary.bytes; - if excerpt.has_trailing_newline { - footer_start -= 1; - } - - dbg!(self.range.start, header_end, footer_start); - if self.range.start < header_end { - let header_height = cmp::min(header_end - self.range.start, self.range.len()); - Some(&NEWLINES[..header_height]) - } else if self.range.start == footer_start { - Some(&NEWLINES[..1]) - } else { - Some(self.excerpt_chunk) - } - } - fn consume(&mut self, len: usize) { - self.range.start += len; - if self.range.is_empty() { - return; - } + assert!(len > 0); - if let Some(excerpt) = 
self.excerpts.item() { - let header_end = self.excerpts.start() + excerpt.header_height as usize; + self.range.start += len; + self.chunk = &self.chunk[len..]; - if self.range.start == self.excerpts.end(&()) { + if !self.range.is_empty() && self.chunk.is_empty() { + if let Some(chunk) = self.excerpt_bytes.as_mut().and_then(|bytes| bytes.next()) { + self.chunk = chunk; + } else { self.excerpts.next(&()); - self.reset_excerpt_bytes(); - } else if self.range.start > header_end { - self.excerpt_chunks.as_mut().unwrap().next(); + if let Some(excerpt) = self.excerpts.item() { + let mut excerpt_bytes = excerpt.bytes_in_range( + 0..cmp::min( + self.range.end - self.excerpts.start(), + excerpt.text_summary.bytes, + ), + ); + self.chunk = excerpt_bytes.next().unwrap(); + self.excerpt_bytes = Some(excerpt_bytes); + } } } } - - fn reset_excerpt_bytes(&mut self) { - self.excerpt_chunks = self.excerpts.item().map(|excerpt| { - let start_after_header = self.excerpts.start() + excerpt.header_height as usize; - let mut end_before_footer = self.excerpts.start() + excerpt.text_summary.bytes; - if excerpt.has_trailing_newline { - end_before_footer -= 1; - } - - let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); - let start = buffer_start + self.range.start.saturating_sub(start_after_header); - let end = buffer_start - + cmp::min(self.range.end, end_before_footer).saturating_sub(start_after_header); - excerpt.buffer.bytes_in_range(start..end) - }); - } } impl<'a> Iterator for MultiBufferBytes<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option { - let result = self.peek()?; - self.consume(result.len()); - Some(result) + let chunk = self.chunk; + if chunk.is_empty() { + None + } else { + self.consume(chunk.len()); + Some(chunk) + } } } impl<'a> io::Read for MultiBufferBytes<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result { - if let Some(chunk) = self.peek() { - let len = cmp::min(buf.len(), chunk.len()); - buf[..len].copy_from_slice(&chunk[..len]); - self.consume(len); - Ok(len) - } else { - Ok(0) + let len = cmp::min(buf.len(), self.chunk.len()); + buf[..len].copy_from_slice(&self.chunk[..len]); + self.consume(len); + Ok(len) + } +} + +impl<'a> Iterator for ExcerptBytes<'a> { + type Item = &'a [u8]; + + fn next(&mut self) -> Option { + if self.header_height > 0 { + let result = &NEWLINES[..self.header_height]; + self.header_height = 0; + return Some(result); } + + if let Some(chunk) = self.content_bytes.next() { + if !chunk.is_empty() { + return Some(chunk); + } + } + + if self.footer_height > 0 { + let result = &NEWLINES[..self.footer_height]; + self.footer_height = 0; + return Some(result); + } + + None } } @@ -2295,7 +2334,6 @@ mod tests { for _ in 0..10 { let end_ix = rng.gen_range(0..=snapshot.len()); let start_ix = rng.gen_range(0..=end_ix); - dbg!(&expected_text); assert_eq!( snapshot .bytes_in_range(start_ix..end_ix) From 437145afbe00177d10ee90c2ba224aa9e1e1ba85 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Wed, 15 Dec 2021 06:52:37 -0700 Subject: [PATCH 094/196] Remove assertion and don't consume 0 bytes Co-Authored-By: Antonio Scandurra --- crates/editor/src/multi_buffer.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index c9412920b5821ad7c64d4b2053918745d8494db5..52785a12027a4f57840e5310c6bfd651f69a1fa0 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1753,8 +1753,6 @@ impl<'a> Iterator for MultiBufferChunks<'a> { impl<'a> 
MultiBufferBytes<'a> { fn consume(&mut self, len: usize) { - assert!(len > 0); - self.range.start += len; self.chunk = &self.chunk[len..]; @@ -1796,7 +1794,9 @@ impl<'a> io::Read for MultiBufferBytes<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result { let len = cmp::min(buf.len(), self.chunk.len()); buf[..len].copy_from_slice(&self.chunk[..len]); - self.consume(len); + if len > 0 { + self.consume(len); + } Ok(len) } } From 59121a238adca185fddf10d2b6ad29d6bd6cb544 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 15:07:42 +0100 Subject: [PATCH 095/196] Forward notifications from `Buffer` in `MultiBuffer` Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 52785a12027a4f57840e5310c6bfd651f69a1fa0..1dffed7739238aecf3a781c29ccfa055d77149c6 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -527,6 +527,7 @@ impl MultiBuffer { self.sync(cx); let buffer = props.buffer.clone(); + cx.observe(&buffer, |_, _, cx| cx.notify()).detach(); cx.subscribe(&buffer, Self::on_buffer_event).detach(); let buffer_snapshot = buffer.read(cx).snapshot(); From ae0fa75abe96ea1b4c3aeb3f4010c796fb51aacd Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 16:05:26 +0100 Subject: [PATCH 096/196] Start testing the integration of display layers with `MultiBuffer`s Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/fold_map.rs | 6 +- crates/editor/src/multi_buffer.rs | 118 +++++++++++++++++----- 2 files changed, 98 insertions(+), 26 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 662c381f617979f06ea52808f94526faf5a9cfeb..e53b1b4218744e06d0f4935512d08b59b4e39eca 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1251,7 +1251,11 @@ mod tests { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); - let buffer = MultiBuffer::build_simple(&text, cx); + let buffer = if rng.gen() { + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(1, &mut rng, cx) + }; let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1dffed7739238aecf3a781c29ccfa055d77149c6..e12fcf49ca1dfe2221fb6e725e37f4357e3adffc 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -166,11 +166,65 @@ impl MultiBuffer { this } + #[cfg(any(test, feature = "test-support"))] pub fn build_simple(text: &str, cx: &mut MutableAppContext) -> ModelHandle { let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); cx.add_model(|cx| Self::singleton(buffer, cx)) } + #[cfg(any(test, feature = "test-support"))] + pub fn build_random( + excerpts: usize, + mut rng: &mut impl rand::Rng, + cx: &mut MutableAppContext, + ) -> ModelHandle { + use rand::prelude::*; + use text::RandomCharIter; + + cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + let mut buffers = Vec::new(); + for _ in 0..excerpts { + let buffer_handle = if rng.gen() || buffers.is_empty() { + let text = RandomCharIter::new(&mut rng).take(10).collect::(); + buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx))); + let buffer = buffers.last().unwrap(); + log::info!( + "Creating new buffer {} with text: 
{:?}", + buffer.id(), + buffer.read(cx).text() + ); + buffers.last().unwrap() + } else { + buffers.choose(rng).unwrap() + }; + + let buffer = buffer_handle.read(cx); + let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); + let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let header_height = rng.gen_range(0..=5); + log::info!( + "Inserting excerpt from buffer {} with header height {} and range {:?}: {:?}", + buffer_handle.id(), + header_height, + start_ix..end_ix, + &buffer.text()[start_ix..end_ix] + ); + + multibuffer.push_excerpt( + ExcerptProperties { + buffer: buffer_handle, + range: start_ix..end_ix, + header_height, + render_header: None, + }, + cx, + ); + } + multibuffer + }) + } + pub fn replica_id(&self) -> ReplicaId { self.replica_id } @@ -705,16 +759,31 @@ impl MultiBuffer { #[cfg(any(test, feature = "test-support"))] impl MultiBuffer { - pub fn randomly_edit( + pub fn randomly_edit( &mut self, - rng: &mut R, + rng: &mut impl rand::Rng, count: usize, cx: &mut ModelContext, ) { - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.randomly_edit(rng, count, cx)); - self.sync(cx); + use text::RandomCharIter; + + let snapshot = self.read(cx); + let mut old_ranges: Vec> = Vec::new(); + for _ in 0..count { + let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1); + if last_end > snapshot.len() { + break; + } + let end_ix = snapshot.clip_offset(rng.gen_range(0..=last_end), Bias::Right); + let start_ix = snapshot.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + old_ranges.push(start_ix..end_ix); + } + let new_text_len = rng.gen_range(0..10); + let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); + log::info!("mutating multi-buffer at {:?}: {:?}", old_ranges, new_text); + drop(snapshot); + + self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx); } } @@ -858,20 +927,20 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&offset, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let start_after_header = *cursor.start() + excerpt.header_height as usize; - if offset < start_after_header { - *cursor.start() + let header_end = *cursor.start() + excerpt.header_height as usize; + if offset < header_end { + header_end } else { let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); let buffer_offset = excerpt .buffer - .clip_offset(excerpt_start + (offset - start_after_header), bias); + .clip_offset(excerpt_start + (offset - header_end), bias); let offset_in_excerpt = if buffer_offset > excerpt_start { buffer_offset - excerpt_start } else { 0 }; - start_after_header + offset_in_excerpt + header_end + offset_in_excerpt } } else { self.excerpts.summary().text.bytes @@ -882,20 +951,20 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let start_after_header = *cursor.start() + Point::new(excerpt.header_height as u32, 0); - if point < start_after_header { - *cursor.start() + let header_end = *cursor.start() + Point::new(excerpt.header_height as u32, 0); + if point < header_end { + header_end } else { let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); let buffer_point = excerpt .buffer - .clip_point(excerpt_start + (point - start_after_header), bias); + .clip_point(excerpt_start + (point - header_end), bias); let point_in_excerpt = if buffer_point > excerpt_start { buffer_point - excerpt_start } else { 
Point::zero() }; - start_after_header + point_in_excerpt + header_end + point_in_excerpt } } else { self.excerpts.summary().text.lines @@ -906,23 +975,22 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&point, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let start_after_header = - *cursor.start() + PointUtf16::new(excerpt.header_height as u32, 0); - if point < start_after_header { - *cursor.start() + let header_end = *cursor.start() + PointUtf16::new(excerpt.header_height as u32, 0); + if point < header_end { + header_end } else { let excerpt_start = excerpt .buffer .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); let buffer_point = excerpt .buffer - .clip_point_utf16(excerpt_start + (point - start_after_header), bias); + .clip_point_utf16(excerpt_start + (point - header_end), bias); let point_in_excerpt = if buffer_point > excerpt_start { buffer_point - excerpt_start } else { PointUtf16::new(0, 0) }; - start_after_header + point_in_excerpt + header_end + point_in_excerpt } } else { self.excerpts.summary().text.lines_utf16 @@ -979,7 +1047,7 @@ impl MultiBufferSnapshot { let overshoot = offset - start_offset; let header_height = excerpt.header_height as usize; if overshoot < header_height { - *start_point + *start_point + Point::new(overshoot as u32, 0) } else { let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); @@ -1003,7 +1071,7 @@ impl MultiBufferSnapshot { let overshoot = point - start_point; let header_height = Point::new(excerpt.header_height as u32, 0); if overshoot < header_height { - *start_offset + start_offset + overshoot.row as usize } else { let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); @@ -1026,7 +1094,7 @@ impl MultiBufferSnapshot { let overshoot = point - start_point; let header_height = PointUtf16::new(excerpt.header_height as u32, 0); if overshoot < header_height { - *start_offset + start_offset + overshoot.row as usize } else { let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); let excerpt_start_point = excerpt From 7bbaa1d9303c1c792573063d7ab1142aa18417cc Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 16:39:09 +0100 Subject: [PATCH 097/196] Don't insert a newline after the last excerpt of a `MultiBuffer` Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/fold_map.rs | 2 +- crates/editor/src/multi_buffer.rs | 34 +++++++++++++++-------- 2 files changed, 23 insertions(+), 13 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index e53b1b4218744e06d0f4935512d08b59b4e39eca..6e954e60faa8fd5984a1ef67bed41b3392b33ebb 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1273,12 +1273,12 @@ mod tests { let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count, cx); + buffer_snapshot = buffer.snapshot(cx); let edits = subscription.consume().into_inner(); log::info!("editing {:?}", edits); buffer_edits.extend(edits); }), }; - buffer_snapshot = buffer.read(cx).snapshot(cx); let (snapshot, edits) = map.read(buffer_snapshot.clone(), buffer_edits); snapshot_edits.push((snapshot.clone(), edits)); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 
e12fcf49ca1dfe2221fb6e725e37f4357e3adffc..1ed34c4c90a9d6ee210590d6572614e4109ef4fb 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -592,10 +592,18 @@ impl MultiBuffer { let last_diagnostics_update_count = buffer_snapshot.diagnostics_update_count(); let mut snapshot = self.snapshot.borrow_mut(); - let prev_id = snapshot.excerpts.last().map(|e| &e.id); - let id = ExcerptId::between(prev_id.unwrap_or(&ExcerptId::min()), &ExcerptId::max()); - + let mut prev_id = None; let edit_start = snapshot.excerpts.summary().text.bytes; + snapshot.excerpts.update_last( + |excerpt| { + excerpt.has_trailing_newline = true; + excerpt.text_summary += TextSummary::from("\n"); + prev_id = Some(excerpt.id.clone()); + }, + &(), + ); + + let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); let excerpt = Excerpt::new( id.clone(), buffer.id(), @@ -603,12 +611,8 @@ impl MultiBuffer { range, props.header_height, props.render_header, - !self.singleton, + false, ); - let edit = Edit { - old: edit_start..edit_start, - new: edit_start..edit_start + excerpt.text_summary.bytes, - }; snapshot.excerpts.push(excerpt, &()); self.buffers .entry(props.buffer.id()) @@ -621,8 +625,10 @@ impl MultiBuffer { }) .excerpts .push(id.clone()); - - self.subscriptions.publish_mut([edit]); + self.subscriptions.publish_mut([Edit { + old: edit_start..edit_start, + new: edit_start..snapshot.excerpts.summary().text.bytes, + }]); cx.notify(); @@ -738,7 +744,7 @@ impl MultiBuffer { old_excerpt.range.clone(), old_excerpt.header_height, old_excerpt.render_header.clone(), - !self.singleton, + old_excerpt.has_trailing_newline, ); } else { new_excerpt = old_excerpt.clone(); @@ -2245,6 +2251,10 @@ mod tests { expected_text.extend(buffer.text_for_range(buffer_range.clone())); expected_text.push('\n'); } + // Remove final trailing newline. 
+ if !expected_excerpts.is_empty() { + expected_text.pop(); + } assert_eq!(snapshot.text(), expected_text); @@ -2421,7 +2431,7 @@ mod tests { let edits = subscription.consume().into_inner(); log::info!( - "applying edits since old text: {:?}: {:?}", + "applying subscription edits to old text: {:?}: {:?}", old_snapshot.text(), edits, ); From bcdb4ffd88a5260c1b91497e193aaaa65eb9adaa Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 16:45:01 +0100 Subject: [PATCH 098/196] Allow edits at the end of `MultiBuffer` Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/fold_map.rs | 2 +- crates/editor/src/multi_buffer.rs | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 6e954e60faa8fd5984a1ef67bed41b3392b33ebb..972e152c184bcddb00c5688876859ac4a4f302da 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1254,7 +1254,7 @@ mod tests { let buffer = if rng.gen() { MultiBuffer::build_simple(&text, cx) } else { - MultiBuffer::build_random(1, &mut rng, cx) + MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) }; let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 1ed34c4c90a9d6ee210590d6572614e4109ef4fb..bc15424ae905cfc8a2d343d2a3572e798ce8ca74 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -305,6 +305,9 @@ impl MultiBuffer { let start = range.start.to_offset(&snapshot); let end = range.end.to_offset(&snapshot); cursor.seek(&start, Bias::Right, &()); + if cursor.item().is_none() && start == *cursor.start() { + cursor.prev(&()); + } let start_excerpt = cursor.item().expect("start offset out of bounds"); let start_overshoot = (start - cursor.start()).saturating_sub(start_excerpt.header_height as usize); @@ -312,6 +315,9 @@ impl MultiBuffer { start_excerpt.range.start.to_offset(&start_excerpt.buffer) + start_overshoot; cursor.seek(&end, Bias::Right, &()); + if cursor.item().is_none() && end == *cursor.start() { + cursor.prev(&()); + } let end_excerpt = cursor.item().expect("end offset out of bounds"); let end_overshoot = (end - cursor.start()).saturating_sub(end_excerpt.header_height as usize); From 5118f27a9062101a2022333acd2bb2201138853e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 17:22:04 +0100 Subject: [PATCH 099/196] Overhaul `MultiBuffer::chunks` Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/tab_map.rs | 16 +- crates/editor/src/multi_buffer.rs | 210 ++++++++++++----------- 2 files changed, 123 insertions(+), 103 deletions(-) diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 5f62582581f0d303808d44ed13549257ffa4c965..14e54c9523a3eb2b232bbe1688796e54abbee94f 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -451,11 +451,15 @@ mod tests { } #[gpui::test(iterations = 100)] - fn test_random(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { + fn test_random_tabs(cx: &mut gpui::MutableAppContext, mut rng: StdRng) { let tab_size = rng.gen_range(1..=4); let len = rng.gen_range(0..30); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - let buffer = MultiBuffer::build_simple(&text, cx); + let buffer = if rng.gen() { + let text = RandomCharIter::new(&mut 
rng).take(len).collect::(); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + }; let buffer_snapshot = buffer.read(cx).snapshot(cx); log::info!("Buffer text: {:?}", buffer_snapshot.text()); @@ -488,13 +492,15 @@ mod tests { .chunks_in_range(text.point_to_offset(start.0)..text.point_to_offset(end.0)) .collect::(); let expected_summary = TextSummary::from(expected_text.as_str()); - log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text); assert_eq!( expected_text, tabs_snapshot .chunks(start..end, None) .map(|c| c.text) - .collect::() + .collect::(), + "chunks({:?}..{:?})", + start, + end ); let mut actual_summary = tabs_snapshot.text_summary_for_range(start..end); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index bc15424ae905cfc8a2d343d2a3572e798ce8ca74..59f6db3ced0b171dedb8567db1ca678319c6e8c3 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -113,10 +113,8 @@ struct ExcerptSummary { pub struct MultiBufferChunks<'a> { range: Range, - cursor: Cursor<'a, Excerpt, usize>, - header_height: u8, - has_trailing_newline: bool, - excerpt_chunks: Option>, + excerpts: Cursor<'a, Excerpt, usize>, + excerpt_chunks: Option>, theme: Option<&'a SyntaxTheme>, } @@ -127,6 +125,12 @@ pub struct MultiBufferBytes<'a> { chunk: &'a [u8], } +struct ExcerptChunks<'a> { + header_height: usize, + content_chunks: BufferChunks<'a>, + footer_height: usize, +} + struct ExcerptBytes<'a> { header_height: usize, content_bytes: language::rope::Bytes<'a>, @@ -1039,16 +1043,15 @@ impl MultiBufferSnapshot { range: Range, theme: Option<&'a SyntaxTheme>, ) -> MultiBufferChunks<'a> { - let mut result = MultiBufferChunks { - range: 0..range.end.to_offset(self), - cursor: self.excerpts.cursor::(), - header_height: 0, + let range = range.start.to_offset(self)..range.end.to_offset(self); + let mut chunks = MultiBufferChunks { + range: range.clone(), + excerpts: self.excerpts.cursor(), excerpt_chunks: None, - has_trailing_newline: false, theme, }; - result.seek(range.start.to_offset(self)); - result + chunks.seek(range.start); + chunks } pub fn offset_to_point(&self, offset: usize) -> Point { @@ -1626,6 +1629,38 @@ impl Excerpt { } } + fn chunks_in_range<'a>( + &'a self, + range: Range, + theme: Option<&'a SyntaxTheme>, + ) -> ExcerptChunks<'a> { + let content_start = self.range.start.to_offset(&self.buffer); + let chunks_start = content_start + range.start.saturating_sub(self.header_height as usize); + let mut chunks_end = content_start + + cmp::min(range.end, self.text_summary.bytes) + .saturating_sub(self.header_height as usize); + + let header_height = cmp::min( + (self.header_height as usize).saturating_sub(range.start), + range.len(), + ); + let mut footer_height = 0; + if self.has_trailing_newline && range.end == self.text_summary.bytes { + chunks_end -= 1; + if !range.is_empty() { + footer_height = 1; + } + } + + let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme); + + ExcerptChunks { + header_height, + content_chunks, + footer_height, + } + } + fn bytes_in_range(&self, range: Range) -> ExcerptBytes { let content_start = self.range.start.to_offset(&self.buffer); let bytes_start = content_start + range.start.saturating_sub(self.header_height as usize); @@ -1738,38 +1773,18 @@ impl<'a> MultiBufferChunks<'a> { pub fn seek(&mut self, offset: usize) { self.range.start = offset; - self.cursor.seek_forward(&offset, Bias::Right, &()); - self.header_height = 0; - 
self.excerpt_chunks = None; - if let Some(excerpt) = self.cursor.item() { - let buffer_range = excerpt.range.to_offset(&excerpt.buffer); - self.header_height = excerpt.header_height; - self.has_trailing_newline = excerpt.has_trailing_newline; - - let buffer_start; - let start_overshoot = self.range.start - self.cursor.start(); - if start_overshoot < excerpt.header_height as usize { - self.header_height -= start_overshoot as u8; - buffer_start = buffer_range.start; - } else { - buffer_start = - buffer_range.start + start_overshoot - excerpt.header_height as usize; - self.header_height = 0; - } - - let buffer_end; - let end_overshoot = self.range.end - self.cursor.start(); - if end_overshoot < excerpt.header_height as usize { - self.header_height -= excerpt.header_height - end_overshoot as u8; - buffer_end = buffer_start; - } else { - buffer_end = cmp::min( - buffer_range.end, - buffer_range.start + end_overshoot - excerpt.header_height as usize, - ); - } - - self.excerpt_chunks = Some(excerpt.buffer.chunks(buffer_start..buffer_end, self.theme)); + self.excerpts.seek(&offset, Bias::Right, &()); + if let Some(excerpt) = self.excerpts.item() { + self.excerpt_chunks = Some(excerpt.chunks_in_range( + self.range.start - self.excerpts.start() + ..cmp::min( + self.range.end - self.excerpts.start(), + excerpt.text_summary.bytes, + ), + self.theme, + )); + } else { + self.excerpt_chunks = None; } } } @@ -1778,56 +1793,22 @@ impl<'a> Iterator for MultiBufferChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { - loop { - if self.header_height > 0 { - let chunk = Chunk { - text: unsafe { - str::from_utf8_unchecked(&NEWLINES[..self.header_height as usize]) - }, - ..Default::default() - }; - self.range.start += self.header_height as usize; - self.header_height = 0; - return Some(chunk); - } - - if let Some(excerpt_chunks) = self.excerpt_chunks.as_mut() { - if let Some(chunk) = excerpt_chunks.next() { - self.range.start += chunk.text.len(); - return Some(chunk); - } - self.excerpt_chunks.take(); - if self.has_trailing_newline && self.cursor.end(&()) <= self.range.end { - self.range.start += 1; - return Some(Chunk { - text: "\n", - ..Default::default() - }); - } - } - - self.cursor.next(&()); - if *self.cursor.start() >= self.range.end { - return None; - } - - let excerpt = self.cursor.item()?; - let buffer_range = excerpt.range.to_offset(&excerpt.buffer); - - let buffer_end = cmp::min( - buffer_range.end, - buffer_range.start + self.range.end - - excerpt.header_height as usize - - self.cursor.start(), - ); - - self.header_height = excerpt.header_height; - self.has_trailing_newline = excerpt.has_trailing_newline; - self.excerpt_chunks = Some( - excerpt - .buffer - .chunks(buffer_range.start..buffer_end, self.theme), - ); + if self.range.is_empty() { + None + } else if let Some(chunk) = self.excerpt_chunks.as_mut()?.next() { + self.range.start += chunk.text.len(); + Some(chunk) + } else { + self.excerpts.next(&()); + let excerpt = self.excerpts.item()?; + self.excerpt_chunks = Some(excerpt.chunks_in_range( + 0..cmp::min( + self.range.end - self.excerpts.start(), + excerpt.text_summary.bytes, + ), + self.theme, + )); + self.next() } } } @@ -1908,6 +1889,38 @@ impl<'a> Iterator for ExcerptBytes<'a> { } } +impl<'a> Iterator for ExcerptChunks<'a> { + type Item = Chunk<'a>; + + fn next(&mut self) -> Option { + if self.header_height > 0 { + let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.header_height]) }; + self.header_height = 0; + return Some(Chunk { + text, + ..Default::default() + 
}); + } + + if let Some(chunk) = self.content_chunks.next() { + if !chunk.text.is_empty() { + return Some(chunk); + } + } + + if self.footer_height > 0 { + let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.footer_height]) }; + self.footer_height = 0; + return Some(Chunk { + text, + ..Default::default() + }); + } + + None + } +} + impl ToOffset for Point { fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize { snapshot.point_to_offset(*self) @@ -1943,7 +1956,7 @@ impl ToPoint for Point { mod tests { use super::*; use gpui::{elements::Empty, Element, MutableAppContext}; - use language::Buffer; + use language::{Buffer, Rope}; use rand::prelude::*; use std::env; use text::{Point, RandomCharIter}; @@ -2386,9 +2399,10 @@ mod tests { ); } + let text_rope = Rope::from(expected_text.as_str()); for _ in 0..10 { - let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); - let start_ix = snapshot.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); + let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); + let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); assert_eq!( snapshot @@ -2409,7 +2423,7 @@ mod tests { } for _ in 0..10 { - let end_ix = snapshot.clip_offset(rng.gen_range(0..=snapshot.len()), Bias::Right); + let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right); assert_eq!( snapshot.reversed_chars_at(end_ix).collect::(), expected_text[..end_ix].chars().rev().collect::(), @@ -2417,7 +2431,7 @@ mod tests { } for _ in 0..10 { - let end_ix = rng.gen_range(0..=snapshot.len()); + let end_ix = rng.gen_range(0..=text_rope.len()); let start_ix = rng.gen_range(0..=end_ix); assert_eq!( snapshot From 4ab307f0a1586a05dcdc86708b821b639318ef90 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 17:51:11 +0100 Subject: [PATCH 100/196] Re-enable multi-byte random character generation Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 191 +++++++++++++++------------- crates/text/src/random_char_iter.rs | 8 +- 2 files changed, 106 insertions(+), 93 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 59f6db3ced0b171dedb8567db1ca678319c6e8c3..6fcc6e4a823a5a82fc9f0a017137cbe06a6b7ead 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2276,6 +2276,7 @@ mod tests { } assert_eq!(snapshot.text(), expected_text); + log::info!("MultiBuffer text: {:?}", expected_text); let mut excerpt_starts = excerpt_starts.into_iter(); for (buffer, range, _) in &expected_excerpts { @@ -2293,99 +2294,111 @@ mod tests { let mut buffer_point = buffer_start_point; let mut point_utf16 = excerpt_start.lines_utf16; let mut buffer_point_utf16 = buffer_start_point_utf16; - for byte in buffer.bytes_in_range(buffer_range.clone()).flatten() { - let left_offset = snapshot.clip_offset(offset, Bias::Left); - let right_offset = snapshot.clip_offset(offset, Bias::Right); - let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); - let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); - assert_eq!( - left_offset, - excerpt_start.bytes + (buffer_left_offset - buffer_range.start), - "clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}", - offset, - buffer_id, - buffer_offset, - ); - assert_eq!( - right_offset, - excerpt_start.bytes + (buffer_right_offset - buffer_range.start), - "clip_offset({:?}, Right). 
buffer: {:?}, buffer offset: {:?}", - offset, - buffer_id, - buffer_offset, - ); + for ch in buffer + .snapshot() + .chunks(buffer_range.clone(), None) + .flat_map(|c| c.text.chars()) + { + for _ in 0..ch.len_utf8() { + let left_offset = snapshot.clip_offset(offset, Bias::Left); + let right_offset = snapshot.clip_offset(offset, Bias::Right); + let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left); + let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right); + assert_eq!( + left_offset, + excerpt_start.bytes + (buffer_left_offset - buffer_range.start), + "clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}", + offset, + buffer_id, + buffer_offset, + ); + assert_eq!( + right_offset, + excerpt_start.bytes + (buffer_right_offset - buffer_range.start), + "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}", + offset, + buffer_id, + buffer_offset, + ); - let left_point = snapshot.clip_point(point, Bias::Left); - let right_point = snapshot.clip_point(point, Bias::Right); - let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left); - let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right); - assert_eq!( - left_point, - excerpt_start.lines + (buffer_left_point - buffer_start_point), - "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}", - point, - buffer_id, - buffer_point, - ); - assert_eq!( - right_point, - excerpt_start.lines + (buffer_right_point - buffer_start_point), - "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}", - point, - buffer_id, - buffer_point, - ); + let left_point = snapshot.clip_point(point, Bias::Left); + let right_point = snapshot.clip_point(point, Bias::Right); + let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left); + let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right); + assert_eq!( + left_point, + excerpt_start.lines + (buffer_left_point - buffer_start_point), + "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}", + point, + buffer_id, + buffer_point, + ); + assert_eq!( + right_point, + excerpt_start.lines + (buffer_right_point - buffer_start_point), + "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}", + point, + buffer_id, + buffer_point, + ); - let left_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Left); - let right_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Right); - let buffer_left_point_utf16 = - buffer.clip_point_utf16(buffer_point_utf16, Bias::Left); - let buffer_right_point_utf16 = - buffer.clip_point_utf16(buffer_point_utf16, Bias::Right); - assert_eq!( - left_point_utf16, - excerpt_start.lines_utf16 - + (buffer_left_point_utf16 - buffer_start_point_utf16), - "clip_point_utf16({:?}, Left). buffer: {:?}, buffer point_utf16: {:?}", - point_utf16, - buffer_id, - buffer_point_utf16, - ); - assert_eq!( - right_point_utf16, - excerpt_start.lines_utf16 - + (buffer_right_point_utf16 - buffer_start_point_utf16), - "clip_point_utf16({:?}, Right). 
buffer: {:?}, buffer point_utf16: {:?}", - point_utf16, - buffer_id, - buffer_point_utf16, - ); + assert_eq!( + snapshot.point_to_offset(left_point), + left_offset, + "point_to_offset({:?})", + left_point, + ); + assert_eq!( + snapshot.offset_to_point(left_offset), + left_point, + "offset_to_point({:?})", + left_offset, + ); - assert_eq!( - snapshot.point_to_offset(left_point), - left_offset, - "point_to_offset({:?})", - left_point, - ); - assert_eq!( - snapshot.offset_to_point(left_offset), - left_point, - "offset_to_point({:?})", - left_offset, - ); + offset += 1; + buffer_offset += 1; + if ch == '\n' { + point += Point::new(1, 0); + buffer_point += Point::new(1, 0); + } else { + point += Point::new(0, 1); + buffer_point += Point::new(0, 1); + } + } - offset += 1; - buffer_offset += 1; - if *byte == b'\n' { - point += Point::new(1, 0); - point_utf16 += PointUtf16::new(1, 0); - buffer_point += Point::new(1, 0); - buffer_point_utf16 += PointUtf16::new(1, 0); - } else { - point += Point::new(0, 1); - point_utf16 += PointUtf16::new(0, 1); - buffer_point += Point::new(0, 1); - buffer_point_utf16 += PointUtf16::new(0, 1); + for _ in 0..ch.len_utf16() { + let left_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Left); + let right_point_utf16 = snapshot.clip_point_utf16(point_utf16, Bias::Right); + let buffer_left_point_utf16 = + buffer.clip_point_utf16(buffer_point_utf16, Bias::Left); + let buffer_right_point_utf16 = + buffer.clip_point_utf16(buffer_point_utf16, Bias::Right); + assert_eq!( + left_point_utf16, + excerpt_start.lines_utf16 + + (buffer_left_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Left). buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + assert_eq!( + right_point_utf16, + excerpt_start.lines_utf16 + + (buffer_right_point_utf16 - buffer_start_point_utf16), + "clip_point_utf16({:?}, Right). 
buffer: {:?}, buffer point_utf16: {:?}", + point_utf16, + buffer_id, + buffer_point_utf16, + ); + + if ch == '\n' { + point_utf16 += PointUtf16::new(1, 0); + buffer_point_utf16 += PointUtf16::new(1, 0); + } else { + point_utf16 += PointUtf16::new(0, 1); + buffer_point_utf16 += PointUtf16::new(0, 1); + } } } } diff --git a/crates/text/src/random_char_iter.rs b/crates/text/src/random_char_iter.rs index 1f415b3f0395f100e90c7651e9f2d997adca38ed..1741df8fb7992d92d1e9d6b7613691716f48ec72 100644 --- a/crates/text/src/random_char_iter.rs +++ b/crates/text/src/random_char_iter.rs @@ -22,13 +22,13 @@ impl Iterator for RandomCharIter { match self.0.gen_range(0..100) { // whitespace - 0..=5 => ['\n'].choose(&mut self.0).copied(), + 0..=19 => [' ', '\n', '\t'].choose(&mut self.0).copied(), // two-byte greek letters - // 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), + 20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))), // // three-byte characters - // 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), + 33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(), // // four-byte characters - // 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), + 46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(), // ascii letters _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()), } From 1bdaeda43e7dffbcb4a657ea8c6da6f25998f8d8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 18:29:01 +0100 Subject: [PATCH 101/196] Remove disk diagnostics that were invalidated by a buffer edit Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/language/src/buffer.rs | 6 +++++- crates/text/src/rope.rs | 16 ++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index f2964374b643c7dc323f20876534a56232727a54..2d9ddd76d8a989539dd2796310bb4b618ae1167c 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -751,7 +751,9 @@ impl Buffer { .peekable(); let mut last_edit_old_end = PointUtf16::zero(); let mut last_edit_new_end = PointUtf16::zero(); - 'outer: for entry in &mut diagnostics { + let mut ix = 0; + 'outer: while ix < diagnostics.len() { + let entry = &mut diagnostics[ix]; let mut start = entry.range.start; let mut end = entry.range.end; if entry @@ -766,6 +768,7 @@ impl Buffer { last_edit_new_end = edit.new.end; edits_since_save.next(); } else if edit.old.start <= end && edit.old.end >= start { + diagnostics.remove(ix); continue 'outer; } else { break; @@ -786,6 +789,7 @@ impl Buffer { entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left); } } + ix += 1; } drop(edits_since_save); diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index 8b0965847542f1d30359872728d9e2779f0cb4a0..5b9cef2cc6fa2fbe24b9365b03b294a44ba4b14b 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -958,6 +958,22 @@ mod tests { } } + let mut point_utf16 = PointUtf16::zero(); + for unit in expected.encode_utf16() { + let left_point = actual.clip_point_utf16(point_utf16, Bias::Left); + let right_point = actual.clip_point_utf16(point_utf16, Bias::Right); + assert!(right_point >= left_point); + // Ensure translating UTF-16 points to offsets doesn't panic. 
+ actual.point_utf16_to_offset(left_point); + actual.point_utf16_to_offset(right_point); + + if unit == b'\n' as u16 { + point_utf16 += PointUtf16::new(1, 0); + } else { + point_utf16 += PointUtf16::new(0, 1); + } + } + for _ in 0..5 { let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right); let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left); From 80abd840506f6dd091e401cf52a454d878074951 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 15 Dec 2021 18:38:37 +0100 Subject: [PATCH 102/196] Create MultiBuffers with more than one fragment in more randomized tests Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- crates/editor/src/display_map.rs | 10 +++++++--- crates/editor/src/display_map/wrap_map.rs | 16 ++++++++++------ 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 2c55f0f988868dfff04856b15938182eeb0ea780..9e007197288e9e7c0fdd9107f052489f5d7ed0a2 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -505,9 +505,13 @@ mod tests { log::info!("wrap width: {:?}", wrap_width); let buffer = cx.update(|cx| { - let len = rng.gen_range(0..10); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - MultiBuffer::build_simple(&text, cx) + if rng.gen() { + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + } }); let map = cx.add_model(|cx| { diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index d1efb09ff75a0f1f1bb2ff620c1e6171bb16a12c..b674b467c9f01726db2cd33ac4857f7b8fe79c62 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1011,11 +1011,15 @@ mod tests { log::info!("Wrap width: {:?}", wrap_width); let buffer = cx.update(|cx| { - let len = rng.gen_range(0..10); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - MultiBuffer::build_simple(&text, cx) + if rng.gen() { + MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + } else { + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + MultiBuffer::build_simple(&text, cx) + } }); - let buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); + let mut buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); let (mut fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), tab_size); log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text()); @@ -1080,14 +1084,14 @@ mod tests { let subscription = buffer.subscribe(); let edit_count = rng.gen_range(1..=5); buffer.randomly_edit(&mut rng, edit_count, cx); + buffer_snapshot = buffer.snapshot(cx); buffer_edits.extend(subscription.consume()); }); } } - let buffer_snapshot = buffer.read_with(&cx, |buffer, cx| buffer.snapshot(cx)); log::info!("Unwrapped text (no folds): {:?}", buffer_snapshot.text()); - let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot, buffer_edits); + let (folds_snapshot, fold_edits) = fold_map.read(buffer_snapshot.clone(), buffer_edits); log::info!( "Unwrapped text (unexpanded tabs): {:?}", folds_snapshot.text() From cec0c5912c26865eb88008c953ce6e46c9fa00f3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 
10:21:11 -0800 Subject: [PATCH 103/196] Create multiple excerpts in random BlockMap test Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/block_map.rs | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 44d6d95e0fda3a914324eeb5b1edf7a84a9209ac..be4ac30616d36371f01525d08c55d28949fdf1dc 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1121,10 +1121,14 @@ mod tests { log::info!("Wrap width: {:?}", wrap_width); - let len = rng.gen_range(0..10); - let text = RandomCharIter::new(&mut rng).take(len).collect::(); - log::info!("initial buffer text: {:?}", text); - let buffer = MultiBuffer::build_simple(&text, cx); + let buffer = if rng.gen() { + let len = rng.gen_range(0..10); + let text = RandomCharIter::new(&mut rng).take(len).collect::(); + log::info!("initial buffer text: {:?}", text); + MultiBuffer::build_simple(&text, cx) + } else { + MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + }; let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let (fold_map, folds_snapshot) = FoldMap::new(buffer_snapshot.clone()); @@ -1212,8 +1216,8 @@ mod tests { let edit_count = rng.gen_range(1..=5); let subscription = buffer.subscribe(); buffer.randomly_edit(&mut rng, edit_count, cx); - buffer_edits.extend(subscription.consume()); buffer_snapshot = buffer.snapshot(cx); + buffer_edits.extend(subscription.consume()); log::info!("buffer text: {:?}", buffer_snapshot.text()); }); } From 4bea16eb31532fdec1cf984d6766e85a84357bd5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 10:52:27 -0800 Subject: [PATCH 104/196] Ensure muiltibuffer anchors are contained within their excerpt ranges Co-Authored-By: Nathan Sobo Co-Authored-By: Antonio Scandurra --- crates/diagnostics/src/diagnostics.rs | 162 +++++++++++++---------- crates/editor/src/multi_buffer.rs | 38 +++++- crates/editor/src/multi_buffer/anchor.rs | 7 - 3 files changed, 127 insertions(+), 80 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 4067bb5894537a69ad6dd7f5eaa2fafb7114efd1..a7f9c0e5c1bf324abdf7dc51e2a6244ed8a36698 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -92,81 +92,99 @@ impl workspace::Item for ProjectDiagnostics { this.update(&mut cx, |this, cx| { let mut blocks = Vec::new(); - this.excerpts.update(cx, |excerpts, excerpts_cx| { - for group in snapshot.diagnostic_groups::() { - let excerpt_start = cmp::min( - group.primary.range.start.row, - group - .supporting - .first() - .map_or(u32::MAX, |entry| entry.range.start.row), - ); - let excerpt_end = cmp::max( - group.primary.range.end.row, - group - .supporting - .last() - .map_or(0, |entry| entry.range.end.row), - ); - - let primary_diagnostic = group.primary.diagnostic; - let excerpt_id = excerpts.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(excerpt_start, 0) - ..Point::new( - excerpt_end, - snapshot.line_len(excerpt_end), + let excerpts_snapshot = + this.excerpts.update(cx, |excerpts, excerpts_cx| { + for group in snapshot.diagnostic_groups::() { + let excerpt_start = cmp::min( + group.primary.range.start.row, + group + .supporting + .first() + .map_or(u32::MAX, |entry| entry.range.start.row), + ); + let excerpt_end = cmp::max( + group.primary.range.end.row, + group + .supporting + .last() + 
.map_or(0, |entry| entry.range.end.row), + ); + + let primary_diagnostic = group.primary.diagnostic; + let excerpt_id = excerpts.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(excerpt_start, 0) + ..Point::new( + excerpt_end, + snapshot.line_len(excerpt_end), + ), + header_height: primary_diagnostic + .message + .matches('\n') + .count() + as u8 + + 1, + render_header: Some(Arc::new({ + let settings = settings.clone(); + + move |_| { + let editor_style = + &settings.borrow().theme.editor; + let mut text_style = editor_style.text.clone(); + text_style.color = diagnostic_style( + primary_diagnostic.severity, + true, + &editor_style, + ) + .text; + + Text::new( + primary_diagnostic.message.clone(), + text_style, + ) + .boxed() + } + })), + }, + excerpts_cx, + ); + + for entry in group.supporting { + let buffer_anchor = + snapshot.anchor_before(entry.range.start); + blocks.push(BlockProperties { + position: (excerpt_id.clone(), buffer_anchor), + height: entry.diagnostic.message.matches('\n').count() + as u8 + + 1, + render: diagnostic_block_renderer( + entry.diagnostic, + true, + build_settings.clone(), ), - header_height: primary_diagnostic - .message - .matches('\n') - .count() - as u8 - + 1, - render_header: Some(Arc::new({ - let settings = settings.clone(); - - move |_| { - let editor_style = &settings.borrow().theme.editor; - let mut text_style = editor_style.text.clone(); - text_style.color = diagnostic_style( - primary_diagnostic.severity, - true, - &editor_style, - ) - .text; - - Text::new( - primary_diagnostic.message.clone(), - text_style, - ) - .boxed() - } - })), - }, - excerpts_cx, - ); - - for entry in group.supporting { - let buffer_anchor = snapshot.anchor_before(entry.range.start); - blocks.push(BlockProperties { - position: Anchor::new(excerpt_id.clone(), buffer_anchor), - height: entry.diagnostic.message.matches('\n').count() - as u8 - + 1, - render: diagnostic_block_renderer( - entry.diagnostic, - true, - build_settings.clone(), - ), - disposition: BlockDisposition::Below, - }); + disposition: BlockDisposition::Below, + }); + } } - } - }); + + excerpts.snapshot(excerpts_cx) + }); + this.editor.update(cx, |editor, cx| { - editor.insert_blocks(blocks, cx); + editor.insert_blocks( + blocks.into_iter().map(|block| { + let (excerpt_id, text_anchor) = block.position; + BlockProperties { + position: excerpts_snapshot + .anchor_in_excerpt(excerpt_id, text_anchor), + height: block.height, + render: block.render, + disposition: block.disposition, + } + }), + cx, + ); }); }) } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 6fcc6e4a823a5a82fc9f0a017137cbe06a6b7ead..8221360ff45474d6e6b0af9dbc92422897756c9c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1360,9 +1360,11 @@ impl MultiBufferSnapshot { } let buffer_start = excerpt.range.start.to_offset(&excerpt.buffer); + let text_anchor = + excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias)); Anchor { excerpt_id: excerpt.id.clone(), - text_anchor: excerpt.buffer.anchor_at(buffer_start + overshoot, bias), + text_anchor, } } else if offset == 0 && bias == Bias::Left { Anchor::min() @@ -1371,6 +1373,22 @@ impl MultiBufferSnapshot { } } + pub fn anchor_in_excerpt(&self, excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Anchor { + let mut cursor = self.excerpts.cursor::>(); + cursor.seek(&Some(&excerpt_id), Bias::Left, &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == excerpt_id { + let 
text_anchor = excerpt.clip_anchor(text_anchor); + drop(cursor); + return Anchor { + excerpt_id, + text_anchor, + }; + } + } + panic!("excerpt not found"); + } + pub fn parse_count(&self) -> usize { self.parse_count } @@ -1688,6 +1706,24 @@ impl Excerpt { footer_height, } } + + fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { + if text_anchor + .cmp(&self.range.start, &self.buffer) + .unwrap() + .is_lt() + { + self.range.start.clone() + } else if text_anchor + .cmp(&self.range.end, &self.buffer) + .unwrap() + .is_gt() + { + self.range.end.clone() + } else { + text_anchor + } + } } impl fmt::Debug for Excerpt { diff --git a/crates/editor/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs index 8fea4799e826afbacbd6f8ec4b31ce749da5e3d8..2cc4817a92b9f8d22de3e2e3640ea14eec669061 100644 --- a/crates/editor/src/multi_buffer/anchor.rs +++ b/crates/editor/src/multi_buffer/anchor.rs @@ -14,13 +14,6 @@ pub struct Anchor { } impl Anchor { - pub fn new(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self { - Self { - excerpt_id, - text_anchor, - } - } - pub fn min() -> Self { Self { excerpt_id: ExcerptId::min(), From 2930ea8fb0abe1ba875ef4fcc23af4e5632eb7be Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 12:12:39 -0800 Subject: [PATCH 105/196] Fix handling of excerpts surrounded by edits in MultiBuffer::edit --- crates/editor/src/multi_buffer.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 8221360ff45474d6e6b0af9dbc92422897756c9c..b1d72e9f6f81b561e9ab91bb75346f45d4e1f149 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -352,13 +352,10 @@ impl MultiBuffer { if excerpt.id == end_excerpt.id { break; } - - let excerpt_range = start_excerpt.range.end.to_offset(&start_excerpt.buffer) - ..start_excerpt.range.end.to_offset(&start_excerpt.buffer); buffer_edits .entry(excerpt.buffer_id) .or_insert(Vec::new()) - .push((excerpt_range, false)); + .push((excerpt.range.to_offset(&excerpt.buffer), false)); cursor.next(&()); } } @@ -386,7 +383,7 @@ impl MultiBuffer { insertions.push( buffer.anchor_before(range.start)..buffer.anchor_before(range.end), ); - } else { + } else if !range.is_empty() { deletions.push( buffer.anchor_before(range.start)..buffer.anchor_before(range.end), ); From 368b4447ff3caa9b13ea3e15097c4722cb71ac0b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 15:41:38 -0800 Subject: [PATCH 106/196] Clip buffer points in DisplayMap::{prev,next}_row_boundary --- crates/editor/src/display_map.rs | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 9e007197288e9e7c0fdd9107f052489f5d7ed0a2..c9a139ceda67640fba27915ed8a129e109fa01f6 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -208,11 +208,18 @@ impl DisplaySnapshot { loop { *display_point.column_mut() = 0; let mut point = display_point.to_point(self); + point = self.buffer_snapshot.clip_point(point, Bias::Left); point.column = 0; let next_display_point = self.point_to_display_point(point, Bias::Left); if next_display_point == display_point { return (display_point, point); } + debug_assert!( + next_display_point < display_point, + "{:?} > {:?}", + next_display_point, + display_point + ); display_point = next_display_point; } } @@ -220,12 +227,19 @@ impl DisplaySnapshot { pub fn 
next_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) { loop { *display_point.column_mut() = self.line_len(display_point.row()); - let mut point = display_point.to_point(self); + let mut point = self.display_point_to_point(display_point, Bias::Right); + point = self.buffer_snapshot.clip_point(point, Bias::Right); point.column = self.buffer_snapshot.line_len(point.row); let next_display_point = self.point_to_display_point(point, Bias::Right); if next_display_point == display_point { return (display_point, point); } + debug_assert!( + next_display_point > display_point, + "{:?} < {:?}", + next_display_point, + display_point + ); display_point = next_display_point; } } From f4115ddc3c895c246b83f0dfd342204fbfca3a09 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 15:45:02 -0800 Subject: [PATCH 107/196] :art: point_to_display_point & display_point_to_point --- crates/editor/src/display_map.rs | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index c9a139ceda67640fba27915ed8a129e109fa01f6..2bea6536a2dc6c2c9cdaf2cacfcf1d9e2bd7e5aa 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -245,21 +245,19 @@ impl DisplaySnapshot { } fn point_to_display_point(&self, point: Point, bias: Bias) -> DisplayPoint { - DisplayPoint( - self.blocks_snapshot.to_block_point( - self.wraps_snapshot.from_tab_point( - self.tabs_snapshot - .to_tab_point(point.to_fold_point(&self.folds_snapshot, bias)), - ), - ), - ) + let fold_point = point.to_fold_point(&self.folds_snapshot, bias); + let tab_point = self.tabs_snapshot.to_tab_point(fold_point); + let wrap_point = self.wraps_snapshot.from_tab_point(tab_point); + let block_point = self.blocks_snapshot.to_block_point(wrap_point); + DisplayPoint(block_point) } fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point { - let unblocked_point = self.blocks_snapshot.to_wrap_point(point.0); - let unwrapped_point = self.wraps_snapshot.to_tab_point(unblocked_point); - let unexpanded_point = self.tabs_snapshot.to_fold_point(unwrapped_point, bias).0; - unexpanded_point.to_buffer_point(&self.folds_snapshot) + let block_point = point.0; + let wrap_point = self.blocks_snapshot.to_wrap_point(block_point); + let tab_point = self.wraps_snapshot.to_tab_point(wrap_point); + let fold_point = self.tabs_snapshot.to_fold_point(tab_point, bias).0; + fold_point.to_buffer_point(&self.folds_snapshot) } pub fn max_point(&self) -> DisplayPoint { From f8ef605cbdf47a0a98692c40b70bc892820d6607 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 16:50:59 -0800 Subject: [PATCH 108/196] Update all MultiBuffer unit tests, removing expected trailing newline --- crates/editor/src/editor.rs | 8 +++--- crates/editor/src/multi_buffer.rs | 44 +++++++++++++++---------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 70c81d84a3411ab6b24f0c9d11e3ce7947f3d9da..ff1922c8b1c8ddaee268487f6226b4b47423ab70 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5902,7 +5902,7 @@ mod tests { multibuffer }); - assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb\n"); + assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb"); let (_, view) = cx.add_window(Default::default(), |cx| { build_editor(multibuffer, settings, cx) @@ -5918,7 +5918,7 @@ mod tests { .unwrap(); 
view.handle_input(&Input("X".to_string()), cx); - assert_eq!(view.text(cx), "Xaaaa\nXbbbb\n"); + assert_eq!(view.text(cx), "Xaaaa\nXbbbb"); assert_eq!( view.selected_display_ranges(cx), &[ @@ -5958,7 +5958,7 @@ mod tests { assert_eq!( multibuffer.read(cx).read(cx).text(), - "aaaa\nbbbb\nbbbb\ncccc\n" + "aaaa\nbbbb\nbbbb\ncccc" ); let (_, view) = cx.add_window(Default::default(), |cx| { @@ -5975,7 +5975,7 @@ mod tests { .unwrap(); view.handle_input(&Input("X".to_string()), cx); - assert_eq!(view.text(cx), "aaaa\nbXbbXb\nbXbbXb\ncccc\n"); + assert_eq!(view.text(cx), "aaaa\nbXbbXb\nbXbbXb\ncccc"); assert_eq!( view.selected_display_ranges(cx), &[ diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index b1d72e9f6f81b561e9ab91bb75346f45d4e1f149..04860c59370e80e0a25d2c4431a41f613d4a668d 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2032,7 +2032,7 @@ mod tests { subscription.consume().into_inner(), [Edit { old: 0..0, - new: 0..13 + new: 0..12 }] ); @@ -2057,8 +2057,8 @@ mod tests { assert_eq!( subscription.consume().into_inner(), [Edit { - old: 13..13, - new: 13..29 + old: 12..12, + new: 12..28 }] ); @@ -2078,7 +2078,7 @@ mod tests { "\n", // "\n", // "\n", // - "jj\n" // + "jj" // ) ); @@ -2138,7 +2138,7 @@ mod tests { "\n", // "\n", // "\n", // - "jj\n" // + "jj" // ) ); @@ -2209,8 +2209,8 @@ mod tests { }); let new_snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(old_snapshot.text(), "\nabcd\n\nefghi\n"); - assert_eq!(new_snapshot.text(), "\nWabcdX\n\nYefghiZ\n"); + assert_eq!(old_snapshot.text(), "\nabcd\n\nefghi"); + assert_eq!(new_snapshot.text(), "\nWabcdX\n\nYefghiZ"); assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 1); assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 2); @@ -2220,8 +2220,8 @@ mod tests { assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3); assert_eq!(old_snapshot.anchor_before(7).to_offset(&new_snapshot), 9); assert_eq!(old_snapshot.anchor_after(7).to_offset(&new_snapshot), 10); - assert_eq!(old_snapshot.anchor_before(13).to_offset(&new_snapshot), 16); - assert_eq!(old_snapshot.anchor_after(13).to_offset(&new_snapshot), 17); + assert_eq!(old_snapshot.anchor_before(12).to_offset(&new_snapshot), 15); + assert_eq!(old_snapshot.anchor_after(12).to_offset(&new_snapshot), 16); } #[gpui::test(iterations = 100)] @@ -2559,46 +2559,46 @@ mod tests { cx, ); multibuffer.end_transaction_at(now, cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); now += 2 * group_interval; multibuffer.start_transaction_at(now, cx); multibuffer.edit([2..2], "C", cx); multibuffer.end_transaction_at(now, cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678\n"); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); buffer_1.update(cx, |buffer_1, cx| buffer_1.undo(cx)); - 
assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "1234\n5678\n"); + assert_eq!(multibuffer.read(cx).text(), "1234\n5678"); multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); multibuffer.redo(cx); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678"); buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx)); - assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678\n"); + assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678"); multibuffer.undo(cx); - assert_eq!(multibuffer.read(cx).text(), "C1234\n5678\n"); + assert_eq!(multibuffer.read(cx).text(), "C1234\n5678"); }); } } From e8570b5c26685e5dcc5b2aa42d56c26c2db1884e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 17:04:17 -0800 Subject: [PATCH 109/196] Allow multibuffer to clip to the ends of excerpts, before trailing newlines --- crates/editor/src/multi_buffer.rs | 145 ++++++++++++++++++------------ 1 file changed, 89 insertions(+), 56 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 04860c59370e80e0a25d2c4431a41f613d4a668d..3176bb3ea3c312fb63f776dcfe0e9095908e23a1 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -4,7 +4,7 @@ pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::{HashMap, HashSet}; -use gpui::{AppContext, ElementBox, Entity, ModelContext, ModelHandle, MutableAppContext, Task}; +use gpui::{AppContext, ElementBox, Entity, ModelContext, ModelHandle, Task}; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, ToOffset as _, ToPoint as _, TransactionId, @@ -171,7 +171,7 @@ impl MultiBuffer { } #[cfg(any(test, feature = "test-support"))] - pub fn build_simple(text: &str, cx: &mut MutableAppContext) -> ModelHandle { + pub fn build_simple(text: &str, cx: &mut gpui::MutableAppContext) -> ModelHandle { let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); cx.add_model(|cx| Self::singleton(buffer, cx)) } @@ -180,7 +180,7 @@ impl MultiBuffer { pub fn build_random( excerpts: usize, mut rng: &mut impl rand::Rng, - cx: &mut MutableAppContext, + cx: &mut gpui::MutableAppContext, ) -> ModelHandle { use rand::prelude::*; use text::RandomCharIter; @@ -604,7 +604,6 @@ impl MultiBuffer { snapshot.excerpts.update_last( |excerpt| { excerpt.has_trailing_newline = true; - excerpt.text_summary += TextSummary::from("\n"); prev_id = Some(excerpt.id.clone()); }, &(), @@ -852,10 +851,7 @@ impl MultiBufferSnapshot { cursor.seek(&offset, Bias::Left, &()); let mut excerpt_chunks = cursor.item().map(|excerpt| { let start_after_header = cursor.start() + excerpt.header_height as usize; - let mut end_before_footer = cursor.start() + excerpt.text_summary.bytes; - if excerpt.has_trailing_newline { - end_before_footer -= 1; - } + let end_before_footer = cursor.start() + excerpt.text_summary.bytes; let start = excerpt.range.start.to_offset(&excerpt.buffer); let end = @@ -942,6 +938,12 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let header_end = *cursor.start() + 
excerpt.header_height as usize; if offset < header_end { + if bias == Bias::Left { + cursor.prev(&()); + if let Some(excerpt) = cursor.item() { + return *cursor.start() + excerpt.text_summary.bytes; + } + } header_end } else { let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); @@ -966,6 +968,12 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let header_end = *cursor.start() + Point::new(excerpt.header_height as u32, 0); if point < header_end { + if bias == Bias::Left { + cursor.prev(&()); + if let Some(excerpt) = cursor.item() { + return *cursor.start() + excerpt.text_summary.lines; + } + } header_end } else { let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); @@ -990,6 +998,12 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let header_end = *cursor.start() + PointUtf16::new(excerpt.header_height as u32, 0); if point < header_end { + if bias == Bias::Left { + cursor.prev(&()); + if let Some(excerpt) = cursor.item() { + return *cursor.start() + excerpt.text_summary.lines_utf16; + } + } header_end } else { let excerpt_start = excerpt @@ -1017,10 +1031,8 @@ impl MultiBufferSnapshot { let mut chunk = &[][..]; let excerpt_bytes = if let Some(excerpt) = excerpts.item() { - let mut excerpt_bytes = excerpt.bytes_in_range( - range.start - excerpts.start() - ..cmp::min(range.end - excerpts.start(), excerpt.text_summary.bytes), - ); + let mut excerpt_bytes = excerpt + .bytes_in_range(range.start - excerpts.start()..range.end - excerpts.start()); chunk = excerpt_bytes.next().unwrap_or(&[][..]); Some(excerpt_bytes) } else { @@ -1611,15 +1623,6 @@ impl Excerpt { text_summary.bytes += header_height as usize; text_summary.longest_row += header_height as u32; } - if has_trailing_newline { - text_summary.last_line_chars = 0; - text_summary.lines.row += 1; - text_summary.lines.column = 0; - text_summary.lines_utf16.row += 1; - text_summary.lines_utf16.column = 0; - text_summary.bytes += 1; - } - Excerpt { id, buffer_id, @@ -1651,7 +1654,7 @@ impl Excerpt { ) -> ExcerptChunks<'a> { let content_start = self.range.start.to_offset(&self.buffer); let chunks_start = content_start + range.start.saturating_sub(self.header_height as usize); - let mut chunks_end = content_start + let chunks_end = content_start + cmp::min(range.end, self.text_summary.bytes) .saturating_sub(self.header_height as usize); @@ -1659,13 +1662,15 @@ impl Excerpt { (self.header_height as usize).saturating_sub(range.start), range.len(), ); - let mut footer_height = 0; - if self.has_trailing_newline && range.end == self.text_summary.bytes { - chunks_end -= 1; - if !range.is_empty() { - footer_height = 1; - } - } + + let footer_height = if self.has_trailing_newline + && range.start <= self.text_summary.bytes + && range.end > self.text_summary.bytes + { + 1 + } else { + 0 + }; let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme); @@ -1679,7 +1684,7 @@ impl Excerpt { fn bytes_in_range(&self, range: Range) -> ExcerptBytes { let content_start = self.range.start.to_offset(&self.buffer); let bytes_start = content_start + range.start.saturating_sub(self.header_height as usize); - let mut bytes_end = content_start + let bytes_end = content_start + cmp::min(range.end, self.text_summary.bytes) .saturating_sub(self.header_height as usize); @@ -1687,13 +1692,15 @@ impl Excerpt { (self.header_height as usize).saturating_sub(range.start), range.len(), ); - let mut footer_height = 0; - if self.has_trailing_newline && range.end == self.text_summary.bytes { - bytes_end 
-= 1; - if !range.is_empty() { - footer_height = 1; - } - } + + let footer_height = if self.has_trailing_newline + && range.start <= self.text_summary.bytes + && range.end > self.text_summary.bytes + { + 1 + } else { + 0 + }; let content_bytes = self.buffer.bytes_in_range(bytes_start..bytes_end); @@ -1740,9 +1747,13 @@ impl sum_tree::Item for Excerpt { type Summary = ExcerptSummary; fn summary(&self) -> Self::Summary { + let mut text = self.text_summary.clone(); + if self.has_trailing_newline { + text += TextSummary::from("\n"); + } ExcerptSummary { excerpt_id: self.id.clone(), - text: self.text_summary.clone(), + text, } } } @@ -1809,11 +1820,7 @@ impl<'a> MultiBufferChunks<'a> { self.excerpts.seek(&offset, Bias::Right, &()); if let Some(excerpt) = self.excerpts.item() { self.excerpt_chunks = Some(excerpt.chunks_in_range( - self.range.start - self.excerpts.start() - ..cmp::min( - self.range.end - self.excerpts.start(), - excerpt.text_summary.bytes, - ), + self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(), self.theme, )); } else { @@ -1834,13 +1841,9 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } else { self.excerpts.next(&()); let excerpt = self.excerpts.item()?; - self.excerpt_chunks = Some(excerpt.chunks_in_range( - 0..cmp::min( - self.range.end - self.excerpts.start(), - excerpt.text_summary.bytes, - ), - self.theme, - )); + self.excerpt_chunks = Some( + excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme), + ); self.next() } } @@ -1857,12 +1860,8 @@ impl<'a> MultiBufferBytes<'a> { } else { self.excerpts.next(&()); if let Some(excerpt) = self.excerpts.item() { - let mut excerpt_bytes = excerpt.bytes_in_range( - 0..cmp::min( - self.range.end - self.excerpts.start(), - excerpt.text_summary.bytes, - ), - ); + let mut excerpt_bytes = + excerpt.bytes_in_range(0..self.range.end - self.excerpts.start()); self.chunk = excerpt_bytes.next().unwrap(); self.excerpt_bytes = Some(excerpt_bytes); } @@ -2149,6 +2148,40 @@ mod tests { new: 8..9 }] ); + + let multibuffer = multibuffer.read(cx).snapshot(cx); + assert_eq!( + multibuffer.clip_point(Point::new(0, 0), Bias::Left), + Point::new(2, 0) + ); + assert_eq!( + multibuffer.clip_point(Point::new(0, 0), Bias::Right), + Point::new(2, 0) + ); + assert_eq!( + multibuffer.clip_point(Point::new(1, 0), Bias::Left), + Point::new(2, 0) + ); + assert_eq!( + multibuffer.clip_point(Point::new(1, 0), Bias::Right), + Point::new(2, 0) + ); + assert_eq!( + multibuffer.clip_point(Point::new(8, 0), Bias::Left), + Point::new(7, 4) + ); + assert_eq!( + multibuffer.clip_point(Point::new(8, 0), Bias::Right), + Point::new(11, 0) + ); + assert_eq!( + multibuffer.clip_point(Point::new(9, 0), Bias::Left), + Point::new(7, 4) + ); + assert_eq!( + multibuffer.clip_point(Point::new(9, 0), Bias::Right), + Point::new(11, 0) + ); } #[gpui::test] From f898dc6dae3489c52a458994e1875f89c5c84d53 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 17:29:15 -0800 Subject: [PATCH 110/196] Guard against inverted ranges when building edits in unfold The multibuffer lets you refer to offsets inside of headers, so it's possible to create a fold that appears non-empty, but which spans zero characters in the underlying buffers. Fold ranges are biased inward: the start is biased right, and the end is biased left. Because of these two things, it's possible to create a fold that becomes "inverted" when you insert text at that position. 
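
A minimal sketch of that failure mode, using a toy anchor type rather than the
editor's real `text::Anchor` (the names and the insertion rule below are
illustrative assumptions, not the text crate's implementation):

    // Toy model: an anchor is an offset plus a bias deciding which side it
    // sticks to when text is inserted exactly at that offset.
    #[derive(Clone, Copy, PartialEq)]
    enum Bias { Left, Right }

    #[derive(Clone, Copy)]
    struct Anchor { offset: usize, bias: Bias }

    impl Anchor {
        // Re-resolve this anchor after inserting `len` bytes at `at`.
        fn apply_insertion(&mut self, at: usize, len: usize) {
            if self.offset > at || (self.offset == at && self.bias == Bias::Right) {
                self.offset += len;
            }
        }
    }

    fn main() {
        // A fold whose endpoints coincide in the underlying buffer (as happens
        // when the fold range only covers header offsets), biased inward.
        let mut start = Anchor { offset: 4, bias: Bias::Right };
        let mut end = Anchor { offset: 4, bias: Bias::Left };

        // Insert three characters exactly at offset 4.
        start.apply_insertion(4, 3); // right bias jumps past the insertion -> 7
        end.apply_insertion(4, 3);   // left bias stays put                 -> 4

        // The resolved range is now inverted, so unfold has to skip it.
        assert!(start.offset > end.offset);
    }

Skipping ranges whose resolved end is not strictly greater than their start,
as this change does, keeps such inverted folds from producing bogus edits.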
--- crates/editor/src/display_map/fold_map.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 972e152c184bcddb00c5688876859ac4a4f302da..13e82fe9421ff915c1761f91bb110f7905a1a959 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -152,10 +152,12 @@ impl<'a> FoldMapWriter<'a> { let mut folds_cursor = intersecting_folds(&buffer, &self.0.folds, range, true); while let Some(fold) = folds_cursor.item() { let offset_range = fold.0.start.to_offset(&buffer)..fold.0.end.to_offset(&buffer); - edits.push(text::Edit { - old: offset_range.clone(), - new: offset_range, - }); + if offset_range.end > offset_range.start { + edits.push(text::Edit { + old: offset_range.clone(), + new: offset_range, + }); + } fold_ixs_to_delete.push(*folds_cursor.start()); folds_cursor.next(&buffer); } @@ -1366,7 +1368,6 @@ mod tests { } let text = &expected_text[start.0..end.0]; - log::info!("slicing {:?}..{:?} (text: {:?})", start, end, text); assert_eq!( snapshot .chunks(start..end, None) From 4c227746942776ecd70f79e9d3410e30fbe9bdee Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 15 Dec 2021 17:58:24 -0800 Subject: [PATCH 111/196] Always clip buffer points when clipping display points Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map/block_map.rs | 18 +++++++----------- crates/editor/src/display_map/fold_map.rs | 6 +----- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index be4ac30616d36371f01525d08c55d28949fdf1dc..d0a9103af31b2d6c755f33077c5f6b01091fe740 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -598,17 +598,13 @@ impl BlockSnapshot { } let output_start = Point::new(output_start_row.0, 0); - if point.0 > output_start { - let output_overshoot = point.0 - output_start; - let input_start = Point::new(input_start_row.0, 0); - let input_point = self - .wrap_snapshot - .clip_point(WrapPoint(input_start + output_overshoot), bias); - let input_overshoot = input_point.0 - input_start; - return BlockPoint(output_start + input_overshoot); - } else { - return BlockPoint(output_start); - } + let output_overshoot = point.0 - output_start; + let input_start = Point::new(input_start_row.0, 0); + let input_point = self + .wrap_snapshot + .clip_point(WrapPoint(input_start + output_overshoot), bias); + let input_overshoot = input_point.0 - input_start; + return BlockPoint(output_start + input_overshoot); } else if search_left { cursor.prev(&()); } else { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 13e82fe9421ff915c1761f91bb110f7905a1a959..74d2b3851f6e9d489f33576bc2de1d7d40365ec3 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -691,11 +691,7 @@ impl FoldSnapshot { let buffer_position = cursor.start().1 + overshoot; let clipped_buffer_position = self.buffer_snapshot.clip_point(buffer_position, bias); - FoldPoint::new( - point.row(), - ((point.column() as i32) + clipped_buffer_position.column as i32 - - buffer_position.column as i32) as u32, - ) + FoldPoint(cursor.start().0 .0 + (clipped_buffer_position - cursor.start().1)) } } else { FoldPoint(self.transforms.summary().output.lines) From 3e2f68454556e5246b99b7645dca11501689093b Mon Sep 17 00:00:00 2001 From: Max 
Brunsfeld Date: Wed, 15 Dec 2021 18:30:09 -0800 Subject: [PATCH 112/196] Fix prev_row_boundary when a wrap follows a fold Co-Authored-By: Nathan Sobo --- crates/editor/src/display_map.rs | 14 ++++++++++++-- crates/editor/src/display_map/wrap_map.rs | 9 +++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 2bea6536a2dc6c2c9cdaf2cacfcf1d9e2bd7e5aa..220614ffd66873a91d09ebaa29e4afd77d715381 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -210,7 +210,7 @@ impl DisplaySnapshot { let mut point = display_point.to_point(self); point = self.buffer_snapshot.clip_point(point, Bias::Left); point.column = 0; - let next_display_point = self.point_to_display_point(point, Bias::Left); + let next_display_point = self.point_to_display_point_with_clipping(point, Bias::Left); if next_display_point == display_point { return (display_point, point); } @@ -252,6 +252,16 @@ impl DisplaySnapshot { DisplayPoint(block_point) } + fn point_to_display_point_with_clipping(&self, point: Point, bias: Bias) -> DisplayPoint { + let fold_point = point.to_fold_point(&self.folds_snapshot, bias); + let tab_point = self.tabs_snapshot.to_tab_point(fold_point); + let wrap_point = self + .wraps_snapshot + .from_tab_point_with_clipping(tab_point, bias); + let block_point = self.blocks_snapshot.to_block_point(wrap_point); + DisplayPoint(block_point) + } + fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point { let block_point = point.0; let wrap_point = self.blocks_snapshot.to_wrap_point(block_point); @@ -492,7 +502,7 @@ mod tests { use Bias::*; #[gpui::test(iterations = 100)] - async fn test_random(mut cx: gpui::TestAppContext, mut rng: StdRng) { + async fn test_random_display_map(mut cx: gpui::TestAppContext, mut rng: StdRng) { cx.foreground().set_block_on_ticks(0..=50); cx.foreground().forbid_parking(); let operations = env::var("OPERATIONS") diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index b674b467c9f01726db2cd33ac4857f7b8fe79c62..0dd91b74049464f1d28d0d6ea775d142c46881dc 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -672,6 +672,15 @@ impl WrapSnapshot { WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)) } + pub fn from_tab_point_with_clipping(&self, point: TabPoint, bias: Bias) -> WrapPoint { + let mut cursor = self.transforms.cursor::<(TabPoint, WrapPoint)>(); + cursor.seek(&point, bias, &()); + self.clip_point( + WrapPoint(cursor.start().1 .0 + (point.0 - cursor.start().0 .0)), + bias, + ) + } + pub fn clip_point(&self, mut point: WrapPoint, bias: Bias) -> WrapPoint { if bias == Bias::Left { let mut cursor = self.transforms.cursor::(); From ec39c9d3354848936f6c8cf0dc134bf010030848 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 12:28:54 +0100 Subject: [PATCH 113/196] Allow specifying `MAX_EXCERPTS` via an env variable in random tests --- crates/diagnostics/src/diagnostics.rs | 2 +- crates/editor/src/display_map.rs | 2 +- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/display_map/fold_map.rs | 2 +- crates/editor/src/display_map/tab_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 2 +- crates/editor/src/multi_buffer.rs | 7 ++++++- 7 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 
a7f9c0e5c1bf324abdf7dc51e2a6244ed8a36698..3181f3b3ddc71f5d6ac8f86090c2cd6841522211 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -3,7 +3,7 @@ use std::{cmp, sync::Arc}; use editor::{ diagnostic_block_renderer, diagnostic_style, display_map::{BlockDisposition, BlockProperties}, - Anchor, Editor, ExcerptProperties, MultiBuffer, + Editor, ExcerptProperties, MultiBuffer, }; use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 220614ffd66873a91d09ebaa29e4afd77d715381..9454455214bddb41638927eaf53ea5454ba6baed 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -532,7 +532,7 @@ mod tests { let text = RandomCharIter::new(&mut rng).take(len).collect::(); MultiBuffer::build_simple(&text, cx) } else { - MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + MultiBuffer::build_random(&mut rng, cx) } }); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index d0a9103af31b2d6c755f33077c5f6b01091fe740..fc1232ea642fe6d47476c4badb077c782baa3ae4 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1123,7 +1123,7 @@ mod tests { log::info!("initial buffer text: {:?}", text); MultiBuffer::build_simple(&text, cx) } else { - MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + MultiBuffer::build_random(&mut rng, cx) }; let mut buffer_snapshot = buffer.read(cx).snapshot(cx); diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 74d2b3851f6e9d489f33576bc2de1d7d40365ec3..4f7837a2eb030966aa30ca172882276aac68ba6b 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1252,7 +1252,7 @@ mod tests { let buffer = if rng.gen() { MultiBuffer::build_simple(&text, cx) } else { - MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + MultiBuffer::build_random(&mut rng, cx) }; let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let mut map = FoldMap::new(buffer_snapshot.clone()).0; diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 14e54c9523a3eb2b232bbe1688796e54abbee94f..9b33a10b149e9eb1e3431c4bd8f0ae8c36cd066a 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -458,7 +458,7 @@ mod tests { let text = RandomCharIter::new(&mut rng).take(len).collect::(); MultiBuffer::build_simple(&text, cx) } else { - MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + MultiBuffer::build_random(&mut rng, cx) }; let buffer_snapshot = buffer.read(cx).snapshot(cx); log::info!("Buffer text: {:?}", buffer_snapshot.text()); diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 0dd91b74049464f1d28d0d6ea775d142c46881dc..0620290e0dfd46d7651104190a70de019167981c 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -1021,7 +1021,7 @@ mod tests { let buffer = cx.update(|cx| { if rng.gen() { - MultiBuffer::build_random(rng.gen_range(1..=5), &mut rng, cx) + MultiBuffer::build_random(&mut rng, cx) } else { let len = rng.gen_range(0..10); let text = RandomCharIter::new(&mut rng).take(len).collect::(); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs 
index 3176bb3ea3c312fb63f776dcfe0e9095908e23a1..b4ad63292b235df4c4e154cf5e3bfda4409ff664 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -178,13 +178,18 @@ impl MultiBuffer { #[cfg(any(test, feature = "test-support"))] pub fn build_random( - excerpts: usize, mut rng: &mut impl rand::Rng, cx: &mut gpui::MutableAppContext, ) -> ModelHandle { use rand::prelude::*; + use std::env; use text::RandomCharIter; + let max_excerpts = env::var("MAX_EXCERPTS") + .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) + .unwrap_or(5); + let excerpts = rng.gen_range(1..=max_excerpts); + cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); let mut buffers = Vec::new(); From 64e2f6d506804ccecba7a55e8997cf46d883b988 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 12:29:37 +0100 Subject: [PATCH 114/196] Ensure `BlockMap::clip_point` always yield a valid buffer location --- crates/editor/src/display_map/block_map.rs | 64 ++++++++++++++++------ crates/text/src/point.rs | 8 +++ 2 files changed, 54 insertions(+), 18 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index fc1232ea642fe6d47476c4badb077c782baa3ae4..fe7fb0e191b2bb1a718fad2f56b04e5d412b438c 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -581,37 +581,45 @@ impl BlockSnapshot { cursor.seek(&BlockRow(point.row), Bias::Right, &()); let max_input_row = WrapRow(self.transforms.summary().input_rows); - let search_left = + let mut search_left = (bias == Bias::Left && cursor.start().1 .0 > 0) || cursor.end(&()).1 == max_input_row; + let mut reversed = false; loop { if let Some(transform) = cursor.item() { if transform.is_isomorphic() { let (output_start_row, input_start_row) = cursor.start(); let (output_end_row, input_end_row) = cursor.end(&()); + let output_start = Point::new(output_start_row.0, 0); + let input_start = Point::new(input_start_row.0, 0); + let input_end = Point::new(input_end_row.0, 0); + let input_point = if point.row >= output_end_row.0 { + let line_len = self.wrap_snapshot.line_len(input_end_row.0 - 1); + self.wrap_snapshot + .clip_point(WrapPoint::new(input_end_row.0 - 1, line_len), bias) + } else { + let output_overshoot = point.0.saturating_sub(output_start); + self.wrap_snapshot + .clip_point(WrapPoint(input_start + output_overshoot), bias) + }; - if point.row >= output_end_row.0 { - return BlockPoint::new( - output_end_row.0 - 1, - self.wrap_snapshot.line_len(input_end_row.0 - 1), - ); + if (input_start..input_end).contains(&input_point.0) { + let input_overshoot = input_point.0.saturating_sub(input_start); + return BlockPoint(output_start + input_overshoot); } + } - let output_start = Point::new(output_start_row.0, 0); - let output_overshoot = point.0 - output_start; - let input_start = Point::new(input_start_row.0, 0); - let input_point = self - .wrap_snapshot - .clip_point(WrapPoint(input_start + output_overshoot), bias); - let input_overshoot = input_point.0 - input_start; - return BlockPoint(output_start + input_overshoot); - } else if search_left { + if search_left { cursor.prev(&()); } else { cursor.next(&()); } - } else { + } else if reversed { return self.max_point(); + } else { + reversed = true; + search_left = !search_left; + cursor.seek(&BlockRow(point.row), Bias::Right, &()); } } } @@ -1368,16 +1376,30 @@ mod tests { let mut block_point = BlockPoint::new(0, 0); for c in expected_text.chars() { let left_point = 
blocks_snapshot.clip_point(block_point, Bias::Left); - let right_point = blocks_snapshot.clip_point(block_point, Bias::Right); - + let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left); assert_eq!( blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)), left_point ); + assert_eq!( + left_buffer_point, + buffer_snapshot.clip_point(left_buffer_point, Bias::Right), + "{:?} is not valid in buffer coordinates", + left_point + ); + + let right_point = blocks_snapshot.clip_point(block_point, Bias::Right); + let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right); assert_eq!( blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)), right_point ); + assert_eq!( + right_buffer_point, + buffer_snapshot.clip_point(right_buffer_point, Bias::Left), + "{:?} is not valid in buffer coordinates", + right_point + ); if c == '\n' { block_point.0 += Point::new(1, 0); @@ -1387,4 +1409,10 @@ mod tests { } } } + + impl BlockSnapshot { + fn to_point(&self, point: BlockPoint, bias: Bias) -> Point { + self.wrap_snapshot.to_point(self.to_wrap_point(point), bias) + } + } } diff --git a/crates/text/src/point.rs b/crates/text/src/point.rs index 93cc2c60076dc949245e62697b92367c9f764cf0..f36357650f3c3a0e8012d7fadf94eeeee2533a7b 100644 --- a/crates/text/src/point.rs +++ b/crates/text/src/point.rs @@ -35,6 +35,14 @@ impl Point { pub fn is_zero(&self) -> bool { self.row == 0 && self.column == 0 } + + pub fn saturating_sub(self, other: Self) -> Self { + if self < other { + Point::zero() + } else { + self - other + } + } } impl<'a> Add<&'a Self> for Point { From abf96e6ad6ea66e0a6b5ca52611e21eff9f3f70c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 12:36:27 +0100 Subject: [PATCH 115/196] Fix movement tests in `DisplayMap` --- crates/editor/src/display_map.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 9454455214bddb41638927eaf53ea5454ba6baed..ee7dc2acf63ed713671a45aaf0f710bd2e6e1448 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -641,6 +641,8 @@ mod tests { } // Movement + let min_point = snapshot.clip_point(DisplayPoint::new(0, 0), Left); + let max_point = snapshot.clip_point(snapshot.max_point(), Right); for _ in 0..5 { let row = rng.gen_range(0..=snapshot.max_point().row()); let column = rng.gen_range(0..=snapshot.line_len(row)); @@ -650,7 +652,7 @@ mod tests { let moved_right = movement::right(&snapshot, point).unwrap(); log::info!("Right {:?}", moved_right); - if point < snapshot.max_point() { + if point < max_point { assert!(moved_right > point); if point.column() == snapshot.line_len(point.row()) || snapshot.soft_wrap_indent(point.row()).is_some() @@ -664,13 +666,13 @@ mod tests { let moved_left = movement::left(&snapshot, point).unwrap(); log::info!("Left {:?}", moved_left); - if !point.is_zero() { + if point > min_point { assert!(moved_left < point); if point.column() == 0 { assert!(moved_left.row() < point.row()); } } else { - assert!(moved_left.is_zero()); + assert_eq!(moved_left, point); } } } From de679cae7867cec093b38f60523aec8839af3fc2 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 12:41:48 +0100 Subject: [PATCH 116/196] Re-enable creating multiple blocks at once in `BlockMap` tests --- crates/editor/src/display_map/block_map.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/display_map/block_map.rs 
b/crates/editor/src/display_map/block_map.rs index fe7fb0e191b2bb1a718fad2f56b04e5d412b438c..ad77dc07a7c6ab254ee14e28413900bbfc08ed72 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1155,7 +1155,7 @@ mod tests { wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx)); } 20..=39 => { - let block_count = rng.gen_range(1..=1); + let block_count = rng.gen_range(1..=5); let block_properties = (0..block_count) .map(|_| { let buffer = buffer.read(cx).read(cx); From 76601591645bb942221d8493d2d3b4489b86bea3 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 16:14:27 +0100 Subject: [PATCH 117/196] Test blocks in display map randomized tests This highlighted some errors in the implementation. --- crates/editor/src/display_map.rs | 65 ++++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index ee7dc2acf63ed713671a45aaf0f710bd2e6e1448..10e27309d5a595c72fbfaa838b4952ebe7d7bedd 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -493,9 +493,9 @@ impl ToDisplayPoint for Anchor { mod tests { use super::*; use crate::{movement, test::*}; - use gpui::{color::Color, MutableAppContext}; + use gpui::{color::Color, elements::*, MutableAppContext}; use language::{Buffer, Language, LanguageConfig, RandomCharIter, SelectionGoal}; - use rand::{prelude::StdRng, Rng}; + use rand::{prelude::*, Rng}; use std::{env, sync::Arc}; use theme::SyntaxTheme; use util::test::sample_text; @@ -541,6 +541,15 @@ mod tests { }); let (_observer, notifications) = Observer::new(&map, &mut cx); let mut fold_count = 0; + let mut blocks = Vec::new(); + + let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx)); + log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); + log::info!("fold text: {:?}", snapshot.folds_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tabs_snapshot.text()); + log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text()); + log::info!("block text: {:?}", snapshot.blocks_snapshot.text()); + log::info!("display text: {:?}", snapshot.text()); for _i in 0..operations { match rng.gen_range(0..100) { @@ -553,7 +562,51 @@ mod tests { log::info!("setting wrap width to {:?}", wrap_width); map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx)); } - 20..=80 => { + 20..=44 => { + map.update(&mut cx, |map, cx| { + if rng.gen() || blocks.is_empty() { + let buffer = map.snapshot(cx).buffer_snapshot; + let block_properties = (0..rng.gen_range(1..=1)) + .map(|_| { + let position = + buffer.anchor_after(buffer.clip_offset( + rng.gen_range(0..=buffer.len()), + Bias::Left, + )); + + let disposition = if rng.gen() { + BlockDisposition::Above + } else { + BlockDisposition::Below + }; + let height = rng.gen_range(1..5); + log::info!( + "inserting block {:?} {:?} with height {}", + disposition, + position.to_point(&buffer), + height + ); + BlockProperties { + position, + height, + disposition, + render: Arc::new(|_| Empty::new().boxed()), + } + }) + .collect::>(); + blocks.extend(map.insert_blocks(block_properties, cx)); + } else { + blocks.shuffle(&mut rng); + let remove_count = rng.gen_range(1..=4.min(blocks.len())); + let block_ids_to_remove = (0..remove_count) + .map(|_| blocks.remove(rng.gen_range(0..blocks.len()))) + .collect(); + log::info!("removing block ids {:?}", block_ids_to_remove); + map.remove_blocks(block_ids_to_remove, cx); + } + }); + } + 45..=79 => { 
let mut ranges = Vec::new(); for _ in 0..rng.gen_range(1..=3) { buffer.read_with(&cx, |buffer, cx| { @@ -588,6 +641,10 @@ mod tests { let snapshot = map.update(&mut cx, |map, cx| map.snapshot(cx)); fold_count = snapshot.fold_count(); log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text()); + log::info!("fold text: {:?}", snapshot.folds_snapshot.text()); + log::info!("tab text: {:?}", snapshot.tabs_snapshot.text()); + log::info!("wrap text: {:?}", snapshot.wraps_snapshot.text()); + log::info!("block text: {:?}", snapshot.blocks_snapshot.text()); log::info!("display text: {:?}", snapshot.text()); // Line boundaries @@ -603,7 +660,7 @@ mod tests { assert!(next_display_bound >= point); assert_eq!(prev_buffer_bound.column, 0); assert_eq!(prev_display_bound.column(), 0); - if next_display_bound < snapshot.max_point() { + if next_buffer_bound < snapshot.buffer_snapshot.max_point() { assert_eq!( snapshot.buffer_snapshot.chars_at(next_buffer_bound).next(), Some('\n') From 0fc2db6d6ed8ac65bcffa8211d859fb1c0b3fcd1 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 16 Dec 2021 16:15:48 +0100 Subject: [PATCH 118/196] Account for folds when inserting/removing block decorations --- crates/editor/src/display_map/block_map.rs | 34 ++++++++---------- crates/editor/src/display_map/wrap_map.rs | 42 +++++++++++++++++++++- 2 files changed, 55 insertions(+), 21 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index ad77dc07a7c6ab254ee14e28413900bbfc08ed72..a7183fa474f087937578ad34fa2b50243d64e6a4 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -377,16 +377,13 @@ impl<'a> BlockMapWriter<'a> { let position = buffer.anchor_after(block.position); let point = position.to_point(&buffer); - let start_row = wrap_snapshot + let wrap_row = wrap_snapshot .from_point(Point::new(point.row, 0), Bias::Left) .row(); - let end_row = if point.row == buffer.max_point().row { - wrap_snapshot.max_point().row() + 1 - } else { - wrap_snapshot - .from_point(Point::new(point.row + 1, 0), Bias::Left) - .row() - }; + let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); + let end_row = wrap_snapshot + .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .unwrap_or(wrap_snapshot.max_point().row() + 1); let block_ix = match self .0 @@ -431,16 +428,13 @@ impl<'a> BlockMapWriter<'a> { let buffer_row = block.position.to_point(&buffer).row; if last_block_buffer_row != Some(buffer_row) { last_block_buffer_row = Some(buffer_row); - let start_row = wrap_snapshot + let wrap_row = wrap_snapshot .from_point(Point::new(buffer_row, 0), Bias::Left) .row(); + let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0)); let end_row = wrap_snapshot - .from_point( - Point::new(buffer_row, buffer.line_len(buffer_row)), - Bias::Left, - ) - .row() - + 1; + .next_row_boundary(WrapPoint::new(wrap_row, 0)) + .unwrap_or(wrap_snapshot.max_point().row() + 1); edits.push(Edit { old: start_row..end_row, new: start_row..end_row, @@ -457,7 +451,7 @@ impl<'a> BlockMapWriter<'a> { impl BlockSnapshot { #[cfg(test)] - fn text(&mut self) -> String { + pub fn text(&self) -> String { self.chunks(0..self.transforms.summary().output_rows, None) .map(|chunk| chunk.text) .collect() @@ -934,7 +928,7 @@ mod tests { }, ]); - let mut snapshot = block_map.read(wraps_snapshot, vec![]); + let snapshot = block_map.read(wraps_snapshot, vec![]); assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n"); let 
blocks = snapshot @@ -1059,7 +1053,7 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut snapshot = block_map.read(wraps_snapshot, wrap_edits); + let snapshot = block_map.read(wraps_snapshot, wrap_edits); assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n"); } @@ -1097,7 +1091,7 @@ mod tests { // Blocks with an 'above' disposition go above their corresponding buffer line. // Blocks with a 'below' disposition go below their corresponding buffer line. - let mut snapshot = block_map.read(wraps_snapshot, vec![]); + let snapshot = block_map.read(wraps_snapshot, vec![]); assert_eq!( snapshot.text(), "one two \nthree\n\nfour five \nsix\n\nseven \neight" @@ -1232,7 +1226,7 @@ mod tests { let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| { wrap_map.sync(tabs_snapshot, tab_edits, cx) }); - let mut blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); + let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits); assert_eq!( blocks_snapshot.transforms.summary().input_rows, wraps_snapshot.max_point().row() + 1 diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 0620290e0dfd46d7651104190a70de019167981c..bee2a7f8dee4652e076aba869a7af4d583b1915f 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -10,7 +10,7 @@ use gpui::{ use language::Chunk; use lazy_static::lazy_static; use smol::future::yield_now; -use std::{collections::VecDeque, mem, ops::Range, time::Duration}; +use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration}; use sum_tree::{Bias, Cursor, SumTree}; use text::Patch; use theme::SyntaxTheme; @@ -694,6 +694,46 @@ impl WrapSnapshot { self.from_tab_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias)) } + pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 { + if self.transforms.is_empty() { + return 0; + } + + *point.column_mut() = 0; + + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&point, Bias::Right, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + + while let Some(transform) = cursor.item() { + if transform.is_isomorphic() && cursor.start().1.column() == 0 { + return cmp::min(cursor.end(&()).0.row(), point.row()); + } else { + cursor.prev(&()); + } + } + + unreachable!() + } + + pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option { + point.0 += Point::new(1, 0); + + let mut cursor = self.transforms.cursor::<(WrapPoint, TabPoint)>(); + cursor.seek(&point, Bias::Right, &()); + while let Some(transform) = cursor.item() { + if transform.is_isomorphic() && cursor.start().1.column() == 0 { + return Some(cmp::max(cursor.start().0.row(), point.row())); + } else { + cursor.next(&()); + } + } + + None + } + fn check_invariants(&self) { #[cfg(test)] { From 80f3173fbd27e919f2b295964067e48a8030d4c2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 10:23:45 -0800 Subject: [PATCH 119/196] Always panic if invalid point is passed to {prev,next}_row_boundary Co-Authored-By: Antonio Scandurra --- crates/editor/src/display_map.rs | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 10e27309d5a595c72fbfaa838b4952ebe7d7bedd..e1f7f473a864545ee9c53a1f35e270d336f620ce 100644 --- a/crates/editor/src/display_map.rs +++ 
b/crates/editor/src/display_map.rs @@ -204,7 +204,8 @@ impl DisplaySnapshot { self.buffer_snapshot.max_point().row + 1 } - pub fn prev_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) { + pub fn prev_row_boundary(&self, input_display_point: DisplayPoint) -> (DisplayPoint, Point) { + let mut display_point = input_display_point; loop { *display_point.column_mut() = 0; let mut point = display_point.to_point(self); @@ -214,17 +215,15 @@ impl DisplaySnapshot { if next_display_point == display_point { return (display_point, point); } - debug_assert!( - next_display_point < display_point, - "{:?} > {:?}", - next_display_point, - display_point - ); + if next_display_point > display_point { + panic!("invalid display point {:?}", input_display_point); + } display_point = next_display_point; } } - pub fn next_row_boundary(&self, mut display_point: DisplayPoint) -> (DisplayPoint, Point) { + pub fn next_row_boundary(&self, input_display_point: DisplayPoint) -> (DisplayPoint, Point) { + let mut display_point = input_display_point; loop { *display_point.column_mut() = self.line_len(display_point.row()); let mut point = self.display_point_to_point(display_point, Bias::Right); @@ -234,12 +233,9 @@ impl DisplaySnapshot { if next_display_point == display_point { return (display_point, point); } - debug_assert!( - next_display_point > display_point, - "{:?} < {:?}", - next_display_point, - display_point - ); + if next_display_point < display_point { + panic!("invalid display point {:?}", input_display_point); + } display_point = next_display_point; } } From dcd05ef96b774bc5e62940de8bcbdb70ff53bdfd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 10:25:09 -0800 Subject: [PATCH 120/196] Resolve Anchor::min and ::max to valid positions Co-Authored-By: Antonio Scandurra --- crates/editor/src/multi_buffer.rs | 41 ++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index b4ad63292b235df4c4e154cf5e3bfda4409ff664..6c35165eca73f0eb84b51b5543d1feb1353d1e5f 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1286,19 +1286,22 @@ impl MultiBufferSnapshot { { let mut cursor = self.excerpts.cursor::(); cursor.seek(&Some(&anchor.excerpt_id), Bias::Left, &()); + if cursor.item().is_none() { + cursor.next(&()); + } + + let mut position = D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { + position.add_summary(&excerpt.header_summary(), &()); if excerpt.id == anchor.excerpt_id { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); - let buffer_point = anchor.text_anchor.summary::(&excerpt.buffer); - if buffer_point > excerpt_buffer_start { - excerpt_start.add_assign(&(buffer_point - excerpt_buffer_start)); + let buffer_position = anchor.text_anchor.summary::(&excerpt.buffer); + if buffer_position > excerpt_buffer_start { + position.add_assign(&(buffer_position - excerpt_buffer_start)); } - return excerpt_start; } } - D::from_text_summary(&cursor.start().text) + position } pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec @@ -1321,30 +1324,33 @@ impl MultiBufferSnapshot { }); cursor.seek_forward(&Some(excerpt_id), Bias::Left, &()); + if cursor.item().is_none() { + cursor.next(&()); + } + + let mut position = 
D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { + position.add_summary(&excerpt.header_summary(), &()); if excerpt.id == *excerpt_id { - let mut excerpt_start = D::from_text_summary(&cursor.start().text); - excerpt_start.add_summary(&excerpt.header_summary(), &()); let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); summaries.extend( excerpt .buffer .summaries_for_anchors::(excerpt_anchors) .map(move |summary| { - let mut excerpt_start = excerpt_start.clone(); + let mut position = position.clone(); let excerpt_buffer_start = excerpt_buffer_start.clone(); if summary > excerpt_buffer_start { - excerpt_start.add_assign(&(summary - excerpt_buffer_start)); + position.add_assign(&(summary - excerpt_buffer_start)); } - excerpt_start + position }), ); continue; } } - let summary = D::from_text_summary(&cursor.start().text); - summaries.extend(excerpt_anchors.map(|_| summary.clone())); + summaries.extend(excerpt_anchors.map(|_| position.clone())); } summaries @@ -2237,6 +2243,13 @@ mod tests { }); let old_snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 1); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 1); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 1); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 1); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 12); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 12); + buffer_1.update(cx, |buffer, cx| { buffer.edit([0..0], "W", cx); buffer.edit([5..5], "X", cx); From 38df091b06d7b191af081727833e122a5ecc05a7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 11:16:48 -0800 Subject: [PATCH 121/196] Fix up/down movement across excerpt headers Implement these movements in terms of clipping, instead of with explicit loops --- crates/editor/src/movement.rs | 134 ++++++++++++++++++++++++++-------- 1 file changed, 104 insertions(+), 30 deletions(-) diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 6c7525cb00ef97288188f80b846105ffc3117ef7..00b5140a6cd6520d50d1dd4ac7fcd8b0f576c369 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -26,26 +26,25 @@ pub fn right(map: &DisplaySnapshot, mut point: DisplayPoint) -> Result Result<(DisplayPoint, SelectionGoal)> { - let goal_column = if let SelectionGoal::Column(column) = goal { + let mut goal_column = if let SelectionGoal::Column(column) = goal { column } else { - map.column_to_chars(point.row(), point.column()) + map.column_to_chars(start.row(), start.column()) }; - loop { - if point.row() > 0 { - *point.row_mut() -= 1; - *point.column_mut() = map.column_from_chars(point.row(), goal_column); - if !map.is_block_line(point.row()) { - break; - } - } else { - point = DisplayPoint::new(0, 0); - break; - } + let prev_row = start.row().saturating_sub(1); + let mut point = map.clip_point( + DisplayPoint::new(prev_row, map.line_len(prev_row)), + Bias::Left, + ); + if point.row() < start.row() { + *point.column_mut() = map.column_from_chars(point.row(), goal_column); + } else { + point = DisplayPoint::new(0, 0); + goal_column = 0; } let clip_bias = if point.column() == map.line_len(point.row()) { @@ -62,27 +61,22 @@ pub fn up( pub fn down( map: &DisplaySnapshot, - mut point: DisplayPoint, + start: DisplayPoint, goal: SelectionGoal, ) -> Result<(DisplayPoint, SelectionGoal)> { - let max_point = map.max_point(); - let goal_column = if let SelectionGoal::Column(column) = goal { + let mut 
goal_column = if let SelectionGoal::Column(column) = goal { column } else { - map.column_to_chars(point.row(), point.column()) + map.column_to_chars(start.row(), start.column()) }; - loop { - if point.row() < max_point.row() { - *point.row_mut() += 1; - *point.column_mut() = map.column_from_chars(point.row(), goal_column); - if !map.is_block_line(point.row()) { - break; - } - } else { - point = max_point; - break; - } + let next_row = start.row() + 1; + let mut point = map.clip_point(DisplayPoint::new(next_row, 0), Bias::Right); + if point.row() > start.row() { + *point.column_mut() = map.column_from_chars(point.row(), goal_column); + } else { + point = map.max_point(); + goal_column = map.column_to_chars(point.row(), point.column()) } let clip_bias = if point.column() == map.line_len(point.row()) { @@ -244,7 +238,87 @@ fn char_kind(c: char) -> CharKind { #[cfg(test)] mod tests { use super::*; - use crate::{DisplayMap, MultiBuffer}; + use crate::{Buffer, DisplayMap, ExcerptProperties, MultiBuffer}; + use language::Point; + + #[gpui::test] + fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) { + let family_id = cx.font_cache().load_family(&["Helvetica"]).unwrap(); + let font_id = cx + .font_cache() + .select_font(family_id, &Default::default()) + .unwrap(); + + let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx)); + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(0, 0)..Point::new(1, 4), + header_height: 2, + render_header: None, + }, + cx, + ); + multibuffer.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: Point::new(2, 0)..Point::new(3, 2), + header_height: 3, + render_header: None, + }, + cx, + ); + multibuffer + }); + + let display_map = + cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx)); + + let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn"); + + // Can't move up into the first excerpt's header + assert_eq!( + up(&snapshot, DisplayPoint::new(2, 2), SelectionGoal::Column(2)).unwrap(), + (DisplayPoint::new(2, 0), SelectionGoal::Column(0)), + ); + assert_eq!( + up(&snapshot, DisplayPoint::new(2, 0), SelectionGoal::None).unwrap(), + (DisplayPoint::new(2, 0), SelectionGoal::Column(0)), + ); + + // Move up and down within first excerpt + assert_eq!( + up(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(4)).unwrap(), + (DisplayPoint::new(2, 3), SelectionGoal::Column(4)), + ); + assert_eq!( + down(&snapshot, DisplayPoint::new(2, 3), SelectionGoal::Column(4)).unwrap(), + (DisplayPoint::new(3, 4), SelectionGoal::Column(4)), + ); + + // Move up and down across second excerpt's header + assert_eq!( + up(&snapshot, DisplayPoint::new(7, 5), SelectionGoal::Column(5)).unwrap(), + (DisplayPoint::new(3, 4), SelectionGoal::Column(5)), + ); + assert_eq!( + down(&snapshot, DisplayPoint::new(3, 4), SelectionGoal::Column(5)).unwrap(), + (DisplayPoint::new(7, 5), SelectionGoal::Column(5)), + ); + + // Can't move down off the end + assert_eq!( + down(&snapshot, DisplayPoint::new(8, 0), SelectionGoal::Column(0)).unwrap(), + (DisplayPoint::new(8, 2), SelectionGoal::Column(2)), + ); + assert_eq!( + down(&snapshot, DisplayPoint::new(8, 2), SelectionGoal::Column(2)).unwrap(), + (DisplayPoint::new(8, 2), SelectionGoal::Column(2)), + ); + } #[gpui::test] fn test_prev_next_word_boundary_multibyte(cx: &mut 
gpui::MutableAppContext) { From a293e9c0c52b3aa913434c5569b2b94375eb5c7a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 11:17:06 -0800 Subject: [PATCH 122/196] Suppress unused field warnings --- crates/project/src/fs.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/project/src/fs.rs b/crates/project/src/fs.rs index 85c0e45fb002a606907a16bec3af9c6cb5b59d8f..895d7d4cc1a0b440a4ca92f63ebed1284f025365 100644 --- a/crates/project/src/fs.rs +++ b/crates/project/src/fs.rs @@ -134,6 +134,7 @@ impl Fs for RealFs { } } +#[cfg(any(test, feature = "test-support"))] #[derive(Clone, Debug)] struct FakeFsEntry { metadata: Metadata, From db33e4935a4fde1ad716aaceed61b24c85e73652 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 12:17:47 -0800 Subject: [PATCH 123/196] Implement MultiBuffer::buffer_rows --- crates/editor/src/multi_buffer.rs | 123 ++++++++++++++++++++++++++++-- 1 file changed, 117 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 6c35165eca73f0eb84b51b5543d1feb1353d1e5f..deb476d6c7ab17c19da2eed2245adcdea8e91239 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -111,6 +111,12 @@ struct ExcerptSummary { text: TextSummary, } +pub struct MultiBufferRows<'a> { + header_height: u32, + buffer_row_range: Range, + excerpts: Cursor<'a, Excerpt, Point>, +} + pub struct MultiBufferChunks<'a> { range: Range, excerpts: Cursor<'a, Excerpt, usize>, @@ -1052,6 +1058,32 @@ impl MultiBufferSnapshot { } } + pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> { + let mut excerpts = self.excerpts.cursor::(); + excerpts.seek(&Point::new(start_row, 0), Bias::Right, &()); + if excerpts.item().is_none() { + excerpts.prev(&()); + } + + let mut header_height = 0; + let mut buffer_row_range = 0..0; + if let Some(excerpt) = excerpts.item() { + let overshoot = start_row - excerpts.start().row; + let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer).row; + let excerpt_header_height = excerpt.header_height as u32; + header_height = excerpt_header_height.saturating_sub(overshoot); + buffer_row_range.start = + excerpt_start + overshoot.saturating_sub(excerpt_header_height); + buffer_row_range.end = + excerpt_start + excerpt.text_summary.lines.row + 1 - excerpt_header_height; + } + MultiBufferRows { + header_height, + buffer_row_range, + excerpts, + } + } + pub fn chunks<'a, T: ToOffset>( &'a self, range: Range, @@ -1821,6 +1853,34 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { } } +impl<'a> Iterator for MultiBufferRows<'a> { + type Item = Option; + + fn next(&mut self) -> Option { + loop { + if self.header_height > 0 { + self.header_height -= 1; + return Some(None); + } + if !self.buffer_row_range.is_empty() { + let row = Some(self.buffer_row_range.start); + self.buffer_row_range.start += 1; + return Some(row); + } + self.excerpts.next(&()); + if let Some(excerpt) = self.excerpts.item() { + self.header_height = excerpt.header_height as u32; + self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; + self.buffer_row_range.end = + self.buffer_row_range.start + excerpt.text_summary.lines.row + 1 + - self.header_height; + } else { + return None; + } + } + } +} + impl<'a> MultiBufferChunks<'a> { pub fn offset(&self) -> usize { self.range.start @@ -2009,15 +2069,26 @@ mod tests { fn test_singleton_multibuffer(cx: &mut MutableAppContext) { let buffer = cx.add_model(|cx| Buffer::new(0, 
sample_text(6, 6, 'a'), cx)); let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), buffer.read(cx).text()); + assert_eq!( - multibuffer.read(cx).snapshot(cx).text(), - buffer.read(cx).text() + snapshot.buffer_rows(0).collect::>(), + (0..buffer.read(cx).row_count()) + .map(Some) + .collect::>() ); - buffer.update(cx, |buffer, cx| buffer.edit([1..3], "XXX", cx)); + buffer.update(cx, |buffer, cx| buffer.edit([1..3], "XXX\n", cx)); + let snapshot = multibuffer.read(cx).snapshot(cx); + + assert_eq!(snapshot.text(), buffer.read(cx).text()); assert_eq!( - multibuffer.read(cx).snapshot(cx).text(), - buffer.read(cx).text() + snapshot.buffer_rows(0).collect::>(), + (0..buffer.read(cx).row_count()) + .map(Some) + .collect::>() ); } @@ -2075,8 +2146,9 @@ mod tests { subscription }); + let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!( - multibuffer.read(cx).snapshot(cx).text(), + snapshot.text(), concat!( "\n", // Preserve newlines "\n", // @@ -2091,6 +2163,22 @@ mod tests { "jj" // ) ); + assert_eq!( + snapshot.buffer_rows(0).collect::>(), + &[ + None, + None, + Some(1), + Some(2), + None, + Some(3), + Some(4), + None, + None, + None, + Some(3) + ] + ); { let snapshot = multibuffer.read(cx).read(cx); @@ -2342,17 +2430,25 @@ mod tests { let mut excerpt_starts = Vec::new(); let mut expected_text = String::new(); + let mut expected_buffer_rows = Vec::new(); for (buffer, range, header_height) in &expected_excerpts { let buffer = buffer.read(cx); let buffer_range = range.to_offset(buffer); for _ in 0..*header_height { expected_text.push('\n'); + expected_buffer_rows.push(None); } excerpt_starts.push(TextSummary::from(expected_text.as_str())); expected_text.extend(buffer.text_for_range(buffer_range.clone())); expected_text.push('\n'); + + let buffer_row_range = buffer.offset_to_point(buffer_range.start).row + ..=buffer.offset_to_point(buffer_range.end).row; + for row in buffer_row_range { + expected_buffer_rows.push(Some(row)); + } } // Remove final trailing newline. 
if !expected_excerpts.is_empty() { @@ -2362,6 +2458,21 @@ mod tests { assert_eq!(snapshot.text(), expected_text); log::info!("MultiBuffer text: {:?}", expected_text); + assert_eq!( + snapshot.buffer_rows(0).collect::>(), + expected_buffer_rows, + ); + + for _ in 0..5 { + let start_row = rng.gen_range(0..=expected_buffer_rows.len()); + assert_eq!( + snapshot.buffer_rows(start_row as u32).collect::>(), + &expected_buffer_rows[start_row..], + "buffer_rows({})", + start_row + ); + } + let mut excerpt_starts = excerpt_starts.into_iter(); for (buffer, range, _) in &expected_excerpts { let buffer_id = buffer.id(); From 6444fcd4428e35f28129f6484e3a15227021e659 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 13:53:32 -0800 Subject: [PATCH 124/196] Integrate MultiBuffer::buffer_rows into the display map --- crates/editor/src/display_map/block_map.rs | 7 +- crates/editor/src/display_map/fold_map.rs | 88 +++++++++++++----- crates/editor/src/display_map/wrap_map.rs | 15 ++- crates/editor/src/multi_buffer.rs | 103 ++++++++++++++------- 4 files changed, 146 insertions(+), 67 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index a7183fa474f087937578ad34fa2b50243d64e6a4..620ac97e06bc73bdf51ed91acf36584e70e92184 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1263,6 +1263,7 @@ mod tests { .sort_unstable_by_key(|(id, block)| (block.position.row, block.disposition, *id)); let mut sorted_blocks = sorted_blocks.into_iter().peekable(); + let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::>(); let mut expected_buffer_rows = Vec::new(); let mut expected_text = String::new(); let input_text = wraps_snapshot.text(); @@ -1272,9 +1273,9 @@ mod tests { expected_text.push('\n'); } - let buffer_row = wraps_snapshot + let buffer_row = input_buffer_rows[wraps_snapshot .to_point(WrapPoint::new(row, 0), Bias::Left) - .row; + .row as usize]; while let Some((_, block)) = sorted_blocks.peek() { if block.position.row == row && block.disposition == BlockDisposition::Above { @@ -1290,7 +1291,7 @@ mod tests { } let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0; - expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) }); + expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row }); expected_text.push_str(input_line); while let Some((_, block)) = sorted_blocks.peek() { diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 4f7837a2eb030966aa30ca172882276aac68ba6b..60dd40c85325cd92cbb6eefa0ea912139d5d4f7e 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -1,4 +1,7 @@ -use crate::{Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset}; +use crate::{ + multi_buffer::MultiBufferRows, Anchor, AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, + ToOffset, +}; use language::{Chunk, Edit, Point, PointUtf16, TextSummary}; use parking_lot::Mutex; use std::{ @@ -563,9 +566,18 @@ impl FoldSnapshot { } let fold_point = FoldPoint::new(start_row, 0); - let mut cursor = self.transforms.cursor(); + let mut cursor = self.transforms.cursor::<(FoldPoint, Point)>(); cursor.seek(&fold_point, Bias::Left, &()); - FoldBufferRows { fold_point, cursor } + + let overshoot = fold_point.0 - cursor.start().0 .0; + let buffer_point = cursor.start().1 + overshoot; + let input_buffer_rows = 
self.buffer_snapshot.buffer_rows(buffer_point.row); + + FoldBufferRows { + fold_point, + input_buffer_rows, + cursor, + } } pub fn max_point(&self) -> FoldPoint { @@ -897,26 +909,30 @@ impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { pub struct FoldBufferRows<'a> { cursor: Cursor<'a, Transform, (FoldPoint, Point)>, + input_buffer_rows: MultiBufferRows<'a>, fold_point: FoldPoint, } impl<'a> Iterator for FoldBufferRows<'a> { - type Item = u32; + type Item = Option; fn next(&mut self) -> Option { + let mut traversed_fold = false; while self.fold_point > self.cursor.end(&()).0 { self.cursor.next(&()); + traversed_fold = true; if self.cursor.item().is_none() { - // TODO: Return a bool from next? break; } } if self.cursor.item().is_some() { - let overshoot = self.fold_point.0 - self.cursor.start().0 .0; - let buffer_point = self.cursor.start().1 + overshoot; + if traversed_fold { + self.input_buffer_rows.seek(self.cursor.start().1.row); + self.input_buffer_rows.next(); + } *self.fold_point.row_mut() += 1; - Some(buffer_point.row) + self.input_buffer_rows.next() } else { None } @@ -1282,20 +1298,38 @@ mod tests { snapshot_edits.push((snapshot.clone(), edits)); let mut expected_text: String = buffer_snapshot.text().to_string(); - let mut expected_buffer_rows = Vec::new(); - let mut next_row = buffer_snapshot.max_point().row; for fold_range in map.merged_fold_ranges().into_iter().rev() { - let fold_start = buffer_snapshot.offset_to_point(fold_range.start); - let fold_end = buffer_snapshot.offset_to_point(fold_range.end); - expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev()); - next_row = fold_start.row; - expected_text.replace_range(fold_range.start..fold_range.end, "…"); } - expected_buffer_rows.extend((0..=next_row).rev()); - expected_buffer_rows.reverse(); assert_eq!(snapshot.text(), expected_text); + log::info!( + "fold text {:?} ({} lines)", + expected_text, + expected_text.matches('\n').count() + 1 + ); + + let mut prev_row = 0; + let mut expected_buffer_rows = Vec::new(); + for fold_range in map.merged_fold_ranges().into_iter() { + let fold_start = buffer_snapshot.offset_to_point(fold_range.start).row; + let fold_end = buffer_snapshot.offset_to_point(fold_range.end).row; + expected_buffer_rows.extend( + buffer_snapshot + .buffer_rows(prev_row) + .take((1 + fold_start - prev_row) as usize), + ); + prev_row = 1 + fold_end; + } + expected_buffer_rows.extend(buffer_snapshot.buffer_rows(prev_row)); + + assert_eq!( + expected_buffer_rows.len(), + expected_text.matches('\n').count() + 1, + "wrong expected buffer rows {:?}. 
text: {:?}", + expected_buffer_rows, + expected_text + ); for (output_row, line) in expected_text.lines().enumerate() { let line_len = snapshot.line_len(output_row as u32); @@ -1373,14 +1407,19 @@ mod tests { ); } - for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() { - let fold_row = Point::new(*buffer_row, 0) - .to_fold_point(&snapshot, Right) + let mut fold_row = 0; + while fold_row < expected_buffer_rows.len() as u32 { + fold_row = snapshot + .clip_point(FoldPoint::new(fold_row, 0), Bias::Right) .row(); + eprintln!("fold_row: {} of {}", fold_row, expected_buffer_rows.len()); assert_eq!( snapshot.buffer_rows(fold_row).collect::>(), - expected_buffer_rows[idx..], + expected_buffer_rows[(fold_row as usize)..], + "wrong buffer rows starting at fold row {}", + fold_row, ); + fold_row += 1; } for fold_range in map.merged_fold_ranges() { @@ -1470,8 +1509,11 @@ mod tests { let (snapshot, _) = map.read(buffer_snapshot.clone(), vec![]); assert_eq!(snapshot.text(), "aa…cccc\nd…eeeee\nffffff\n"); - assert_eq!(snapshot.buffer_rows(0).collect::>(), [0, 3, 5, 6]); - assert_eq!(snapshot.buffer_rows(3).collect::>(), [6]); + assert_eq!( + snapshot.buffer_rows(0).collect::>(), + [Some(0), Some(3), Some(5), Some(6)] + ); + assert_eq!(snapshot.buffer_rows(3).collect::>(), [Some(6)]); } impl FoldMap { diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index bee2a7f8dee4652e076aba869a7af4d583b1915f..4a2510fb823dc128c8c547d9c0aae44049a7c2b1 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -64,7 +64,7 @@ pub struct WrapChunks<'a> { pub struct WrapBufferRows<'a> { input_buffer_rows: fold_map::FoldBufferRows<'a>, - input_buffer_row: u32, + input_buffer_row: Option, output_row: u32, soft_wrapped: bool, max_output_row: u32, @@ -751,22 +751,19 @@ impl WrapSnapshot { } } + let input_buffer_rows = self.buffer_snapshot().buffer_rows(0).collect::>(); let mut expected_buffer_rows = Vec::new(); - let mut buffer_row = 0; let mut prev_tab_row = 0; for display_row in 0..=self.max_point().row() { let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0)); - let soft_wrapped; - if tab_point.row() == prev_tab_row { - soft_wrapped = display_row != 0; + if tab_point.row() == prev_tab_row && display_row != 0 { + expected_buffer_rows.push(None); } else { let fold_point = self.tab_snapshot.to_fold_point(tab_point, Bias::Left).0; let buffer_point = fold_point.to_buffer_point(&self.tab_snapshot.fold_snapshot); - buffer_row = buffer_point.row; + expected_buffer_rows.push(input_buffer_rows[buffer_point.row as usize]); prev_tab_row = tab_point.row(); - soft_wrapped = false; } - expected_buffer_rows.push(if soft_wrapped { None } else { Some(buffer_row) }); } for start_display_row in 0..expected_buffer_rows.len() { @@ -866,7 +863,7 @@ impl<'a> Iterator for WrapBufferRows<'a> { self.soft_wrapped = true; } - Some(if soft_wrapped { None } else { Some(buffer_row) }) + Some(if soft_wrapped { None } else { buffer_row }) } } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index deb476d6c7ab17c19da2eed2245adcdea8e91239..d907e5dbf4e4f9cdb51b2e196615bca3f6419f4c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1059,29 +1059,13 @@ impl MultiBufferSnapshot { } pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> { - let mut excerpts = self.excerpts.cursor::(); - excerpts.seek(&Point::new(start_row, 0), Bias::Right, &()); - if 
excerpts.item().is_none() { - excerpts.prev(&()); - } - - let mut header_height = 0; - let mut buffer_row_range = 0..0; - if let Some(excerpt) = excerpts.item() { - let overshoot = start_row - excerpts.start().row; - let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer).row; - let excerpt_header_height = excerpt.header_height as u32; - header_height = excerpt_header_height.saturating_sub(overshoot); - buffer_row_range.start = - excerpt_start + overshoot.saturating_sub(excerpt_header_height); - buffer_row_range.end = - excerpt_start + excerpt.text_summary.lines.row + 1 - excerpt_header_height; - } - MultiBufferRows { - header_height, - buffer_row_range, - excerpts, - } + let mut result = MultiBufferRows { + header_height: 0, + buffer_row_range: 0..0, + excerpts: self.excerpts.cursor(), + }; + result.seek(start_row); + result } pub fn chunks<'a, T: ToOffset>( @@ -1853,6 +1837,36 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { } } +impl<'a> MultiBufferRows<'a> { + pub fn seek(&mut self, row: u32) { + self.header_height = 0; + self.buffer_row_range = 0..0; + + self.excerpts + .seek_forward(&Point::new(row, 0), Bias::Right, &()); + if self.excerpts.item().is_none() { + self.excerpts.prev(&()); + + if self.excerpts.item().is_none() && row == 0 { + self.buffer_row_range = 0..1; + return; + } + } + + if let Some(excerpt) = self.excerpts.item() { + let overshoot = row - self.excerpts.start().row; + let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer).row; + let excerpt_header_height = excerpt.header_height as u32; + + self.header_height = excerpt_header_height.saturating_sub(overshoot); + self.buffer_row_range.start = + excerpt_start + overshoot.saturating_sub(excerpt_header_height); + self.buffer_row_range.end = + excerpt_start + excerpt.text_summary.lines.row + 1 - excerpt_header_height; + } + } +} + impl<'a> Iterator for MultiBufferRows<'a> { type Item = Option; @@ -1867,16 +1881,14 @@ impl<'a> Iterator for MultiBufferRows<'a> { self.buffer_row_range.start += 1; return Some(row); } + self.excerpts.item()?; self.excerpts.next(&()); - if let Some(excerpt) = self.excerpts.item() { - self.header_height = excerpt.header_height as u32; - self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; - self.buffer_row_range.end = - self.buffer_row_range.start + excerpt.text_summary.lines.row + 1 - - self.header_height; - } else { - return None; - } + let excerpt = self.excerpts.item()?; + self.header_height = excerpt.header_height as u32; + self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; + self.buffer_row_range.end = + self.buffer_row_range.start + excerpt.text_summary.lines.row + 1 + - self.header_height; } } } @@ -2179,6 +2191,23 @@ mod tests { Some(3) ] ); + assert_eq!( + snapshot.buffer_rows(2).collect::>(), + &[ + Some(1), + Some(2), + None, + Some(3), + Some(4), + None, + None, + None, + Some(3) + ] + ); + assert_eq!(snapshot.buffer_rows(10).collect::>(), &[Some(3)]); + assert_eq!(snapshot.buffer_rows(11).collect::>(), &[]); + assert_eq!(snapshot.buffer_rows(12).collect::>(), &[]); { let snapshot = multibuffer.read(cx).read(cx); @@ -2283,6 +2312,16 @@ mod tests { ); } + #[gpui::test] + fn test_empty_excerpt_buffer(cx: &mut MutableAppContext) { + let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); + + let snapshot = multibuffer.read(cx).snapshot(cx); + assert_eq!(snapshot.text(), ""); + assert_eq!(snapshot.buffer_rows(0).collect::>(), &[Some(0)]); + 
assert_eq!(snapshot.buffer_rows(1).collect::>(), &[]); + } + #[gpui::test] fn test_singleton_multibuffer_anchors(cx: &mut MutableAppContext) { let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); From 9cd4e5ba0431e89d9a99a2087ed468dc62442adb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 14:14:22 -0800 Subject: [PATCH 125/196] Transfer focus from ProjectDiagnostics view to its editor --- crates/diagnostics/src/diagnostics.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 3181f3b3ddc71f5d6ac8f86090c2cd6841522211..0ef4fe8663d7603a5b7597713e1cada4899d2576 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -52,6 +52,10 @@ impl View for ProjectDiagnosticsEditor { fn render(&mut self, _: &mut RenderContext) -> ElementBox { ChildView::new(self.editor.id()).boxed() } + + fn on_focus(&mut self, cx: &mut ViewContext) { + cx.focus(&self.editor); + } } impl ProjectDiagnosticsEditor { From 31eeffa5a7e53a9b2b26ca45b13203a5deb0e5fd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 14:20:01 -0800 Subject: [PATCH 126/196] Autoscroll after inserting blocks --- crates/editor/src/editor.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ff1922c8b1c8ddaee268487f6226b4b47423ab70..18405817f9f184e3e402133d7fdd49a9bf83e163 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3432,8 +3432,11 @@ impl Editor { where P: ToOffset + Clone, { - self.display_map - .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)) + let blocks = self + .display_map + .update(cx, |display_map, cx| display_map.insert_blocks(blocks, cx)); + self.request_autoscroll(Autoscroll::Fit, cx); + blocks } pub fn longest_row(&self, cx: &mut MutableAppContext) -> u32 { From ae147a379dd8b95ba422d9e953b52fee7cfd10e9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 16:05:28 -0800 Subject: [PATCH 127/196] Don't terminate on an empty input chunk in `ExcerptChunks` --- crates/editor/src/multi_buffer.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index d907e5dbf4e4f9cdb51b2e196615bca3f6419f4c..902f9e4bc136c1b3c0a4e5fb8d9780eeec854ed5 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2018,9 +2018,7 @@ impl<'a> Iterator for ExcerptChunks<'a> { } if let Some(chunk) = self.content_chunks.next() { - if !chunk.text.is_empty() { - return Some(chunk); - } + return Some(chunk); } if self.footer_height > 0 { From fb492a9fb853888eae02084528d766def64a00ce Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 16:36:33 -0800 Subject: [PATCH 128/196] Correctly incorporate editor settings into diagnostic header rendering --- crates/chat_panel/src/chat_panel.rs | 4 +-- crates/diagnostics/src/diagnostics.rs | 30 ++++------------ crates/editor/src/editor.rs | 39 +++++++++++---------- crates/editor/src/element.rs | 4 +-- crates/file_finder/src/file_finder.rs | 3 +- crates/go_to_line/src/go_to_line.rs | 5 ++- crates/server/src/rpc.rs | 3 +- crates/theme_selector/src/theme_selector.rs | 4 +-- 8 files changed, 37 insertions(+), 55 deletions(-) diff --git a/crates/chat_panel/src/chat_panel.rs b/crates/chat_panel/src/chat_panel.rs index 
7aca6daa956bc709f206e39e822696405345859a..0ec3884a8569854aef07a31c5642ea8e36fd5d5a 100644 --- a/crates/chat_panel/src/chat_panel.rs +++ b/crates/chat_panel/src/chat_panel.rs @@ -13,7 +13,7 @@ use gpui::{ ViewContext, ViewHandle, }; use postage::{prelude::Stream, watch}; -use std::{rc::Rc, sync::Arc}; +use std::sync::Arc; use time::{OffsetDateTime, UtcOffset}; use util::{ResultExt, TryFutureExt}; use workspace::Settings; @@ -56,7 +56,7 @@ impl ChatPanel { 4, { let settings = settings.clone(); - Rc::new(move |_| { + Arc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 0ef4fe8663d7603a5b7597713e1cada4899d2576..06b8a785b4972eeac6decfeb2d763a9472fa68b4 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,7 +1,5 @@ -use std::{cmp, sync::Arc}; - use editor::{ - diagnostic_block_renderer, diagnostic_style, + diagnostic_block_renderer, diagnostic_header_renderer, display_map::{BlockDisposition, BlockProperties}, Editor, ExcerptProperties, MultiBuffer, }; @@ -12,6 +10,7 @@ use gpui::{ use language::Point; use postage::watch; use project::Project; +use std::cmp; use workspace::Workspace; action!(Toggle); @@ -129,27 +128,10 @@ impl workspace::Item for ProjectDiagnostics { .count() as u8 + 1, - render_header: Some(Arc::new({ - let settings = settings.clone(); - - move |_| { - let editor_style = - &settings.borrow().theme.editor; - let mut text_style = editor_style.text.clone(); - text_style.color = diagnostic_style( - primary_diagnostic.severity, - true, - &editor_style, - ) - .text; - - Text::new( - primary_diagnostic.message.clone(), - text_style, - ) - .boxed() - } - })), + render_header: Some(diagnostic_header_renderer( + primary_diagnostic, + build_settings.clone(), + )), }, excerpts_cx, ); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 18405817f9f184e3e402133d7fdd49a9bf83e163..0ee13fa413e7f4b22b490cbb1f3158358ea94b52 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -28,7 +28,9 @@ use language::{ TransactionId, }; pub use multi_buffer::{Anchor, ExcerptProperties, MultiBuffer}; -use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint}; +use multi_buffer::{ + AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, RenderHeaderFn, ToOffset, ToPoint, +}; use postage::watch; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -39,7 +41,6 @@ use std::{ iter::{self, FromIterator}, mem, ops::{Deref, Range, RangeInclusive, Sub}, - rc::Rc, sync::Arc, time::{Duration, Instant}, }; @@ -356,7 +357,7 @@ pub enum SoftWrap { Column(u32), } -type BuildSettings = Rc EditorSettings>; +type BuildSettings = Arc EditorSettings>; pub struct Editor { handle: WeakViewHandle, @@ -3771,24 +3772,26 @@ pub fn diagnostic_block_renderer( build_settings: BuildSettings, ) -> RenderBlock { Arc::new(move |cx: &BlockContext| { - let diagnostic = diagnostic.clone(); let settings = build_settings(cx); - render_diagnostic(diagnostic, &settings.style, is_valid, cx.anchor_x) + let mut text_style = settings.style.text.clone(); + text_style.color = diagnostic_style(diagnostic.severity, is_valid, &settings.style).text; + Text::new(diagnostic.message.clone(), text_style) + .contained() + .with_margin_left(cx.anchor_x) + .boxed() }) } -fn render_diagnostic( +pub fn diagnostic_header_renderer( diagnostic: Diagnostic, - style: &EditorStyle, - valid: 
bool, - anchor_x: f32, -) -> ElementBox { - let mut text_style = style.text.clone(); - text_style.color = diagnostic_style(diagnostic.severity, valid, &style).text; - Text::new(diagnostic.message, text_style) - .contained() - .with_margin_left(anchor_x) - .boxed() + build_settings: BuildSettings, +) -> RenderHeaderFn { + Arc::new(move |cx| { + let settings = build_settings(cx); + let mut text_style = settings.style.text.clone(); + text_style.color = diagnostic_style(diagnostic.severity, true, &settings.style).text; + Text::new(diagnostic.message.clone(), text_style).boxed() + }) } pub fn diagnostic_style( @@ -3813,7 +3816,7 @@ pub fn settings_builder( buffer: WeakModelHandle, settings: watch::Receiver, ) -> BuildSettings { - Rc::new(move |cx| { + Arc::new(move |cx| { let settings = settings.borrow(); let font_cache = cx.font_cache(); let font_family_id = settings.buffer_font_family; @@ -6106,7 +6109,7 @@ mod tests { settings: EditorSettings, cx: &mut ViewContext, ) -> Editor { - Editor::for_buffer(buffer, Rc::new(move |_| settings.clone()), cx) + Editor::for_buffer(buffer, Arc::new(move |_| settings.clone()), cx) } } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index f0f07426fd78f858a843f50e63a3bd25cd47bd07..0878071cd30dd2eb6641321577bb69db2c3c2226 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1179,7 +1179,7 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: f32) -> f32 { mod tests { use super::*; use crate::{Editor, EditorSettings, MultiBuffer}; - use std::rc::Rc; + use std::sync::Arc; use util::test::sample_text; #[gpui::test] @@ -1191,7 +1191,7 @@ mod tests { buffer, { let settings = settings.clone(); - Rc::new(move |_| settings.clone()) + Arc::new(move |_| settings.clone()) }, cx, ) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index fd1a60041faaa510de1c88f1ec3d95ad2d4f0379..87d02ea31f7f45f6be6f9a872a3f0d289227fb57 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -16,7 +16,6 @@ use project::{Project, ProjectPath}; use std::{ cmp, path::Path, - rc::Rc, sync::{ atomic::{self, AtomicBool}, Arc, @@ -271,7 +270,7 @@ impl FileFinder { Editor::single_line( { let settings = settings.clone(); - Rc::new(move |_| { + Arc::new(move |_| { let settings = settings.borrow(); EditorSettings { style: settings.theme.selector.input_editor.as_editor(), diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index a037b78d924804377bb8052389adb696d11db61d..cf965ebf1b398816c87862e4de9db388998e5615 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -1,11 +1,10 @@ -use std::rc::Rc; - use editor::{display_map::ToDisplayPoint, Autoscroll, Editor, EditorSettings}; use gpui::{ action, elements::*, geometry::vector::Vector2F, keymap::Binding, Axis, Entity, MutableAppContext, RenderContext, View, ViewContext, ViewHandle, }; use postage::watch; +use std::sync::Arc; use text::{Bias, Point, Selection}; use workspace::{Settings, Workspace}; @@ -51,7 +50,7 @@ impl GoToLine { Editor::single_line( { let settings = settings.clone(); - Rc::new(move |_| { + Arc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 4ca7b34717cea15f8ce55283a41528c5d55bd0c4..264d684beb0c48764d67101e93993bac0ad8e2d4 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -932,7 +932,6 @@ mod 
tests { use std::{ ops::Deref, path::Path, - rc::Rc, sync::{ atomic::{AtomicBool, Ordering::SeqCst}, Arc, @@ -1047,7 +1046,7 @@ mod tests { .unwrap(); let editor_b = cx_b.add_view(window_b, |cx| { - Editor::for_buffer(buffer_b, Rc::new(|cx| EditorSettings::test(cx)), cx) + Editor::for_buffer(buffer_b, Arc::new(|cx| EditorSettings::test(cx)), cx) }); // TODO // // Create a selection set as client B and see that selection set as client A. diff --git a/crates/theme_selector/src/theme_selector.rs b/crates/theme_selector/src/theme_selector.rs index b611330afd93d2896c4b2a65b67c3ba86cb595d6..005d66ad41f3c2643eb714e4024aae69e0687132 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -9,7 +9,7 @@ use gpui::{ }; use parking_lot::Mutex; use postage::watch; -use std::{cmp, rc::Rc, sync::Arc}; +use std::{cmp, sync::Arc}; use theme::ThemeRegistry; use workspace::{Settings, Workspace}; @@ -64,7 +64,7 @@ impl ThemeSelector { Editor::single_line( { let settings = settings.clone(); - Rc::new(move |_| { + Arc::new(move |_| { let settings = settings.borrow(); EditorSettings { tab_size: settings.tab_size, From 528d64d3cc5b9ffb3f4396021e561ac772dae08b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 16 Dec 2021 18:09:12 -0800 Subject: [PATCH 129/196] WIP - Improve project diagnostic context rendering --- Cargo.lock | 1 + crates/diagnostics/Cargo.toml | 7 + crates/diagnostics/src/diagnostics.rs | 285 +++++++++++++++++--------- crates/editor/src/editor.rs | 10 +- crates/language/src/diagnostic_set.rs | 36 ++-- 5 files changed, 225 insertions(+), 114 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 987bd265728de9fdf8aa3207f7114cc8b4275200..3d1af6fbb4759a0cce9d3e1ae2e5dd1c9afd0309 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1421,6 +1421,7 @@ dependencies = [ "language", "postage", "project", + "unindent", "workspace", ] diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index eebe4b209159e0dd19b237659f3bf229758d7998..a3d75b21f9c2d7002d19b793844a8911754f134c 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -15,3 +15,10 @@ gpui = { path = "../gpui" } project = { path = "../project" } workspace = { path = "../workspace" } postage = { version = "0.4", features = ["futures-traits"] } + +[dev-dependencies] +unindent = "0.1" +editor = { path = "../editor", features = ["test-support"] } +language = { path = "../language", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } +workspace = { path = "../workspace", features = ["test-support"] } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 06b8a785b4972eeac6decfeb2d763a9472fa68b4..60ee3935f674dce2610765b3e3fccc630eed3389 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,16 +1,16 @@ use editor::{ - diagnostic_block_renderer, diagnostic_header_renderer, + context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer, display_map::{BlockDisposition, BlockProperties}, - Editor, ExcerptProperties, MultiBuffer, + BuildSettings, Editor, ExcerptProperties, MultiBuffer, }; use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, RenderContext, View, ViewContext, ViewHandle, }; -use language::Point; +use language::{Bias, Buffer, Point}; use postage::watch; use project::Project; -use std::cmp; +use std::ops::Range; use workspace::Workspace; action!(Toggle); @@ 
-27,6 +27,7 @@ struct ProjectDiagnostics { struct ProjectDiagnosticsEditor { editor: ViewHandle, excerpts: ModelHandle, + build_settings: BuildSettings, } impl ProjectDiagnostics { @@ -58,10 +59,114 @@ impl View for ProjectDiagnosticsEditor { } impl ProjectDiagnosticsEditor { + fn new( + replica_id: u16, + settings: watch::Receiver, + cx: &mut ViewContext, + ) -> Self { + let excerpts = cx.add_model(|_| MultiBuffer::new(replica_id)); + let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); + let editor = + cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx)); + Self { + excerpts, + editor, + build_settings, + } + } + fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { let diagnostics = cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone())); workspace.add_item(diagnostics, cx); } + + fn populate_excerpts(&mut self, buffer: ModelHandle, cx: &mut ViewContext) { + let mut blocks = Vec::new(); + let snapshot = buffer.read(cx).snapshot(); + + let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { + for group in snapshot.diagnostic_groups::() { + let mut pending_range: Option<(Range, usize)> = None; + let mut is_first_excerpt = true; + for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { + if let Some((range, start_ix)) = &mut pending_range { + if let Some(entry) = entry { + if entry.range.start.row <= range.end.row + 1 { + range.end = range.end.max(entry.range.end); + continue; + } + } + + let excerpt_start = Point::new(range.start.row.saturating_sub(1), 0); + let excerpt_end = snapshot + .clip_point(Point::new(range.end.row + 1, u32::MAX), Bias::Left); + + let mut excerpt = ExcerptProperties { + buffer: &buffer, + range: excerpt_start..excerpt_end, + header_height: 0, + render_header: None, + }; + + if is_first_excerpt { + let primary = &group.entries[group.primary_ix].diagnostic; + excerpt.header_height = primary.message.matches('\n').count() as u8 + 1; + excerpt.render_header = Some(diagnostic_header_renderer( + primary.clone(), + self.build_settings.clone(), + )); + } else { + excerpt.header_height = 1; + excerpt.render_header = + Some(context_header_renderer(self.build_settings.clone())); + } + + is_first_excerpt = false; + let excerpt_id = excerpts.push_excerpt(excerpt, excerpts_cx); + for entry in &group.entries[*start_ix..ix] { + if !entry.diagnostic.is_primary { + let buffer_anchor = snapshot.anchor_before(entry.range.start); + blocks.push(BlockProperties { + position: (excerpt_id.clone(), buffer_anchor), + height: entry.diagnostic.message.matches('\n').count() as u8 + + 1, + render: diagnostic_block_renderer( + entry.diagnostic.clone(), + true, + self.build_settings.clone(), + ), + disposition: BlockDisposition::Below, + }); + } + } + + pending_range.take(); + } + + if let Some(entry) = entry { + pending_range = Some((entry.range.clone(), ix)); + } + } + } + + excerpts.snapshot(excerpts_cx) + }); + + self.editor.update(cx, |editor, cx| { + editor.insert_blocks( + blocks.into_iter().map(|block| { + let (excerpt_id, text_anchor) = block.position; + BlockProperties { + position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), + height: block.height, + render: block.render, + disposition: block.disposition, + } + }), + cx, + ); + }); + } } impl workspace::Item for ProjectDiagnostics { @@ -73,113 +178,27 @@ impl workspace::Item for ProjectDiagnostics { cx: &mut ViewContext, ) -> Self::View { let project = 
handle.read(cx).project.clone(); - let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id(cx))); - let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); - let editor = - cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx)); - let project_paths = project .read(cx) .diagnostic_summaries(cx) .map(|e| e.0) .collect::>(); - cx.spawn(|this, mut cx| { + cx.spawn(|view, mut cx| { let project = project.clone(); async move { for project_path in project_paths { let buffer = project .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) .await?; - let snapshot = buffer.read_with(&cx, |b, _| b.snapshot()); - - this.update(&mut cx, |this, cx| { - let mut blocks = Vec::new(); - let excerpts_snapshot = - this.excerpts.update(cx, |excerpts, excerpts_cx| { - for group in snapshot.diagnostic_groups::() { - let excerpt_start = cmp::min( - group.primary.range.start.row, - group - .supporting - .first() - .map_or(u32::MAX, |entry| entry.range.start.row), - ); - let excerpt_end = cmp::max( - group.primary.range.end.row, - group - .supporting - .last() - .map_or(0, |entry| entry.range.end.row), - ); - - let primary_diagnostic = group.primary.diagnostic; - let excerpt_id = excerpts.push_excerpt( - ExcerptProperties { - buffer: &buffer, - range: Point::new(excerpt_start, 0) - ..Point::new( - excerpt_end, - snapshot.line_len(excerpt_end), - ), - header_height: primary_diagnostic - .message - .matches('\n') - .count() - as u8 - + 1, - render_header: Some(diagnostic_header_renderer( - primary_diagnostic, - build_settings.clone(), - )), - }, - excerpts_cx, - ); - - for entry in group.supporting { - let buffer_anchor = - snapshot.anchor_before(entry.range.start); - blocks.push(BlockProperties { - position: (excerpt_id.clone(), buffer_anchor), - height: entry.diagnostic.message.matches('\n').count() - as u8 - + 1, - render: diagnostic_block_renderer( - entry.diagnostic, - true, - build_settings.clone(), - ), - disposition: BlockDisposition::Below, - }); - } - } - - excerpts.snapshot(excerpts_cx) - }); - - this.editor.update(cx, |editor, cx| { - editor.insert_blocks( - blocks.into_iter().map(|block| { - let (excerpt_id, text_anchor) = block.position; - BlockProperties { - position: excerpts_snapshot - .anchor_in_excerpt(excerpt_id, text_anchor), - height: block.height, - render: block.render, - disposition: block.disposition, - } - }), - cx, - ); - }); - }) + view.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx)) } Result::Ok::<_, anyhow::Error>(()) } }) .detach(); - ProjectDiagnosticsEditor { editor, excerpts } + ProjectDiagnosticsEditor::new(project.read(cx).replica_id(cx), settings, cx) } fn project_path(&self) -> Option { @@ -212,3 +231,83 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { todo!() } } + +#[cfg(test)] +mod tests { + use super::*; + use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16}; + use unindent::Unindent as _; + use workspace::WorkspaceParams; + + #[gpui::test] + fn test_diagnostics(cx: &mut MutableAppContext) { + let settings = WorkspaceParams::test(cx).settings; + let view = cx.add_view(Default::default(), |cx| { + ProjectDiagnosticsEditor::new(0, settings, cx) + }); + + let text = " + fn main() { + let x = vec![]; + let y = vec![]; + a(x); + b(y); + c(); + d(y); + e(x); + } + " + .unindent(); + + let buffer = cx.add_model(|cx| { + let mut buffer = Buffer::new(0, text, cx); + buffer + .update_diagnostics( + None, + vec![ + DiagnosticEntry { + range: 
PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + diagnostic: Diagnostic { + message: + "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + group_id: 0, + ..Default::default() + }, + }, + DiagnosticEntry { + range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9), + diagnostic: Diagnostic { + message: + "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + group_id: 1, + ..Default::default() + }, + }, + DiagnosticEntry { + range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + group_id: 0, + ..Default::default() + }, + }, + ], + cx, + ) + .unwrap(); + buffer + }); + + view.update(cx, |view, cx| { + view.populate_excerpts(buffer, cx); + }); + } +} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0ee13fa413e7f4b22b490cbb1f3158358ea94b52..337e4285aef3e5f16ac267562a85ef3b528a949f 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -357,7 +357,7 @@ pub enum SoftWrap { Column(u32), } -type BuildSettings = Arc EditorSettings>; +pub type BuildSettings = Arc EditorSettings>; pub struct Editor { handle: WeakViewHandle, @@ -3794,6 +3794,14 @@ pub fn diagnostic_header_renderer( }) } +pub fn context_header_renderer(build_settings: BuildSettings) -> RenderHeaderFn { + Arc::new(move |cx| { + let settings = build_settings(cx); + let text_style = settings.style.text.clone(); + Text::new("...".to_string(), text_style).boxed() + }) +} + pub fn diagnostic_style( severity: DiagnosticSeverity, valid: bool, diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 58ef94a0d552a588a05b4892b89b2504f78fbdc8..0918424ca0ee83e9df1c2f94bd7e3d85e93470a2 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -20,8 +20,8 @@ pub struct DiagnosticEntry { } pub struct DiagnosticGroup { - pub primary: DiagnosticEntry, - pub supporting: Vec>, + pub entries: Vec>, + pub primary_ix: usize, } #[derive(Clone, Debug)] @@ -108,33 +108,29 @@ impl DiagnosticSet { where O: FromAnchor + Ord + Copy, { - let mut groups = - HashMap::>, Vec>)>::default(); - + let mut groups = HashMap::default(); for entry in self.diagnostics.iter() { let entry = entry.resolve(buffer); - let (ref mut primary, ref mut supporting) = groups + groups .entry(entry.diagnostic.group_id) - .or_insert((None, Vec::new())); - if entry.diagnostic.is_primary { - *primary = Some(entry); - } else { - supporting.push(entry); - } + .or_insert(Vec::new()) + .push(entry); } let mut groups = groups .into_values() - .map(|(primary, mut supporting)| { - supporting.sort_unstable_by_key(|entry| entry.range.start); - DiagnosticGroup { - primary: primary.unwrap(), - supporting, - } + .filter_map(|mut entries| { + entries.sort_unstable_by_key(|entry| entry.range.start); + entries + .iter() + .position(|entry| entry.diagnostic.is_primary) + .map(|primary_ix| DiagnosticGroup { + entries, + primary_ix, + }) }) .collect::>(); - groups.sort_unstable_by_key(|group| group.primary.range.start); - + groups.sort_unstable_by_key(|group| group.entries[group.primary_ix].range.start); groups } From 63f171200e46235f1076673a5079e7876567acda Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 
12:16:09 +0100 Subject: [PATCH 130/196] Enhance diagnostic unit test and correctly display primary diagnostic That is, if the diagnostic has more than one line we will display the first line in the header and all the other message lines at the error location. --- crates/diagnostics/src/diagnostics.rs | 63 +++++++++++++++++++-------- 1 file changed, 45 insertions(+), 18 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 60ee3935f674dce2610765b3e3fccc630eed3389..4ff3a67f4692a2087c5a83d5ec75dbbaf2894093 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -110,9 +110,12 @@ impl ProjectDiagnosticsEditor { if is_first_excerpt { let primary = &group.entries[group.primary_ix].diagnostic; - excerpt.header_height = primary.message.matches('\n').count() as u8 + 1; + let mut header = primary.clone(); + header.message = + primary.message.split('\n').next().unwrap().to_string(); + excerpt.header_height = 1; excerpt.render_header = Some(diagnostic_header_renderer( - primary.clone(), + header, self.build_settings.clone(), )); } else { @@ -124,14 +127,20 @@ impl ProjectDiagnosticsEditor { is_first_excerpt = false; let excerpt_id = excerpts.push_excerpt(excerpt, excerpts_cx); for entry in &group.entries[*start_ix..ix] { - if !entry.diagnostic.is_primary { + let mut diagnostic = entry.diagnostic.clone(); + if diagnostic.is_primary { + let mut lines = entry.diagnostic.message.split('\n'); + lines.next(); + diagnostic.message = lines.collect(); + } + + if !diagnostic.message.is_empty() { let buffer_anchor = snapshot.anchor_before(entry.range.start); blocks.push(BlockProperties { position: (excerpt_id.clone(), buffer_anchor), - height: entry.diagnostic.message.matches('\n').count() as u8 - + 1, + height: diagnostic.message.matches('\n').count() as u8 + 1, render: diagnostic_block_renderer( - entry.diagnostic.clone(), + diagnostic, true, self.build_settings.clone(), ), @@ -252,7 +261,10 @@ mod tests { let y = vec![]; a(x); b(y); - c(); + // comment 1 + // comment 2 + // comment 3 + // comment 4 d(y); e(x); } @@ -266,10 +278,10 @@ mod tests { None, vec![ DiagnosticEntry { - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9), diagnostic: Diagnostic { message: - "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" + "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" .to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, @@ -278,23 +290,21 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9), + range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7), diagnostic: Diagnostic { - message: - "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" - .to_string(), + message: "value moved here".to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, - group_id: 1, + group_id: 0, ..Default::default() }, }, DiagnosticEntry { - range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7), + range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7), diagnostic: Diagnostic { - message: "value moved here".to_string(), - severity: DiagnosticSeverity::INFORMATION, - is_primary: false, + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, group_id: 0, ..Default::default() }, @@ -308,6 +318,23 @@ mod tests { view.update(cx, |view, cx| { 
view.populate_excerpts(buffer, cx); + assert_eq!( + view.excerpts.read(cx).read(cx).text(), + concat!( + "\n", + " let x = vec![];\n", + " let y = vec![];\n", + " a(x);\n", + "\n", + " a(x);\n", + " b(y);\n", + " // comment 1\n", + "\n", + " // comment 3\n", + " // comment 4\n", + " d(y);" + ) + ); }); } } From 3914d1d07219f75d2f5ce2599206bbb0e8a06022 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 13:49:21 +0100 Subject: [PATCH 131/196] Display filename on the first excerpt's header for a group --- crates/diagnostics/src/diagnostics.rs | 10 ++++++---- crates/editor/src/editor.rs | 16 +++++++++++++--- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 4ff3a67f4692a2087c5a83d5ec75dbbaf2894093..ad458970090f75f537187b899ee551711ccaba20 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -113,8 +113,9 @@ impl ProjectDiagnosticsEditor { let mut header = primary.clone(); header.message = primary.message.split('\n').next().unwrap().to_string(); - excerpt.header_height = 1; + excerpt.header_height = 2; excerpt.render_header = Some(diagnostic_header_renderer( + buffer.clone(), header, self.build_settings.clone(), )); @@ -321,15 +322,16 @@ mod tests { assert_eq!( view.excerpts.read(cx).read(cx).text(), concat!( - "\n", + "\n", // primary diagnostic message + "\n", // filename " let x = vec![];\n", " let y = vec![];\n", " a(x);\n", - "\n", + "\n", // context ellipsis " a(x);\n", " b(y);\n", " // comment 1\n", - "\n", + "\n", // context ellipsis " // comment 3\n", " // comment 4\n", " d(y);" diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 337e4285aef3e5f16ac267562a85ef3b528a949f..072f4db1da4e3163cb00f595b09eef6c5ce9b7bf 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14,7 +14,7 @@ use display_map::*; pub use element::*; use gpui::{ action, - elements::Text, + elements::*, fonts::TextStyle, geometry::vector::{vec2f, Vector2F}, keymap::Binding, @@ -3783,6 +3783,7 @@ pub fn diagnostic_block_renderer( } pub fn diagnostic_header_renderer( + buffer: ModelHandle, diagnostic: Diagnostic, build_settings: BuildSettings, ) -> RenderHeaderFn { @@ -3790,7 +3791,16 @@ pub fn diagnostic_header_renderer( let settings = build_settings(cx); let mut text_style = settings.style.text.clone(); text_style.color = diagnostic_style(diagnostic.severity, true, &settings.style).text; - Text::new(diagnostic.message.clone(), text_style).boxed() + let file_path = if let Some(file) = buffer.read(cx).file() { + file.path().to_string_lossy().to_string() + } else { + "untitled".to_string() + }; + + Flex::column() + .with_child(Label::new(diagnostic.message.clone(), text_style).boxed()) + .with_child(Label::new(file_path, settings.style.text.clone()).boxed()) + .boxed() }) } @@ -3798,7 +3808,7 @@ pub fn context_header_renderer(build_settings: BuildSettings) -> RenderHeaderFn Arc::new(move |cx| { let settings = build_settings(cx); let text_style = settings.style.text.clone(); - Text::new("...".to_string(), text_style).boxed() + Label::new("...".to_string(), text_style).boxed() }) } From a2ee38f37bb4cc44921040786c91d0b79f7361ba Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 14:57:24 +0100 Subject: [PATCH 132/196] Make `MultiBuffer::is_parsing` a test-only method --- crates/editor/src/multi_buffer.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/editor/src/multi_buffer.rs 
b/crates/editor/src/multi_buffer.rs index 902f9e4bc136c1b3c0a4e5fb8d9780eeec854ed5..eac9a9ed8da59ff0ecc0886a26d83fc27e72cacb 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -689,6 +689,7 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).has_conflict() } + #[cfg(test)] pub fn is_parsing(&self, cx: &AppContext) -> bool { self.as_singleton().unwrap().read(cx).is_parsing() } From c9cbc2fe1e4bf74702c7fdba303dadd909e06526 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 14:57:42 +0100 Subject: [PATCH 133/196] Implement `MultiBuffer::range_for_syntax_ancestor` --- crates/editor/src/multi_buffer.rs | 40 ++++++++++++++++++++++++++++--- 1 file changed, 37 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index eac9a9ed8da59ff0ecc0886a26d83fc27e72cacb..3abddbae95b0eca12767c379c3bb4ea4f121b31d 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1473,9 +1473,43 @@ impl MultiBufferSnapshot { pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.as_singleton() - .unwrap() - .range_for_syntax_ancestor(range) + + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&range.start, Bias::Right, &()); + let start_excerpt = cursor.item(); + + cursor.seek(&range.end, Bias::Right, &()); + let end_excerpt = cursor.item(); + + start_excerpt + .zip(end_excerpt) + .and_then(|(start_excerpt, end_excerpt)| { + if start_excerpt.id != end_excerpt.id { + return None; + } + + let excerpt_buffer_start = + start_excerpt.range.start.to_offset(&start_excerpt.buffer); + let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes + - start_excerpt.header_height as usize; + + let start_after_header = cursor.start() + start_excerpt.header_height as usize; + let start_in_buffer = + excerpt_buffer_start + range.start.saturating_sub(start_after_header); + let end_in_buffer = + excerpt_buffer_start + range.end.saturating_sub(start_after_header); + let mut ancestor_buffer_range = start_excerpt + .buffer + .range_for_syntax_ancestor(start_in_buffer..end_in_buffer)?; + ancestor_buffer_range.start = + cmp::max(ancestor_buffer_range.start, excerpt_buffer_start); + ancestor_buffer_range.end = cmp::min(ancestor_buffer_range.end, excerpt_buffer_end); + + let start = + start_after_header + (ancestor_buffer_range.start - excerpt_buffer_start); + let end = start_after_header + (ancestor_buffer_range.end - excerpt_buffer_start); + Some(start..end) + }) } fn buffer_snapshot_for_excerpt<'a>( From 5f819b6edca971241d3d51fe2f4277248f299a34 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 15:05:05 +0100 Subject: [PATCH 134/196] Implement `MultiBuffer::enclosing_bracket_ranges` --- crates/editor/src/multi_buffer.rs | 46 ++++++++++++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 3abddbae95b0eca12767c379c3bb4ea4f121b31d..374a2226ec2cca8a1676378c961367d9d06e29df 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1435,7 +1435,51 @@ impl MultiBufferSnapshot { range: Range, ) -> Option<(Range, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.as_singleton().unwrap().enclosing_bracket_ranges(range) + + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&range.start, 
Bias::Right, &()); + let start_excerpt = cursor.item(); + + cursor.seek(&range.end, Bias::Right, &()); + let end_excerpt = cursor.item(); + + start_excerpt + .zip(end_excerpt) + .and_then(|(start_excerpt, end_excerpt)| { + if start_excerpt.id != end_excerpt.id { + return None; + } + + let excerpt_buffer_start = + start_excerpt.range.start.to_offset(&start_excerpt.buffer); + let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes + - start_excerpt.header_height as usize; + + let start_after_header = cursor.start() + start_excerpt.header_height as usize; + let start_in_buffer = + excerpt_buffer_start + range.start.saturating_sub(start_after_header); + let end_in_buffer = + excerpt_buffer_start + range.end.saturating_sub(start_after_header); + let (mut start_bracket_range, mut end_bracket_range) = start_excerpt + .buffer + .enclosing_bracket_ranges(start_in_buffer..end_in_buffer)?; + + if start_bracket_range.start >= excerpt_buffer_start + && end_bracket_range.end < excerpt_buffer_end + { + start_bracket_range.start = + start_after_header + (start_bracket_range.start - excerpt_buffer_start); + start_bracket_range.end = + start_after_header + (start_bracket_range.end - excerpt_buffer_start); + end_bracket_range.start = + start_after_header + (end_bracket_range.start - excerpt_buffer_start); + end_bracket_range.end = + start_after_header + (end_bracket_range.end - excerpt_buffer_start); + Some((start_bracket_range, end_bracket_range)) + } else { + None + } + }) } pub fn diagnostics_update_count(&self) -> usize { From 7d2b74a93bb28433e03b99e8eb356ab5980c1b14 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 15:33:09 +0100 Subject: [PATCH 135/196] Implement `MultiBuffer::{is_dirty,has_conflict}` --- crates/diagnostics/src/diagnostics.rs | 21 ++++++++++++++++++++- crates/editor/src/editor.rs | 1 + crates/editor/src/items.rs | 4 ++-- crates/editor/src/multi_buffer.rs | 25 +++++++++++++++++-------- 4 files changed, 40 insertions(+), 11 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index ad458970090f75f537187b899ee551711ccaba20..9a8a27cec0aa2168365c60f30f8062cec4311e2d 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -20,6 +20,8 @@ pub fn init(cx: &mut MutableAppContext) { cx.add_action(ProjectDiagnosticsEditor::toggle); } +type Event = editor::Event; + struct ProjectDiagnostics { project: ModelHandle, } @@ -41,7 +43,7 @@ impl Entity for ProjectDiagnostics { } impl Entity for ProjectDiagnosticsEditor { - type Event = (); + type Event = Event; } impl View for ProjectDiagnosticsEditor { @@ -68,6 +70,8 @@ impl ProjectDiagnosticsEditor { let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); let editor = cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx)); + cx.subscribe(&editor, |_, _, event, cx| cx.emit(*event)) + .detach(); Self { excerpts, editor, @@ -240,6 +244,21 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { ) -> gpui::Task> { todo!() } + + fn is_dirty(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).read(cx).is_dirty() + } + + fn has_conflict(&self, cx: &AppContext) -> bool { + self.excerpts.read(cx).read(cx).has_conflict() + } + + fn should_update_tab_on_event(event: &Event) -> bool { + matches!( + event, + Event::Saved | Event::Dirtied | Event::FileHandleChanged + ) + } } #[cfg(test)] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 
072f4db1da4e3163cb00f595b09eef6c5ce9b7bf..26a1fcd494f84e8d89e4add432916eaff7e54a83 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3638,6 +3638,7 @@ fn compute_scroll_position( scroll_position } +#[derive(Copy, Clone)] pub enum Event { Activate, Edited, diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 0a4ddc8af9590540e5a588541a6006aa747fb697..62a0d53f0a0734095f8c1a52271efb50cc2ed915 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -181,11 +181,11 @@ impl ItemView for Editor { } fn is_dirty(&self, cx: &AppContext) -> bool { - self.buffer().read(cx).is_dirty(cx) + self.buffer().read(cx).read(cx).is_dirty() } fn has_conflict(&self, cx: &AppContext) -> bool { - self.buffer().read(cx).has_conflict(cx) + self.buffer().read(cx).read(cx).has_conflict() } } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 374a2226ec2cca8a1676378c961367d9d06e29df..de348b449953d41a523db62072f2045e991dce4e 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -82,6 +82,8 @@ pub struct MultiBufferSnapshot { excerpts: SumTree, parse_count: usize, diagnostics_update_count: usize, + is_dirty: bool, + has_conflict: bool, } pub type RenderHeaderFn = Arc ElementBox>; @@ -681,14 +683,6 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).file() } - pub fn is_dirty(&self, cx: &AppContext) -> bool { - self.as_singleton().unwrap().read(cx).is_dirty() - } - - pub fn has_conflict(&self, cx: &AppContext) -> bool { - self.as_singleton().unwrap().read(cx).has_conflict() - } - #[cfg(test)] pub fn is_parsing(&self, cx: &AppContext) -> bool { self.as_singleton().unwrap().read(cx).is_parsing() @@ -699,6 +693,8 @@ impl MultiBuffer { let mut excerpts_to_edit = Vec::new(); let mut reparsed = false; let mut diagnostics_updated = false; + let mut is_dirty = false; + let mut has_conflict = false; for buffer_state in self.buffers.values() { let buffer = buffer_state.buffer.read(cx); let buffer_edited = buffer.version().gt(&buffer_state.last_version); @@ -716,6 +712,8 @@ impl MultiBuffer { reparsed |= buffer_reparsed; diagnostics_updated |= buffer_diagnostics_updated; + is_dirty |= buffer.is_dirty(); + has_conflict |= buffer.has_conflict(); } if reparsed { snapshot.parse_count += 1; @@ -723,6 +721,9 @@ impl MultiBuffer { if diagnostics_updated { snapshot.diagnostics_update_count += 1; } + snapshot.is_dirty = is_dirty; + snapshot.has_conflict = has_conflict; + excerpts_to_edit.sort_unstable_by_key(|(excerpt_id, _, _)| *excerpt_id); let mut edits = Vec::new(); @@ -1493,6 +1494,14 @@ impl MultiBufferSnapshot { .and_then(|excerpt| excerpt.buffer.language()) } + pub fn is_dirty(&self) -> bool { + self.is_dirty + } + + pub fn has_conflict(&self) -> bool { + self.has_conflict + } + pub fn diagnostic_group<'a, O>( &'a self, group_id: usize, From 6685d5aa7d4131ab1d4dc2e98a10e4b9bd2fe662 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 16:11:18 +0100 Subject: [PATCH 136/196] Implement `MultiBuffer::save` This is still not integrated with `workspace::ItemView`. 
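For illustration, a minimal sketch (not part of this change; the `multibuffer`
handle and the surrounding `cx.spawn` async context are assumed) of how a caller
might drive the new method until the `workspace::ItemView` integration lands:

    // Start saving every buffer backing the multi-buffer; `save` returns
    // Result<Task<Result<()>>>, so the outer Result is unwrapped to get the task.
    let save = multibuffer.update(&mut cx, |multibuffer, cx| multibuffer.save(cx))?;
    // Await the combined task, which resolves once every underlying buffer
    // save has completed.
    save.await?;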
--- crates/editor/src/multi_buffer.rs | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index de348b449953d41a523db62072f2045e991dce4e..b94854d2f5c4347a4239280e87660aa961efe963 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -16,7 +16,7 @@ use std::{ ops::{Range, Sub}, str, sync::Arc, - time::{Duration, Instant, SystemTime}, + time::{Duration, Instant}, }; use sum_tree::{Bias, Cursor, SumTree}; use text::{ @@ -663,13 +663,18 @@ impl MultiBuffer { cx.emit(event.clone()); } - pub fn save( - &mut self, - cx: &mut ModelContext, - ) -> Result>> { - self.as_singleton() - .unwrap() - .update(cx, |buffer, cx| buffer.save(cx)) + pub fn save(&mut self, cx: &mut ModelContext) -> Result>> { + let mut save_tasks = Vec::new(); + for BufferState { buffer, .. } in self.buffers.values() { + save_tasks.push(buffer.update(cx, |buffer, cx| buffer.save(cx))?); + } + + Ok(cx.spawn(|_, _| async move { + for save in save_tasks { + save.await?; + } + Ok(()) + })) } pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { From f0fe346e15180a3f959d8f51c2b8b788e68a3bc1 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 16:37:12 +0100 Subject: [PATCH 137/196] Gracefully degrade `diagnostics_in_range`, `diagnostic_group` and `file` Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index b94854d2f5c4347a4239280e87660aa961efe963..7f0b2ea736cf8bb70a3e693dccf566c479b628ad 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -685,7 +685,7 @@ impl MultiBuffer { } pub fn file<'a>(&self, cx: &'a AppContext) -> Option<&'a dyn File> { - self.as_singleton().unwrap().read(cx).file() + self.as_singleton()?.read(cx).file() } #[cfg(test)] @@ -1514,7 +1514,9 @@ impl MultiBufferSnapshot { where O: text::FromAnchor + 'a, { - self.as_singleton().unwrap().diagnostic_group(group_id) + self.as_singleton() + .into_iter() + .flat_map(move |buffer| buffer.diagnostic_group(group_id)) } pub fn diagnostics_in_range<'a, T, O>( @@ -1525,8 +1527,9 @@ impl MultiBufferSnapshot { T: 'a + ToOffset, O: 'a + text::FromAnchor, { - let range = range.start.to_offset(self)..range.end.to_offset(self); - self.as_singleton().unwrap().diagnostics_in_range(range) + self.as_singleton().into_iter().flat_map(move |buffer| { + buffer.diagnostics_in_range(range.start.to_offset(self)..range.end.to_offset(self)) + }) } pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { From 88d663a253ba7b764afcf07e851937106793932d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 17 Dec 2021 16:48:16 +0100 Subject: [PATCH 138/196] Allow saving of all buffers contained in project diagnostics editor Co-Authored-By: Nathan Sobo --- crates/diagnostics/src/diagnostics.rs | 24 ++++++---- crates/editor/src/items.rs | 8 ++++ crates/workspace/src/workspace.rs | 68 ++++++++++++++++----------- 3 files changed, 64 insertions(+), 36 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 9a8a27cec0aa2168365c60f30f8062cec4311e2d..f39803f1482cc027fe3b2422f8b1be3e36242d04 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,3 +1,4 @@ +use anyhow::Result; use editor::{ context_header_renderer, 
diagnostic_block_renderer, diagnostic_header_renderer, display_map::{BlockDisposition, BlockProperties}, @@ -5,7 +6,7 @@ use editor::{ }; use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, - RenderContext, View, ViewContext, ViewHandle, + RenderContext, Task, View, ViewContext, ViewHandle, }; use language::{Bias, Buffer, Point}; use postage::watch; @@ -207,7 +208,7 @@ impl workspace::Item for ProjectDiagnostics { .await?; view.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx)) } - Result::Ok::<_, anyhow::Error>(()) + Result::<_, anyhow::Error>::Ok(()) } }) .detach(); @@ -229,11 +230,8 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { None } - fn save( - &mut self, - _: &mut ViewContext, - ) -> anyhow::Result>> { - todo!() + fn save(&mut self, cx: &mut ViewContext) -> Result>> { + self.excerpts.update(cx, |excerpts, cx| excerpts.save(cx)) } fn save_as( @@ -241,8 +239,8 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { _: ModelHandle, _: &std::path::Path, _: &mut ViewContext, - ) -> gpui::Task> { - todo!() + ) -> Task> { + unreachable!() } fn is_dirty(&self, cx: &AppContext) -> bool { @@ -259,6 +257,14 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { Event::Saved | Event::Dirtied | Event::FileHandleChanged ) } + + fn can_save(&self, _: &AppContext) -> bool { + true + } + + fn can_save_as(&self, _: &AppContext) -> bool { + false + } } #[cfg(test)] diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 62a0d53f0a0734095f8c1a52271efb50cc2ed915..b662044553ce6cd66b3f1ce1a4305d984e54a270 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -187,6 +187,14 @@ impl ItemView for Editor { fn has_conflict(&self, cx: &AppContext) -> bool { self.buffer().read(cx).read(cx).has_conflict() } + + fn can_save(&self, cx: &AppContext) -> bool { + self.project_path(cx).is_some() + } + + fn can_save_as(&self, _: &AppContext) -> bool { + true + } } pub struct CursorPosition { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index e49f81efa71d0915f421d0a9ea80fad0833e37a7..6f0aa7503295625a24342acea2a08494647c56f2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -102,7 +102,9 @@ pub trait ItemView: View { fn has_conflict(&self, _: &AppContext) -> bool { false } + fn can_save(&self, cx: &AppContext) -> bool; fn save(&mut self, cx: &mut ViewContext) -> Result>>; + fn can_save_as(&self, cx: &AppContext) -> bool; fn save_as( &mut self, worktree: ModelHandle, @@ -146,6 +148,8 @@ pub trait ItemViewHandle { fn to_any(&self) -> AnyViewHandle; fn is_dirty(&self, cx: &AppContext) -> bool; fn has_conflict(&self, cx: &AppContext) -> bool; + fn can_save(&self, cx: &AppContext) -> bool; + fn can_save_as(&self, cx: &AppContext) -> bool; fn save(&self, cx: &mut MutableAppContext) -> Result>>; fn save_as( &self, @@ -276,6 +280,14 @@ impl ItemViewHandle for ViewHandle { fn to_any(&self) -> AnyViewHandle { self.into() } + + fn can_save(&self, cx: &AppContext) -> bool { + self.read(cx).can_save(cx) + } + + fn can_save_as(&self, cx: &AppContext) -> bool { + self.read(cx).can_save_as(cx) + } } impl Clone for Box { @@ -685,7 +697,35 @@ impl Workspace { pub fn save_active_item(&mut self, _: &Save, cx: &mut ViewContext) { if let Some(item) = self.active_item(cx) { let handle = cx.handle(); - if item.project_path(cx.as_ref()).is_none() { + if item.can_save(cx) { + if item.has_conflict(cx.as_ref()) { + const CONFLICT_MESSAGE: &'static str 
= "This file has changed on disk since you started editing it. Do you want to overwrite it?"; + + cx.prompt( + PromptLevel::Warning, + CONFLICT_MESSAGE, + &["Overwrite", "Cancel"], + move |answer, cx| { + if answer == 0 { + cx.spawn(|mut cx| async move { + if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await + { + error!("failed to save item: {:?}, ", error); + } + }) + .detach(); + } + }, + ); + } else { + cx.spawn(|_, mut cx| async move { + if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await { + error!("failed to save item: {:?}, ", error); + } + }) + .detach(); + } + } else if item.can_save_as(cx) { let worktree = self.worktrees(cx).first(); let start_abs_path = worktree .and_then(|w| w.read(cx).as_local()) @@ -717,32 +757,6 @@ impl Workspace { .detach() } }); - return; - } else if item.has_conflict(cx.as_ref()) { - const CONFLICT_MESSAGE: &'static str = "This file has changed on disk since you started editing it. Do you want to overwrite it?"; - - cx.prompt( - PromptLevel::Warning, - CONFLICT_MESSAGE, - &["Overwrite", "Cancel"], - move |answer, cx| { - if answer == 0 { - cx.spawn(|mut cx| async move { - if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await { - error!("failed to save item: {:?}, ", error); - } - }) - .detach(); - } - }, - ); - } else { - cx.spawn(|_, mut cx| async move { - if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await { - error!("failed to save item: {:?}, ", error); - } - }) - .detach(); } } } From c41b958829f7a1a04ad420332a061e00d24b5a6c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 17 Dec 2021 22:00:39 -0800 Subject: [PATCH 139/196] WIP - start restructuring collaboration around entire projects Co-Authored-By: Nathan Sobo --- crates/client/src/user.rs | 24 +- crates/project/src/project.rs | 380 ++++++++++++++++++++++++++++- crates/project/src/worktree.rs | 433 +++++++-------------------------- crates/rpc/proto/zed.proto | 189 ++++++++------ crates/rpc/src/peer.rs | 2 + crates/rpc/src/proto.rs | 50 ++-- crates/server/src/rpc.rs | 72 +++--- crates/server/src/rpc/store.rs | 170 ++++++++----- 8 files changed, 765 insertions(+), 555 deletions(-) diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 0a387487e1b064e3b387a634dfbbd62a13789e6f..26be77bf2d66dbc450d7fa4bc99a89ce2a9a79e0 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -22,14 +22,14 @@ pub struct User { #[derive(Debug)] pub struct Contact { pub user: Arc, - pub worktrees: Vec, + pub projects: Vec, } #[derive(Debug)] -pub struct WorktreeMetadata { +pub struct ProjectMetadata { pub id: u64, - pub root_name: String, pub is_shared: bool, + pub worktree_root_names: Vec, pub guests: Vec>, } @@ -112,7 +112,7 @@ impl UserStore { let mut user_ids = HashSet::new(); for contact in &message.contacts { user_ids.insert(contact.user_id); - user_ids.extend(contact.worktrees.iter().flat_map(|w| &w.guests).copied()); + user_ids.extend(contact.projects.iter().flat_map(|w| &w.guests).copied()); } let load_users = self.load_users(user_ids.into_iter().collect(), cx); @@ -221,10 +221,10 @@ impl Contact { user_store.fetch_user(contact.user_id, cx) }) .await?; - let mut worktrees = Vec::new(); - for worktree in contact.worktrees { + let mut projects = Vec::new(); + for project in contact.projects { let mut guests = Vec::new(); - for participant_id in worktree.guests { + for participant_id in project.guests { guests.push( user_store .update(cx, |user_store, cx| { @@ -233,14 +233,14 @@ impl Contact { .await?, ); } - 
worktrees.push(WorktreeMetadata { - id: worktree.id, - root_name: worktree.root_name, - is_shared: worktree.is_shared, + projects.push(ProjectMetadata { + id: project.id, + worktree_root_names: project.worktree_root_names.clone(), + is_shared: project.is_shared, guests, }); } - Ok(Self { user, worktrees }) + Ok(Self { user, projects }) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index cd3f09c98be1fafafb9819d672dc7ffc928e8895..f08b8a891c2159c81ff65a2e4e7b53ae729e78e9 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3,18 +3,20 @@ mod ignore; mod worktree; use anyhow::{anyhow, Result}; -use client::{Client, UserStore}; +use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; use clock::ReplicaId; +use collections::HashMap; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; -use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; use language::{Buffer, DiagnosticEntry, LanguageRegistry}; use lsp::DiagnosticSeverity; +use postage::{prelude::Stream, sink::Sink, watch}; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, }; -use util::TryFutureExt as _; +use util::{ResultExt, TryFutureExt as _}; pub use fs::*; pub use worktree::*; @@ -27,6 +29,29 @@ pub struct Project { client: Arc, user_store: ModelHandle, fs: Arc, + client_state: ProjectClientState, + collaborators: HashMap, + subscriptions: Vec, +} + +enum ProjectClientState { + Local { + is_shared: bool, + remote_id_tx: watch::Sender>, + remote_id_rx: watch::Receiver>, + _maintain_remote_id_task: Task>, + }, + Remote { + remote_id: u64, + replica_id: ReplicaId, + }, +} + +#[derive(Clone, Debug)] +pub struct Collaborator { + pub user: Arc, + pub peer_id: PeerId, + pub replica_id: ReplicaId, } pub enum Event { @@ -80,14 +105,46 @@ pub struct ProjectEntry { } impl Project { - pub fn new( + pub fn local( languages: Arc, client: Arc, user_store: ModelHandle, fs: Arc, + cx: &mut ModelContext, ) -> Self { + let (remote_id_tx, remote_id_rx) = watch::channel(); + let _maintain_remote_id_task = cx.spawn_weak({ + let rpc = client.clone(); + move |this, cx| { + async move { + let mut status = rpc.status(); + while let Some(status) = status.recv().await { + if let Some(this) = this.upgrade(&cx) { + let remote_id = if let client::Status::Connected { .. 
} = status { + let response = rpc.request(proto::RegisterProject {}).await?; + Some(response.project_id) + } else { + None + }; + this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx)); + } + } + Ok(()) + } + .log_err() + } + }); + Self { worktrees: Default::default(), + collaborators: Default::default(), + client_state: ProjectClientState::Local { + is_shared: false, + remote_id_tx, + remote_id_rx, + _maintain_remote_id_task, + }, + subscriptions: Vec::new(), active_worktree: None, active_entry: None, languages, @@ -97,9 +154,120 @@ impl Project { } } + pub async fn open_remote( + remote_id: u64, + languages: Arc, + client: Arc, + user_store: ModelHandle, + fs: Arc, + cx: &mut AsyncAppContext, + ) -> Result> { + client.authenticate_and_connect(&cx).await?; + + let response = client + .request(proto::JoinProject { + project_id: remote_id, + }) + .await?; + + let replica_id = response.replica_id as ReplicaId; + + let mut worktrees = Vec::new(); + for worktree in response.worktrees { + worktrees.push( + Worktree::remote( + remote_id, + replica_id, + worktree, + client.clone(), + user_store.clone(), + languages.clone(), + cx, + ) + .await?, + ); + } + + let user_ids = response + .collaborators + .iter() + .map(|peer| peer.user_id) + .collect(); + user_store + .update(cx, |user_store, cx| user_store.load_users(user_ids, cx)) + .await?; + let mut collaborators = HashMap::default(); + for message in response.collaborators { + let collaborator = Collaborator::from_proto(message, &user_store, cx).await?; + collaborators.insert(collaborator.peer_id, collaborator); + } + + Ok(cx.add_model(|cx| Self { + worktrees, + active_worktree: None, + active_entry: None, + collaborators, + languages, + user_store, + fs, + subscriptions: vec![ + client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), + client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), + client.subscribe_to_entity(remote_id, cx, Self::handle_register_worktree), + client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), + client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), + client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), + ], + client, + client_state: ProjectClientState::Remote { + remote_id, + replica_id, + }, + })) + } + + fn set_remote_id(&mut self, remote_id: Option, cx: &mut ModelContext) { + if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state { + cx.foreground().spawn(remote_id_tx.send(remote_id)).detach(); + } + + for worktree in &self.worktrees { + worktree.update(cx, |worktree, cx| { + if let Some(worktree) = worktree.as_local_mut() { + worktree.set_project_remote_id(remote_id); + } + }); + } + + self.subscriptions.clear(); + if let Some(remote_id) = remote_id { + self.subscriptions.extend([ + self.client + .subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), + self.client + .subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), + self.client + .subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), + ]); + } + } + + pub fn remote_id(&self) -> Option { + match &self.client_state { + ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(), + ProjectClientState::Remote { remote_id, .. } => Some(*remote_id), + } + } + pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { - // TODO - self.worktrees.first().unwrap().read(cx).replica_id() + match &self.client_state { + ProjectClientState::Local { .. } => 0, + ProjectClientState::Remote { replica_id, .. 
} => *replica_id, + } + } + + pub fn collaborators(&self) -> &HashMap { + &self.collaborators } pub fn worktrees(&self) -> &[ModelHandle] { @@ -113,6 +281,40 @@ impl Project { .cloned() } + pub fn share(&self, cx: &mut ModelContext) -> Task> { + let rpc = self.client.clone(); + cx.spawn(|this, mut cx| async move { + let remote_id = this.update(&mut cx, |this, cx| { + if let ProjectClientState::Local { + is_shared, + remote_id_rx, + .. + } = &mut this.client_state + { + *is_shared = true; + Ok(*remote_id_rx.borrow()) + } else { + Err(anyhow!("can't share a remote project")) + } + })?; + + let remote_id = remote_id.ok_or_else(|| anyhow!("no project id"))?; + rpc.send(proto::ShareProject { + project_id: remote_id, + }) + .await?; + + this.update(&mut cx, |this, cx| { + for worktree in &this.worktrees { + worktree.update(cx, |worktree, cx| { + worktree.as_local_mut().unwrap().share(cx).detach(); + }); + } + }); + Ok(()) + }) + } + pub fn open_buffer( &self, path: ProjectPath, @@ -139,6 +341,24 @@ impl Project { let worktree = Worktree::open_local(client, user_store, path, fs, languages, &mut cx).await?; this.update(&mut cx, |this, cx| { + if let Some(project_id) = this.remote_id() { + worktree.update(cx, |worktree, cx| { + worktree + .as_local_mut() + .unwrap() + .set_project_remote_id(Some(project_id)); + cx.foreground().spawn( + client + .request(proto::RegisterWorktree { + project_id, + root_name: worktree.root_name().to_string(), + authorized_logins: worktree.authorized_logins(), + worktree_id: worktree.id() as u64, + }) + .log_err(), + ); + }); + } this.add_worktree(worktree.clone(), cx); }); Ok(worktree) @@ -154,10 +374,8 @@ impl Project { let languages = self.languages.clone(); let user_store = self.user_store.clone(); cx.spawn(|this, mut cx| async move { - rpc.authenticate_and_connect(&cx).await?; let worktree = - Worktree::open_remote(rpc.clone(), remote_id, languages, user_store, &mut cx) - .await?; + Worktree::remote(rpc.clone(), remote_id, languages, user_store, &mut cx).await?; this.update(&mut cx, |this, cx| { cx.subscribe(&worktree, move |this, _, event, cx| match event { worktree::Event::Closed => { @@ -304,6 +522,116 @@ impl Project { } } + // RPC message handlers + + fn handle_add_collaborator( + &mut self, + mut envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + let user_store = self.user_store.clone(); + let collaborator = envelope + .payload + .collaborator + .take() + .ok_or_else(|| anyhow!("empty collaborator"))?; + + cx.spawn(|this, mut cx| { + async move { + let collaborator = + Collaborator::from_proto(collaborator, &user_store, &mut cx).await?; + this.collaborators + .insert(collaborator.peer_id, collaborator); + cx.notify(); + Ok(()) + } + .log_err() + }) + .detach(); + + Ok(()) + } + + fn handle_remove_collaborator( + &mut self, + envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + let peer_id = PeerId(envelope.payload.peer_id); + let replica_id = self + .collaborators + .remove(&peer_id) + .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? + .replica_id; + for worktree in &self.worktrees { + worktree.update(cx, |worktree, cx| { + worktree.remove_collaborator(peer_id, replica_id, cx); + }) + } + Ok(()) + } + + fn handle_register_worktree( + &mut self, + envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + let peer_id = PeerId(envelope.payload.peer_id); + let replica_id = self + .collaborators + .remove(&peer_id) + .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? 
+ .replica_id; + for worktree in &self.worktrees { + worktree.update(cx, |worktree, cx| { + worktree.remove_collaborator(peer_id, replica_id, cx); + }) + } + Ok(()) + } + + fn handle_update_worktree( + &mut self, + mut envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + worktree + .as_remote_mut() + .unwrap() + .update_from_remote(envelope, cx); + } + Ok(()) + } + + pub fn handle_update_buffer( + &mut self, + envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + worktree.handle_update_buffer(envelope, cx)?; + } + Ok(()) + } + + pub fn handle_buffer_saved( + &mut self, + envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + worktree.handle_buffer_saved(envelope, cx); + } + Ok(()) + } + pub fn match_paths<'a>( &self, query: &'a str, @@ -400,6 +728,38 @@ impl<'a> Iterator for CandidateSetIter<'a> { impl Entity for Project { type Event = Event; + + fn release(&mut self, cx: &mut gpui::MutableAppContext) { + if let Some(project_id) = *self.remote_id.borrow() { + let rpc = self.client.clone(); + cx.spawn(|_| async move { + if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await { + log::error!("error unregistering project: {}", err); + } + }) + .detach(); + } + } +} + +impl Collaborator { + fn from_proto( + message: proto::Collaborator, + user_store: &ModelHandle, + cx: &mut AsyncAppContext, + ) -> impl Future> { + let user = user_store.update(cx, |user_store, cx| { + user_store.fetch_user(message.user_id, cx) + }); + + async move { + Ok(Self { + peer_id: PeerId(message.peer_id), + user: user.await?, + replica_id: message.replica_id as ReplicaId, + }) + } + } } #[cfg(test)] @@ -514,6 +874,6 @@ mod tests { let client = client::Client::new(); let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) }); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); - cx.add_model(|_| Project::new(languages, client, user_store, fs)) + cx.add_model(|cx| Project::new(languages, client, user_store, fs, cx)) } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 89eeb4d91749ccca4528490b306ae082ab50d6e6..011275bd3b491ab26a02e28561da5bf36d6eeb1c 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -5,7 +5,7 @@ use super::{ }; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; -use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore}; +use client::{proto, Client, PeerId, TypedEnvelope, UserStore}; use clock::ReplicaId; use collections::{hash_map, HashMap}; use futures::{Stream, StreamExt}; @@ -66,62 +66,9 @@ pub enum Event { Closed, } -#[derive(Clone, Debug)] -pub struct Collaborator { - pub user: Arc, - pub peer_id: PeerId, - pub replica_id: ReplicaId, -} - -impl Collaborator { - fn from_proto( - message: proto::Collaborator, - user_store: &ModelHandle, - cx: &mut AsyncAppContext, - ) -> impl Future> { - let user = user_store.update(cx, |user_store, cx| { - user_store.fetch_user(message.user_id, cx) - }); - - async move { - Ok(Self { - peer_id: PeerId(message.peer_id), - user: user.await?, - replica_id: message.replica_id as ReplicaId, - }) - } - } -} - impl Entity for Worktree { 
type Event = Event; - fn release(&mut self, cx: &mut MutableAppContext) { - match self { - Self::Local(tree) => { - if let Some(worktree_id) = *tree.remote_id.borrow() { - let rpc = tree.client.clone(); - cx.spawn(|_| async move { - if let Err(err) = rpc.send(proto::CloseWorktree { worktree_id }).await { - log::error!("error closing worktree: {}", err); - } - }) - .detach(); - } - } - Self::Remote(tree) => { - let rpc = tree.client.clone(); - let worktree_id = tree.remote_id; - cx.spawn(|_| async move { - if let Err(err) = rpc.send(proto::LeaveWorktree { worktree_id }).await { - log::error!("error closing worktree: {}", err); - } - }) - .detach(); - } - } - } - fn app_will_quit( &mut self, _: &mut MutableAppContext, @@ -172,32 +119,16 @@ impl Worktree { Ok(tree) } - pub async fn open_remote( - client: Arc, - id: u64, - languages: Arc, - user_store: ModelHandle, - cx: &mut AsyncAppContext, - ) -> Result> { - let response = client - .request(proto::JoinWorktree { worktree_id: id }) - .await?; - Worktree::remote(response, client, user_store, languages, cx).await - } - - async fn remote( - join_response: proto::JoinWorktreeResponse, + pub async fn remote( + project_remote_id: u64, + replica_id: ReplicaId, + worktree: proto::Worktree, client: Arc, user_store: ModelHandle, languages: Arc, cx: &mut AsyncAppContext, ) -> Result> { - let worktree = join_response - .worktree - .ok_or_else(|| anyhow!("empty worktree"))?; - let remote_id = worktree.id; - let replica_id = join_response.replica_id as ReplicaId; let root_char_bag: CharBag = worktree .root_name .chars() @@ -232,20 +163,6 @@ impl Worktree { }) .await; - let user_ids = join_response - .collaborators - .iter() - .map(|peer| peer.user_id) - .collect(); - user_store - .update(cx, |user_store, cx| user_store.load_users(user_ids, cx)) - .await?; - let mut collaborators = HashMap::default(); - for message in join_response.collaborators { - let collaborator = Collaborator::from_proto(message, &user_store, cx).await?; - collaborators.insert(collaborator.peer_id, collaborator); - } - let worktree = cx.update(|cx| { cx.add_model(|cx: &mut ModelContext| { let snapshot = Snapshot { @@ -290,16 +207,8 @@ impl Worktree { .detach(); } - let _subscriptions = vec![ - client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_update), - client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), - client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), - client.subscribe_to_entity(remote_id, cx, Self::handle_unshare), - ]; - Worktree::Remote(RemoteWorktree { + project_remote_id, remote_id, replica_id, snapshot, @@ -309,11 +218,9 @@ impl Worktree { loading_buffers: Default::default(), open_buffers: Default::default(), diagnostic_summaries: HashMap::default(), - collaborators, queued_operations: Default::default(), languages, user_store, - _subscriptions, }) }) }); @@ -359,71 +266,37 @@ impl Worktree { } } - pub fn languages(&self) -> &Arc { + pub fn authorized_logins(&self) -> Vec { match self { - Worktree::Local(worktree) => &worktree.languages, - Worktree::Remote(worktree) => &worktree.languages, + Worktree::Local(worktree) => worktree.config.collaborators.clone(), + Worktree::Remote(worktree) => Vec::new(), } } - pub fn user_store(&self) -> &ModelHandle { - match self { - Worktree::Local(worktree) => &worktree.user_store, - Worktree::Remote(worktree) => &worktree.user_store, - } - } - 
- pub fn handle_add_collaborator( + pub fn remove_collaborator( &mut self, - mut envelope: TypedEnvelope, - _: Arc, + peer_id: PeerId, + replica_id: ReplicaId, cx: &mut ModelContext, - ) -> Result<()> { - let user_store = self.user_store().clone(); - let collaborator = envelope - .payload - .collaborator - .take() - .ok_or_else(|| anyhow!("empty collaborator"))?; - - cx.spawn(|this, mut cx| { - async move { - let collaborator = - Collaborator::from_proto(collaborator, &user_store, &mut cx).await?; - this.update(&mut cx, |this, cx| match this { - Worktree::Local(worktree) => worktree.add_collaborator(collaborator, cx), - Worktree::Remote(worktree) => worktree.add_collaborator(collaborator, cx), - }); - Ok(()) - } - .log_err() - }) - .detach(); - - Ok(()) + ) { + match self { + Worktree::Local(worktree) => worktree.remove_collaborator(peer_id, replica_id, cx), + Worktree::Remote(worktree) => worktree.remove_collaborator(peer_id, replica_id, cx), + } } - pub fn handle_remove_collaborator( - &mut self, - envelope: TypedEnvelope, - _: Arc, - cx: &mut ModelContext, - ) -> Result<()> { + pub fn languages(&self) -> &Arc { match self { - Worktree::Local(worktree) => worktree.remove_collaborator(envelope, cx), - Worktree::Remote(worktree) => worktree.remove_collaborator(envelope, cx), + Worktree::Local(worktree) => &worktree.languages, + Worktree::Remote(worktree) => &worktree.languages, } } - pub fn handle_update( - &mut self, - envelope: TypedEnvelope, - _: Arc, - cx: &mut ModelContext, - ) -> anyhow::Result<()> { - self.as_remote_mut() - .unwrap() - .update_from_remote(envelope, cx) + pub fn user_store(&self) -> &ModelHandle { + match self { + Worktree::Local(worktree) => &worktree.user_store, + Worktree::Remote(worktree) => &worktree.user_store, + } } pub fn handle_open_buffer( @@ -463,13 +336,6 @@ impl Worktree { .close_remote_buffer(envelope, cx) } - pub fn collaborators(&self) -> &HashMap { - match self { - Worktree::Local(worktree) => &worktree.collaborators, - Worktree::Remote(worktree) => &worktree.collaborators, - } - } - pub fn diagnostic_summaries<'a>( &'a self, ) -> impl Iterator, DiagnosticSummary)> + 'a { @@ -623,9 +489,12 @@ impl Worktree { cx: &mut ModelContext, ) -> Result<()> { let sender_id = envelope.original_sender_id()?; - let buffer = self - .as_local() - .unwrap() + let this = self.as_local().unwrap(); + let project_id = this + .project_remote_id + .ok_or_else(|| anyhow!("can't save buffer while disconnected"))?; + + let buffer = this .shared_buffers .get(&sender_id) .and_then(|shared_buffers| shared_buffers.get(&envelope.payload.buffer_id).cloned()) @@ -646,6 +515,7 @@ impl Worktree { rpc.respond( receipt, proto::BufferSaved { + project_id, worktree_id, buffer_id, version: (&version).into(), @@ -689,16 +559,6 @@ impl Worktree { Ok(()) } - pub fn handle_unshare( - &mut self, - _: TypedEnvelope, - _: Arc, - cx: &mut ModelContext, - ) -> Result<()> { - cx.emit(Event::Closed); - Ok(()) - } - fn poll_snapshot(&mut self, cx: &mut ModelContext) { match self { Self::Local(worktree) => { @@ -905,17 +765,19 @@ impl Worktree { operation: Operation, cx: &mut ModelContext, ) { - if let Some((rpc, remote_id)) = match self { + if let Some((rpc, project_id)) = match self { Worktree::Local(worktree) => worktree - .remote_id - .borrow() + .project_remote_id .map(|id| (worktree.client.clone(), id)), - Worktree::Remote(worktree) => Some((worktree.client.clone(), worktree.remote_id)), + Worktree::Remote(worktree) => { + Some((worktree.client.clone(), worktree.project_remote_id)) + } } { 
cx.spawn(|worktree, mut cx| async move { if let Err(error) = rpc .request(proto::UpdateBuffer { - worktree_id: remote_id, + project_id, + worktree_id: worktree.id() as u64, buffer_id, operations: vec![language::proto::serialize_operation(&operation)], }) @@ -956,16 +818,14 @@ pub struct LocalWorktree { background_snapshot: Arc>, last_scan_state_rx: watch::Receiver, _background_scanner_task: Option>, - _maintain_remote_id_task: Task>, + project_remote_id: Option, poll_task: Option>, - remote_id: watch::Receiver>, share: Option, loading_buffers: LoadingBuffers, open_buffers: HashMap>, shared_buffers: HashMap>>, diagnostics: HashMap, Vec>>, diagnostic_summaries: HashMap, DiagnosticSummary>, - collaborators: HashMap, queued_operations: Vec<(u64, Operation)>, languages: Arc, client: Arc, @@ -976,10 +836,10 @@ pub struct LocalWorktree { struct ShareState { snapshots_tx: Sender, - _subscriptions: Vec, } pub struct RemoteWorktree { + project_remote_id: u64, remote_id: u64, snapshot: Snapshot, snapshot_rx: watch::Receiver, @@ -988,12 +848,10 @@ pub struct RemoteWorktree { replica_id: ReplicaId, loading_buffers: LoadingBuffers, open_buffers: HashMap, - collaborators: HashMap, diagnostic_summaries: HashMap, DiagnosticSummary>, languages: Arc, user_store: ModelHandle, queued_operations: Vec<(u64, Operation)>, - _subscriptions: Vec, } type LoadingBuffers = HashMap< @@ -1061,48 +919,13 @@ impl LocalWorktree { ); } - let (mut remote_id_tx, remote_id_rx) = watch::channel(); - let _maintain_remote_id_task = cx.spawn_weak({ - let rpc = client.clone(); - move |this, cx| { - async move { - let mut status = rpc.status(); - while let Some(status) = status.recv().await { - if let Some(this) = this.upgrade(&cx) { - let remote_id = if let client::Status::Connected { .. } = status { - let authorized_logins = this.read_with(&cx, |this, _| { - this.as_local().unwrap().config.collaborators.clone() - }); - let response = rpc - .request(proto::OpenWorktree { - root_name: root_name.clone(), - authorized_logins, - }) - .await?; - - Some(response.worktree_id) - } else { - None - }; - if remote_id_tx.send(remote_id).await.is_err() { - break; - } - } - } - Ok(()) - } - .log_err() - } - }); - let tree = Self { snapshot: snapshot.clone(), config, - remote_id: remote_id_rx, + project_remote_id: None, background_snapshot: Arc::new(Mutex::new(snapshot)), last_scan_state_rx, _background_scanner_task: None, - _maintain_remote_id_task, share: None, poll_task: None, loading_buffers: Default::default(), @@ -1111,7 +934,6 @@ impl LocalWorktree { diagnostics: Default::default(), diagnostic_summaries: Default::default(), queued_operations: Default::default(), - collaborators: Default::default(), languages, client, user_store, @@ -1152,6 +974,10 @@ impl LocalWorktree { Ok((tree, scan_states_tx)) } + pub fn set_project_remote_id(&mut self, id: Option) { + self.project_remote_id = id; + } + pub fn languages(&self) -> &LanguageRegistry { &self.languages } @@ -1297,27 +1123,12 @@ impl LocalWorktree { Ok(()) } - pub fn add_collaborator( - &mut self, - collaborator: Collaborator, - cx: &mut ModelContext, - ) { - self.collaborators - .insert(collaborator.peer_id, collaborator); - cx.notify(); - } - pub fn remove_collaborator( &mut self, - envelope: TypedEnvelope, + peer_id: PeerId, + replica_id: ReplicaId, cx: &mut ModelContext, - ) -> Result<()> { - let peer_id = PeerId(envelope.payload.peer_id); - let replica_id = self - .collaborators - .remove(&peer_id) - .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? 
- .replica_id; + ) { self.shared_buffers.remove(&peer_id); for (_, buffer) in &self.open_buffers { if let Some(buffer) = buffer.upgrade(cx) { @@ -1325,8 +1136,6 @@ impl LocalWorktree { } } cx.notify(); - - Ok(()) } pub fn scan_complete(&self) -> impl Future { @@ -1339,22 +1148,6 @@ impl LocalWorktree { } } - pub fn remote_id(&self) -> Option { - *self.remote_id.borrow() - } - - pub fn next_remote_id(&self) -> impl Future> { - let mut remote_id = self.remote_id.clone(); - async move { - while let Some(remote_id) = remote_id.recv().await { - if remote_id.is_some() { - return remote_id; - } - } - None - } - } - fn is_scanning(&self) -> bool { if let ScanState::Scanning = *self.last_scan_state_rx.borrow() { true @@ -1456,31 +1249,28 @@ impl LocalWorktree { }) } - pub fn share(&mut self, cx: &mut ModelContext) -> Task> { + pub fn share(&mut self, cx: &mut ModelContext) -> Task> { let snapshot = self.snapshot(); - let share_request = self.share_request(cx); let rpc = self.client.clone(); + let project_id = self.project_remote_id; + let worktree_id = cx.model_id() as u64; cx.spawn(|this, mut cx| async move { - let share_request = if let Some(request) = share_request.await { - request - } else { - return Err(anyhow!("failed to open worktree on the server")); - }; + let project_id = project_id.ok_or_else(|| anyhow!("no project id"))?; - let remote_id = share_request.worktree.as_ref().unwrap().id; - let share_response = rpc.request(share_request).await?; - - log::info!("sharing worktree {:?}", share_response); let (snapshots_to_send_tx, snapshots_to_send_rx) = smol::channel::unbounded::(); - cx.background() .spawn({ let rpc = rpc.clone(); async move { let mut prev_snapshot = snapshot; while let Ok(snapshot) = snapshots_to_send_rx.recv().await { - let message = snapshot.build_update(&prev_snapshot, remote_id, false); + let message = snapshot.build_update( + &prev_snapshot, + project_id, + worktree_id, + false, + ); match rpc.send(message).await { Ok(()) => prev_snapshot = snapshot, Err(err) => log::error!("error sending snapshot diff {}", err), @@ -1491,64 +1281,32 @@ impl LocalWorktree { .detach(); this.update(&mut cx, |worktree, cx| { - let _subscriptions = vec![ - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_add_collaborator), - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_remove_collaborator), - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_open_buffer), - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_close_buffer), - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_update_buffer), - rpc.subscribe_to_entity(remote_id, cx, Worktree::handle_save_buffer), - ]; - let worktree = worktree.as_local_mut().unwrap(); worktree.share = Some(ShareState { snapshots_tx: snapshots_to_send_tx, - _subscriptions, }); }); - Ok(remote_id) + Ok(()) }) } - pub fn unshare(&mut self, cx: &mut ModelContext) { - self.share.take(); - let rpc = self.client.clone(); - let remote_id = self.remote_id(); - cx.foreground() - .spawn( - async move { - if let Some(worktree_id) = remote_id { - rpc.send(proto::UnshareWorktree { worktree_id }).await?; - } - Ok(()) - } - .log_err(), - ) - .detach() - } - - fn share_request(&self, cx: &mut ModelContext) -> Task> { - let remote_id = self.next_remote_id(); + fn to_proto(&self, cx: &mut ModelContext) -> impl Future { + let id = cx.model_id() as u64; let snapshot = self.snapshot(); let root_name = self.root_name.clone(); - cx.background().spawn(async move { - remote_id.await.map(|id| { - let entries = snapshot + async move { + proto::Worktree 
{ + id, + root_name, + entries: snapshot .entries_by_path .cursor::<()>() .filter(|e| !e.is_ignored) .map(Into::into) - .collect(); - proto::ShareWorktree { - worktree: Some(proto::Worktree { - id, - root_name, - entries, - }), - } - }) - }) + .collect(), + } + } } } @@ -1617,6 +1375,7 @@ impl RemoteWorktree { ) -> Task>> { let rpc = self.client.clone(); let replica_id = self.replica_id; + let project_id = self.project_remote_id; let remote_worktree_id = self.remote_id; let root_path = self.snapshot.abs_path.clone(); let path: Arc = Arc::from(path); @@ -1629,6 +1388,7 @@ impl RemoteWorktree { .ok_or_else(|| anyhow!("file does not exist"))?; let response = rpc .request(proto::OpenBuffer { + project_id, worktree_id: remote_worktree_id as u64, path: path_string, }) @@ -1669,10 +1429,6 @@ impl RemoteWorktree { }) } - pub fn remote_id(&self) -> u64 { - self.remote_id - } - pub fn close_all_buffers(&mut self, cx: &mut MutableAppContext) { for (_, buffer) in self.open_buffers.drain() { if let RemoteBuffer::Loaded(buffer) = buffer { @@ -1703,34 +1459,18 @@ impl RemoteWorktree { Ok(()) } - pub fn add_collaborator( - &mut self, - collaborator: Collaborator, - cx: &mut ModelContext, - ) { - self.collaborators - .insert(collaborator.peer_id, collaborator); - cx.notify(); - } - pub fn remove_collaborator( &mut self, - envelope: TypedEnvelope, + peer_id: PeerId, + replica_id: ReplicaId, cx: &mut ModelContext, - ) -> Result<()> { - let peer_id = PeerId(envelope.payload.peer_id); - let replica_id = self - .collaborators - .remove(&peer_id) - .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? - .replica_id; + ) { for (_, buffer) in &self.open_buffers { if let Some(buffer) = buffer.upgrade(cx) { buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx)); } } cx.notify(); - Ok(()) } } @@ -1756,6 +1496,7 @@ impl Snapshot { pub fn build_update( &self, other: &Self, + project_id: u64, worktree_id: u64, include_ignored: bool, ) -> proto::UpdateWorktree { @@ -1809,9 +1550,10 @@ impl Snapshot { } proto::UpdateWorktree { + project_id, + worktree_id, updated_entries, removed_entries, - worktree_id, } } @@ -2168,15 +1910,17 @@ impl language::File for File { version: clock::Global, cx: &mut MutableAppContext, ) -> Task> { + let worktree_id = self.worktree.id() as u64; self.worktree.update(cx, |worktree, cx| match worktree { Worktree::Local(worktree) => { let rpc = worktree.client.clone(); - let worktree_id = *worktree.remote_id.borrow(); + let project_id = worktree.project_remote_id; let save = worktree.save(self.path.clone(), text, cx); cx.background().spawn(async move { let entry = save.await?; - if let Some(worktree_id) = worktree_id { + if let Some(project_id) = project_id { rpc.send(proto::BufferSaved { + project_id, worktree_id, buffer_id, version: (&version).into(), @@ -2189,10 +1933,11 @@ impl language::File for File { } Worktree::Remote(worktree) => { let rpc = worktree.client.clone(); - let worktree_id = worktree.remote_id; + let project_id = worktree.project_remote_id; cx.foreground().spawn(async move { let response = rpc .request(proto::SaveBuffer { + project_id, worktree_id, buffer_id, }) @@ -2225,14 +1970,16 @@ impl language::File for File { } fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext) { + let worktree_id = self.worktree.id() as u64; self.worktree.update(cx, |worktree, cx| { if let Worktree::Remote(worktree) = worktree { - let worktree_id = worktree.remote_id; + let project_id = worktree.project_remote_id; let rpc = worktree.client.clone(); cx.background() 
.spawn(async move { if let Err(error) = rpc .send(proto::CloseBuffer { + project_id, worktree_id, buffer_id, }) @@ -3370,9 +3117,7 @@ mod tests { // Create a remote copy of this worktree. let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot()); let worktree_id = 1; - let share_request = tree.update(&mut cx, |tree, cx| { - tree.as_local().unwrap().share_request(cx) - }); + let proto_message = tree.update(&mut cx, |tree, cx| tree.as_local().unwrap().to_proto(cx)); let open_worktree = server.receive::().await.unwrap(); server .respond( @@ -3383,7 +3128,7 @@ mod tests { let remote = Worktree::remote( proto::JoinWorktreeResponse { - worktree: share_request.await.unwrap().worktree, + worktree: Some(proto_message.await), replica_id: 1, collaborators: Vec::new(), }, diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 34c80437955cfa6604077278ebdfe86d6dc79658..53f1226e72f46558300f11768b044bd2016ebeb2 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -9,38 +9,46 @@ message Envelope { Ack ack = 4; Error error = 5; Ping ping = 6; - ShareWorktree share_worktree = 7; - ShareWorktreeResponse share_worktree_response = 8; - JoinWorktree join_worktree = 9; - JoinWorktreeResponse join_worktree_response = 10; - UpdateWorktree update_worktree = 11; - CloseWorktree close_worktree = 12; - OpenBuffer open_buffer = 13; - OpenBufferResponse open_buffer_response = 14; - CloseBuffer close_buffer = 15; - UpdateBuffer update_buffer = 16; - SaveBuffer save_buffer = 17; - BufferSaved buffer_saved = 18; - AddCollaborator add_collaborator = 19; - RemoveCollaborator remove_collaborator = 20; - GetChannels get_channels = 21; - GetChannelsResponse get_channels_response = 22; - GetUsers get_users = 23; - GetUsersResponse get_users_response = 24; - JoinChannel join_channel = 25; - JoinChannelResponse join_channel_response = 26; - LeaveChannel leave_channel = 27; - SendChannelMessage send_channel_message = 28; - SendChannelMessageResponse send_channel_message_response = 29; - ChannelMessageSent channel_message_sent = 30; - GetChannelMessages get_channel_messages = 31; - GetChannelMessagesResponse get_channel_messages_response = 32; - OpenWorktree open_worktree = 33; - OpenWorktreeResponse open_worktree_response = 34; - UnshareWorktree unshare_worktree = 35; - UpdateContacts update_contacts = 36; - LeaveWorktree leave_worktree = 37; - UpdateDiagnosticSummary update_diagnostic_summary = 38; + + RegisterProject register_project = 7; + RegisterProjectResponse register_project_response = 8; + UnregisterProject unregister_project = 9; + ShareProject share_project = 10; + UnshareProject unshare_project = 11; + JoinProject join_project = 12; + JoinProjectResponse join_project_response = 13; + LeaveProject leave_project = 14; + AddProjectCollaborator add_project_collaborator = 15; + RemoveProjectCollaborator remove_project_collaborator = 16; + + RegisterWorktree register_worktree = 17; + UnregisterWorktree unregister_worktree = 18; + ShareWorktree share_worktree = 100; + UpdateWorktree update_worktree = 19; + UpdateDiagnosticSummary update_diagnostic_summary = 20; + + OpenBuffer open_buffer = 22; + OpenBufferResponse open_buffer_response = 23; + CloseBuffer close_buffer = 24; + UpdateBuffer update_buffer = 25; + SaveBuffer save_buffer = 26; + BufferSaved buffer_saved = 27; + + GetChannels get_channels = 28; + GetChannelsResponse get_channels_response = 29; + JoinChannel join_channel = 30; + JoinChannelResponse join_channel_response = 31; + LeaveChannel leave_channel = 32; + 
SendChannelMessage send_channel_message = 33; + SendChannelMessageResponse send_channel_message_response = 34; + ChannelMessageSent channel_message_sent = 35; + GetChannelMessages get_channel_messages = 36; + GetChannelMessagesResponse get_channel_messages_response = 37; + + UpdateContacts update_contacts = 38; + + GetUsers get_users = 39; + GetUsersResponse get_users_response = 40; } } @@ -54,62 +62,76 @@ message Error { string message = 1; } -message OpenWorktree { - string root_name = 1; - repeated string authorized_logins = 2; +message RegisterProject {} + +message RegisterProjectResponse { + uint64 project_id = 1; } -message OpenWorktreeResponse { - uint64 worktree_id = 1; +message UnregisterProject { + uint64 project_id = 1; } -message ShareWorktree { - Worktree worktree = 1; +message ShareProject { + uint64 project_id = 1; } -message ShareWorktreeResponse {} +message UnshareProject { + uint64 project_id = 1; +} -message UnshareWorktree { - uint64 worktree_id = 1; +message JoinProject { + uint64 project_id = 1; } -message JoinWorktree { - uint64 worktree_id = 1; +message JoinProjectResponse { + uint32 replica_id = 2; + repeated Worktree worktrees = 3; + repeated Collaborator collaborators = 4; } -message LeaveWorktree { - uint64 worktree_id = 1; +message LeaveProject { + uint64 project_id = 1; } -message JoinWorktreeResponse { - Worktree worktree = 2; - uint32 replica_id = 3; - repeated Collaborator collaborators = 4; +message RegisterWorktree { + uint64 project_id = 1; + uint64 worktree_id = 2; + string root_name = 3; + repeated string authorized_logins = 4; } -message UpdateWorktree { - uint64 worktree_id = 1; - repeated Entry updated_entries = 2; - repeated uint64 removed_entries = 3; +message UnregisterWorktree { + uint64 project_id = 1; + uint64 worktree_id = 2; } -message CloseWorktree { - uint64 worktree_id = 1; +message ShareWorktree { + uint64 project_id = 1; + Worktree worktree = 2; +} + +message UpdateWorktree { + uint64 project_id = 1; + uint64 worktree_id = 2; + repeated Entry updated_entries = 3; + repeated uint64 removed_entries = 4; } -message AddCollaborator { - uint64 worktree_id = 1; +message AddProjectCollaborator { + uint64 project_id = 1; Collaborator collaborator = 2; } -message RemoveCollaborator { - uint64 worktree_id = 1; +message RemoveProjectCollaborator { + uint64 project_id = 1; uint32 peer_id = 2; } message OpenBuffer { - uint64 worktree_id = 1; - string path = 2; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenBufferResponse { @@ -117,33 +139,38 @@ message OpenBufferResponse { } message CloseBuffer { - uint64 worktree_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 buffer_id = 3; } message UpdateBuffer { - uint64 worktree_id = 1; - uint64 buffer_id = 2; - repeated Operation operations = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 buffer_id = 3; + repeated Operation operations = 4; } message SaveBuffer { - uint64 worktree_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 buffer_id = 3; } message BufferSaved { - uint64 worktree_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + uint64 buffer_id = 3; + repeated VectorClockEntry version = 4; + Timestamp mtime = 5; } message UpdateDiagnosticSummary { - uint64 worktree_id = 1; - string path = 2; - uint32 error_count = 3; - uint32 warning_count = 4; + uint64 project_id = 1; 
+ uint64 worktree_id = 2; + string path = 3; + uint32 error_count = 4; + uint32 warning_count = 5; } message GetChannels {} @@ -368,12 +395,12 @@ message ChannelMessage { message Contact { uint64 user_id = 1; - repeated WorktreeMetadata worktrees = 2; + repeated ProjectMetadata projects = 2; } -message WorktreeMetadata { +message ProjectMetadata { uint64 id = 1; - string root_name = 2; - bool is_shared = 3; + bool is_shared = 2; + repeated string worktree_root_names = 3; repeated uint64 guests = 4; } diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index d2f2cb2c418e863aa0015e4ef3486959cef4db11..bd5d1c384f241c328bbacf7df2f028f85db68470 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -388,6 +388,7 @@ mod tests { .request( client1_conn_id, proto::OpenBuffer { + project_id: 0, worktree_id: 1, path: "path/one".to_string(), }, @@ -410,6 +411,7 @@ mod tests { .request( client2_conn_id, proto::OpenBuffer { + project_id: 0, worktree_id: 2, path: "path/two".to_string(), }, diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index bfdce85b774b2574751651e8436c8286539f28fe..8eb62d8ce0e8347de937c0969d8e52a9b73a35e3 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -121,68 +121,70 @@ macro_rules! entity_messages { messages!( Ack, - AddCollaborator, + AddProjectCollaborator, BufferSaved, ChannelMessageSent, CloseBuffer, - CloseWorktree, Error, GetChannelMessages, GetChannelMessagesResponse, GetChannels, GetChannelsResponse, - UpdateContacts, GetUsers, GetUsersResponse, JoinChannel, JoinChannelResponse, - JoinWorktree, - JoinWorktreeResponse, + JoinProject, + JoinProjectResponse, LeaveChannel, - LeaveWorktree, + LeaveProject, OpenBuffer, OpenBufferResponse, - OpenWorktree, - OpenWorktreeResponse, + RegisterProjectResponse, Ping, - RemoveCollaborator, + RegisterProject, + RegisterWorktree, + RemoveProjectCollaborator, SaveBuffer, SendChannelMessage, SendChannelMessageResponse, + ShareProject, ShareWorktree, - ShareWorktreeResponse, - UnshareWorktree, + UnregisterProject, + UnregisterWorktree, UpdateBuffer, + UpdateContacts, UpdateWorktree, ); request_messages!( + (GetChannelMessages, GetChannelMessagesResponse), (GetChannels, GetChannelsResponse), (GetUsers, GetUsersResponse), (JoinChannel, JoinChannelResponse), + (JoinProject, JoinProjectResponse), (OpenBuffer, OpenBufferResponse), - (JoinWorktree, JoinWorktreeResponse), - (OpenWorktree, OpenWorktreeResponse), (Ping, Ack), + (RegisterProject, RegisterProjectResponse), + (RegisterWorktree, Ack), (SaveBuffer, BufferSaved), - (UpdateBuffer, Ack), - (ShareWorktree, ShareWorktreeResponse), - (UnshareWorktree, Ack), (SendChannelMessage, SendChannelMessageResponse), - (GetChannelMessages, GetChannelMessagesResponse), + (ShareWorktree, Ack), + (UpdateBuffer, Ack), ); entity_messages!( - worktree_id, - AddCollaborator, + project_id, + AddProjectCollaborator, + RemoveProjectCollaborator, + JoinProject, + LeaveProject, BufferSaved, - CloseBuffer, - CloseWorktree, OpenBuffer, - JoinWorktree, - RemoveCollaborator, + CloseBuffer, SaveBuffer, - UnshareWorktree, + RegisterWorktree, + UnregisterWorktree, UpdateBuffer, UpdateWorktree, ); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 264d684beb0c48764d67101e93993bac0ad8e2d4..b0b07e7bae85f30873563254dbfbbbc6f3595477 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -60,8 +60,8 @@ impl Server { server .add_handler(Server::ping) - .add_handler(Server::open_worktree) - .add_handler(Server::close_worktree) + 
.add_handler(Server::register_worktree) + .add_handler(Server::unregister_worktree) .add_handler(Server::share_worktree) .add_handler(Server::unshare_worktree) .add_handler(Server::join_worktree) @@ -169,26 +169,26 @@ impl Server { self.peer.disconnect(connection_id).await; let removed_connection = self.state_mut().remove_connection(connection_id)?; - for (worktree_id, worktree) in removed_connection.hosted_worktrees { - if let Some(share) = worktree.share { + for (project_id, project) in removed_connection.hosted_projects { + if let Some(share) = project.share { broadcast( connection_id, share.guests.keys().copied().collect(), |conn_id| { self.peer - .send(conn_id, proto::UnshareWorktree { worktree_id }) + .send(conn_id, proto::UnshareProject { project_id }) }, ) .await?; } } - for (worktree_id, peer_ids) in removed_connection.guest_worktree_ids { + for (project_id, peer_ids) in removed_connection.guest_project_ids { broadcast(connection_id, peer_ids, |conn_id| { self.peer.send( conn_id, - proto::RemoveCollaborator { - worktree_id, + proto::RemoveProjectCollaborator { + project_id, peer_id: connection_id.0, }, ) @@ -207,9 +207,9 @@ impl Server { Ok(()) } - async fn open_worktree( + async fn register_worktree( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { let receipt = request.receipt(); let host_user_id = self.state().user_id_for_connection(request.sender_id)?; @@ -232,38 +232,54 @@ impl Server { } let contact_user_ids = contact_user_ids.into_iter().collect::>(); - let worktree_id = self.state_mut().add_worktree(Worktree { - host_connection_id: request.sender_id, - host_user_id, - authorized_user_ids: contact_user_ids.clone(), - root_name: request.payload.root_name, - share: None, - }); + let ok = self.state_mut().register_worktree( + request.project_id, + request.worktree_id, + Worktree { + authorized_user_ids: contact_user_ids.clone(), + root_name: request.payload.root_name, + }, + ); - self.peer - .respond(receipt, proto::OpenWorktreeResponse { worktree_id }) - .await?; - self.update_contacts_for_users(&contact_user_ids).await?; + if ok { + self.peer.respond(receipt, proto::Ack {}).await?; + self.update_contacts_for_users(&contact_user_ids).await?; + } else { + self.peer + .respond_with_error( + receipt, + proto::Error { + message: "no such project".to_string(), + }, + ) + .await?; + } Ok(()) } - async fn close_worktree( + async fn unregister_worktree( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { + let project_id = request.payload.project_id; let worktree_id = request.payload.worktree_id; - let worktree = self - .state_mut() - .remove_worktree(worktree_id, request.sender_id)?; + let worktree = + self.state_mut() + .unregister_worktree(project_id, worktree_id, request.sender_id)?; if let Some(share) = worktree.share { broadcast( request.sender_id, share.guests.keys().copied().collect(), |conn_id| { - self.peer - .send(conn_id, proto::UnshareWorktree { worktree_id }) + self.peer.send( + conn_id, + proto::UnregisterWorktree { + project_id, + worktree_id, + }, + ) }, ) .await?; diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index 2062683b7cda8de04e8d06d6667c5d9ee994435a..5b0b6d9554fb50000af4f6f5d2dad03fd510c23c 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -8,29 +8,38 @@ use std::collections::hash_map; pub struct Store { connections: HashMap, connections_by_user_id: HashMap>, - worktrees: HashMap, - visible_worktrees_by_user_id: 
HashMap>, + projects: HashMap, + visible_projects_by_user_id: HashMap>, channels: HashMap, next_worktree_id: u64, } struct ConnectionState { user_id: UserId, - worktrees: HashSet, + projects: HashSet, channels: HashSet, } -pub struct Worktree { +pub struct Project { pub host_connection_id: ConnectionId, pub host_user_id: UserId, + pub share: Option, + worktrees: HashMap, +} + +pub struct Worktree { pub authorized_user_ids: Vec, pub root_name: String, - pub share: Option, } -pub struct WorktreeShare { +#[derive(Default)] +pub struct ProjectShare { pub guests: HashMap, pub active_replica_ids: HashSet, + pub worktrees: HashMap, +} + +pub struct WorktreeShare { pub entries: HashMap, } @@ -43,8 +52,8 @@ pub type ReplicaId = u16; #[derive(Default)] pub struct RemovedConnectionState { - pub hosted_worktrees: HashMap, - pub guest_worktree_ids: HashMap>, + pub hosted_projects: HashMap, + pub guest_project_ids: HashMap>, pub contact_ids: HashSet, } @@ -69,7 +78,7 @@ impl Store { connection_id, ConnectionState { user_id, - worktrees: Default::default(), + projects: Default::default(), channels: Default::default(), }, ); @@ -106,7 +115,7 @@ impl Store { let mut result = RemovedConnectionState::default(); for worktree_id in connection.worktrees.clone() { - if let Ok(worktree) = self.remove_worktree(worktree_id, connection_id) { + if let Ok(worktree) = self.unregister_worktree(worktree_id, connection_id) { result .contact_ids .extend(worktree.authorized_user_ids.iter().copied()); @@ -174,12 +183,12 @@ impl Store { pub fn contacts_for_user(&self, user_id: UserId) -> Vec { let mut contacts = HashMap::default(); - for worktree_id in self - .visible_worktrees_by_user_id + for project_id in self + .visible_projects_by_user_id .get(&user_id) .unwrap_or(&HashSet::default()) { - let worktree = &self.worktrees[worktree_id]; + let project = &self.projects[project_id]; let mut guests = HashSet::default(); if let Ok(share) = worktree.share() { @@ -190,18 +199,22 @@ impl Store { } } - if let Ok(host_user_id) = self.user_id_for_connection(worktree.host_connection_id) { + if let Ok(host_user_id) = self.user_id_for_connection(project.host_connection_id) { contacts .entry(host_user_id) .or_insert_with(|| proto::Contact { user_id: host_user_id.to_proto(), - worktrees: Vec::new(), + projects: Vec::new(), }) - .worktrees - .push(proto::WorktreeMetadata { - id: *worktree_id, - root_name: worktree.root_name.clone(), - is_shared: worktree.share.is_some(), + .projects + .push(proto::ProjectMetadata { + id: *project_id, + worktree_root_names: project + .worktrees + .iter() + .map(|worktree| worktree.root_name.clone()) + .collect(), + is_shared: project.share.is_some(), guests: guests.into_iter().collect(), }); } @@ -210,41 +223,75 @@ impl Store { contacts.into_values().collect() } - pub fn add_worktree(&mut self, worktree: Worktree) -> u64 { - let worktree_id = self.next_worktree_id; - for authorized_user_id in &worktree.authorized_user_ids { - self.visible_worktrees_by_user_id - .entry(*authorized_user_id) - .or_default() - .insert(worktree_id); - } - self.next_worktree_id += 1; - if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) { - connection.worktrees.insert(worktree_id); - } - self.worktrees.insert(worktree_id, worktree); + pub fn register_project( + &mut self, + host_connection_id: ConnectionId, + host_user_id: UserId, + ) -> u64 { + let project_id = self.next_project_id; + self.projects.insert( + project_id, + Project { + host_connection_id, + host_user_id, + share: None, + worktrees: 
Default::default(), + }, + ); + self.next_project_id += 1; + project_id + } - #[cfg(test)] - self.check_invariants(); + pub fn register_worktree( + &mut self, + project_id: u64, + worktree_id: u64, + worktree: Worktree, + ) -> bool { + if let Some(project) = self.projects.get_mut(&project_id) { + for authorized_user_id in &worktree.authorized_user_ids { + self.visible_projects_by_user_id + .entry(*authorized_user_id) + .or_default() + .insert(project_id); + } + if let Some(connection) = self.connections.get_mut(&project.host_connection_id) { + connection.projects.insert(project_id); + } + project.worktrees.insert(worktree_id, worktree); + + #[cfg(test)] + self.check_invariants(); + true + } else { + false + } + } - worktree_id + pub fn unregister_project(&mut self, project_id: u64) { + todo!() } - pub fn remove_worktree( + pub fn unregister_worktree( &mut self, + project_id: u64, worktree_id: u64, acting_connection_id: ConnectionId, ) -> tide::Result { - let worktree = if let hash_map::Entry::Occupied(e) = self.worktrees.entry(worktree_id) { - if e.get().host_connection_id != acting_connection_id { - Err(anyhow!("not your worktree"))?; - } - e.remove() - } else { - return Err(anyhow!("no such worktree"))?; - }; + let project = self + .projects + .get_mut(&project_id) + .ok_or_else(|| anyhow!("no such project"))?; + if project.host_connection_id != acting_connection_id { + Err(anyhow!("not your worktree"))?; + } + + let worktree = project + .worktrees + .remove(&worktree_id) + .ok_or_else(|| anyhow!("no such worktree"))?; - if let Some(connection) = self.connections.get_mut(&worktree.host_connection_id) { + if let Some(connection) = self.connections.get_mut(&project.host_connection_id) { connection.worktrees.remove(&worktree_id); } @@ -271,20 +318,31 @@ impl Store { Ok(worktree) } + pub fn share_project(&mut self, project_id: u64, connection_id: ConnectionId) -> bool { + if let Some(project) = self.projects.get_mut(&project_id) { + if project.host_connection_id == connection_id { + project.share = Some(ProjectShare::default()); + return true; + } + } + false + } + pub fn share_worktree( &mut self, + project_id: u64, worktree_id: u64, connection_id: ConnectionId, entries: HashMap, ) -> Option> { - if let Some(worktree) = self.worktrees.get_mut(&worktree_id) { - if worktree.host_connection_id == connection_id { - worktree.share = Some(WorktreeShare { - guests: Default::default(), - active_replica_ids: Default::default(), - entries, - }); - return Some(worktree.authorized_user_ids.clone()); + if let Some(project) = self.projects.get_mut(&project_id) { + if project.host_connection_id == connection_id { + if let Some(share) = project.share.as_mut() { + share + .worktrees + .insert(worktree_id, WorktreeShare { entries }); + return Some(project.authorized_user_ids()); + } } } None @@ -586,14 +644,14 @@ impl Worktree { } } - pub fn share(&self) -> tide::Result<&WorktreeShare> { + pub fn share(&self) -> tide::Result<&ProjectShare> { Ok(self .share .as_ref() .ok_or_else(|| anyhow!("worktree is not shared"))?) } - fn share_mut(&mut self) -> tide::Result<&mut WorktreeShare> { + fn share_mut(&mut self) -> tide::Result<&mut ProjectShare> { Ok(self .share .as_mut() From 697e641e8ec89081f4dad1dfbe33d59334cf004f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 20 Dec 2021 16:08:34 +0100 Subject: [PATCH 140/196] Get back to a compiling state for client This temporarily disables some tests and there are still some server-side compiler errors. 
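
As context for this refactor: the client now tracks a single server-assigned project id instead of one id per worktree, storing it in a `postage::watch` channel so callers can either read the current value or await the first assignment. Below is a minimal, self-contained sketch of that plumbing, assuming the `postage` and `smol` crates; `LocalProjectState` and `main` are hypothetical names used only to mirror the watch-channel pattern of `Project::set_remote_id` / `Project::next_remote_id` in these patches, not the actual Zed code.

    // Illustrative sketch only (assumptions noted above), not the real Project type.
    use postage::{prelude::Stream, watch};

    struct LocalProjectState {
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
    }

    impl LocalProjectState {
        fn new() -> Self {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            Self { remote_id_tx, remote_id_rx }
        }

        // Called when the server acknowledges `RegisterProject`.
        fn set_remote_id(&mut self, id: Option<u64>) {
            *self.remote_id_tx.borrow_mut() = id;
        }

        // Await the next non-None id; a watch receiver yields the latest
        // value, so this resolves as soon as an id has been assigned.
        async fn next_remote_id(&self) -> u64 {
            let mut rx = self.remote_id_rx.clone();
            loop {
                if let Some(Some(id)) = rx.recv().await {
                    return id;
                }
            }
        }
    }

    fn main() {
        let mut state = LocalProjectState::new();
        state.set_remote_id(Some(42));
        assert_eq!(smol::block_on(state.next_remote_id()), 42);
    }
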
--- crates/project/src/project.rs | 226 +++++---------- crates/project/src/worktree.rs | 492 ++++++++++++++++----------------- crates/rpc/proto/zed.proto | 5 +- 3 files changed, 314 insertions(+), 409 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f08b8a891c2159c81ff65a2e4e7b53ae729e78e9..2ee742af7e99f0013de729f6959915ca4f19da0e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,12 +11,12 @@ use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; use language::{Buffer, DiagnosticEntry, LanguageRegistry}; use lsp::DiagnosticSeverity; -use postage::{prelude::Stream, sink::Sink, watch}; +use postage::{prelude::Stream, watch}; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, }; -use util::{ResultExt, TryFutureExt as _}; +use util::TryFutureExt as _; pub use fs::*; pub use worktree::*; @@ -115,7 +115,7 @@ impl Project { let (remote_id_tx, remote_id_rx) = watch::channel(); let _maintain_remote_id_task = cx.spawn_weak({ let rpc = client.clone(); - move |this, cx| { + move |this, mut cx| { async move { let mut status = rpc.status(); while let Some(status) = status.recv().await { @@ -228,11 +228,11 @@ impl Project { fn set_remote_id(&mut self, remote_id: Option, cx: &mut ModelContext) { if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state { - cx.foreground().spawn(remote_id_tx.send(remote_id)).detach(); + *remote_id_tx.borrow_mut() = remote_id; } for worktree in &self.worktrees { - worktree.update(cx, |worktree, cx| { + worktree.update(cx, |worktree, _| { if let Some(worktree) = worktree.as_local_mut() { worktree.set_project_remote_id(remote_id); } @@ -259,7 +259,7 @@ impl Project { } } - pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { + pub fn replica_id(&self) -> ReplicaId { match &self.client_state { ProjectClientState::Local { .. } => 0, ProjectClientState::Remote { replica_id, .. 
} => *replica_id, @@ -284,7 +284,7 @@ impl Project { pub fn share(&self, cx: &mut ModelContext) -> Task> { let rpc = self.client.clone(); cx.spawn(|this, mut cx| async move { - let remote_id = this.update(&mut cx, |this, cx| { + let remote_id = this.update(&mut cx, |this, _| { if let ProjectClientState::Local { is_shared, remote_id_rx, @@ -339,24 +339,26 @@ impl Project { let path = Arc::from(abs_path); cx.spawn(|this, mut cx| async move { let worktree = - Worktree::open_local(client, user_store, path, fs, languages, &mut cx).await?; + Worktree::open_local(client.clone(), user_store, path, fs, languages, &mut cx) + .await?; this.update(&mut cx, |this, cx| { if let Some(project_id) = this.remote_id() { worktree.update(cx, |worktree, cx| { - worktree - .as_local_mut() - .unwrap() - .set_project_remote_id(Some(project_id)); - cx.foreground().spawn( - client - .request(proto::RegisterWorktree { - project_id, - root_name: worktree.root_name().to_string(), - authorized_logins: worktree.authorized_logins(), - worktree_id: worktree.id() as u64, - }) - .log_err(), - ); + let worktree = worktree.as_local_mut().unwrap(); + worktree.set_project_remote_id(Some(project_id)); + let serialized_worktree = worktree.to_proto(cx); + let authorized_logins = worktree.authorized_logins(); + cx.foreground() + .spawn(async move { + client + .request(proto::RegisterWorktree { + project_id, + worktree: Some(serialized_worktree), + authorized_logins, + }) + .log_err(); + }) + .detach(); }); } this.add_worktree(worktree.clone(), cx); @@ -365,31 +367,6 @@ impl Project { }) } - pub fn add_remote_worktree( - &mut self, - remote_id: u64, - cx: &mut ModelContext, - ) -> Task>> { - let rpc = self.client.clone(); - let languages = self.languages.clone(); - let user_store = self.user_store.clone(); - cx.spawn(|this, mut cx| async move { - let worktree = - Worktree::remote(rpc.clone(), remote_id, languages, user_store, &mut cx).await?; - this.update(&mut cx, |this, cx| { - cx.subscribe(&worktree, move |this, _, event, cx| match event { - worktree::Event::Closed => { - this.close_remote_worktree(remote_id, cx); - cx.notify(); - } - }) - .detach(); - this.add_worktree(worktree.clone(), cx); - }); - Ok(worktree) - }) - } - fn add_worktree(&mut self, worktree: ModelHandle, cx: &mut ModelContext) { cx.observe(&worktree, |_, _, cx| cx.notify()).detach(); if self.active_worktree.is_none() { @@ -446,82 +423,6 @@ impl Project { self.active_entry } - pub fn share_worktree(&self, remote_id: u64, cx: &mut ModelContext) { - let rpc = self.client.clone(); - cx.spawn(|this, mut cx| { - async move { - rpc.authenticate_and_connect(&cx).await?; - - let task = this.update(&mut cx, |this, cx| { - for worktree in &this.worktrees { - let task = worktree.update(cx, |worktree, cx| { - worktree.as_local_mut().and_then(|worktree| { - if worktree.remote_id() == Some(remote_id) { - Some(worktree.share(cx)) - } else { - None - } - }) - }); - if task.is_some() { - return task; - } - } - None - }); - - if let Some(task) = task { - task.await?; - } - - Ok(()) - } - .log_err() - }) - .detach(); - } - - pub fn unshare_worktree(&mut self, remote_id: u64, cx: &mut ModelContext) { - for worktree in &self.worktrees { - if worktree.update(cx, |worktree, cx| { - if let Some(worktree) = worktree.as_local_mut() { - if worktree.remote_id() == Some(remote_id) { - worktree.unshare(cx); - return true; - } - } - false - }) { - break; - } - } - } - - pub fn close_remote_worktree(&mut self, id: u64, cx: &mut ModelContext) { - let mut reset_active = None; - 
self.worktrees.retain(|worktree| { - let keep = worktree.update(cx, |worktree, cx| { - if let Some(worktree) = worktree.as_remote_mut() { - if worktree.remote_id() == id { - worktree.close_all_buffers(cx); - return false; - } - } - true - }); - if !keep { - cx.emit(Event::WorktreeRemoved(worktree.id())); - reset_active = Some(worktree.id()); - } - keep - }); - - if self.active_worktree == reset_active { - self.active_worktree = self.worktrees.first().map(|w| w.id()); - cx.notify(); - } - } - // RPC message handlers fn handle_add_collaborator( @@ -541,9 +442,11 @@ impl Project { async move { let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?; - this.collaborators - .insert(collaborator.peer_id, collaborator); - cx.notify(); + this.update(&mut cx, |this, cx| { + this.collaborators + .insert(collaborator.peer_id, collaborator); + cx.notify(); + }); Ok(()) } .log_err() @@ -576,34 +479,43 @@ impl Project { fn handle_register_worktree( &mut self, envelope: TypedEnvelope, - _: Arc, + client: Arc, cx: &mut ModelContext, ) -> Result<()> { - let peer_id = PeerId(envelope.payload.peer_id); - let replica_id = self - .collaborators - .remove(&peer_id) - .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))? - .replica_id; - for worktree in &self.worktrees { - worktree.update(cx, |worktree, cx| { - worktree.remove_collaborator(peer_id, replica_id, cx); - }) - } + let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?; + let replica_id = self.replica_id(); + let worktree = envelope + .payload + .worktree + .ok_or_else(|| anyhow!("invalid worktree"))?; + let user_store = self.user_store.clone(); + let languages = self.languages.clone(); + cx.spawn(|this, mut cx| { + async move { + let worktree = Worktree::remote( + remote_id, replica_id, worktree, client, user_store, languages, &mut cx, + ) + .await?; + this.update(&mut cx, |this, cx| this.add_worktree(worktree, cx)); + Ok(()) + } + .log_err() + }) + .detach(); Ok(()) } fn handle_update_worktree( &mut self, - mut envelope: TypedEnvelope, + envelope: TypedEnvelope, _: Arc, cx: &mut ModelContext, ) -> Result<()> { if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { - worktree - .as_remote_mut() - .unwrap() - .update_from_remote(envelope, cx); + worktree.update(cx, |worktree, cx| { + let worktree = worktree.as_remote_mut().unwrap(); + worktree.update_from_remote(envelope, cx) + })?; } Ok(()) } @@ -615,7 +527,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { - worktree.handle_update_buffer(envelope, cx)?; + worktree.update(cx, |worktree, cx| { + worktree.handle_update_buffer(envelope, cx) + })?; } Ok(()) } @@ -627,7 +541,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { - worktree.handle_buffer_saved(envelope, cx); + worktree.update(cx, |worktree, cx| { + worktree.handle_buffer_saved(envelope, cx) + })?; } Ok(()) } @@ -730,14 +646,16 @@ impl Entity for Project { type Event = Event; fn release(&mut self, cx: &mut gpui::MutableAppContext) { - if let Some(project_id) = *self.remote_id.borrow() { - let rpc = self.client.clone(); - cx.spawn(|_| async move { - if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await { - log::error!("error unregistering project: {}", err); - } - }) - .detach(); + if let ProjectClientState::Local { remote_id_rx, .. 
} = &self.client_state { + if let Some(project_id) = *remote_id_rx.borrow() { + let rpc = self.client.clone(); + cx.spawn(|_| async move { + if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await { + log::error!("error unregistering project: {}", err); + } + }) + .detach(); + } } } } @@ -874,6 +792,6 @@ mod tests { let client = client::Client::new(); let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) }); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); - cx.add_model(|cx| Project::new(languages, client, user_store, fs, cx)) + cx.add_model(|cx| Project::local(languages, client, user_store, fs, cx)) } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 011275bd3b491ab26a02e28561da5bf36d6eeb1c..44259f9aeddeae9dfab7f941ea2db33d31cf5060 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -266,13 +266,6 @@ impl Worktree { } } - pub fn authorized_logins(&self) -> Vec { - match self { - Worktree::Local(worktree) => worktree.config.collaborators.clone(), - Worktree::Remote(worktree) => Vec::new(), - } - } - pub fn remove_collaborator( &mut self, peer_id: PeerId, @@ -281,7 +274,7 @@ impl Worktree { ) { match self { Worktree::Local(worktree) => worktree.remove_collaborator(peer_id, replica_id, cx), - Worktree::Remote(worktree) => worktree.remove_collaborator(peer_id, replica_id, cx), + Worktree::Remote(worktree) => worktree.remove_collaborator(replica_id, cx), } } @@ -438,7 +431,6 @@ impl Worktree { pub fn handle_update_buffer( &mut self, envelope: TypedEnvelope, - _: Arc, cx: &mut ModelContext, ) -> Result<()> { let payload = envelope.payload.clone(); @@ -536,7 +528,6 @@ impl Worktree { pub fn handle_buffer_saved( &mut self, envelope: TypedEnvelope, - _: Arc, cx: &mut ModelContext, ) -> Result<()> { let payload = envelope.payload.clone(); @@ -978,6 +969,10 @@ impl LocalWorktree { self.project_remote_id = id; } + pub fn authorized_logins(&self) -> Vec { + self.config.collaborators.clone() + } + pub fn languages(&self) -> &LanguageRegistry { &self.languages } @@ -1280,7 +1275,7 @@ impl LocalWorktree { }) .detach(); - this.update(&mut cx, |worktree, cx| { + this.update(&mut cx, |worktree, _| { let worktree = worktree.as_local_mut().unwrap(); worktree.share = Some(ShareState { snapshots_tx: snapshots_to_send_tx, @@ -1291,21 +1286,19 @@ impl LocalWorktree { }) } - fn to_proto(&self, cx: &mut ModelContext) -> impl Future { + pub fn to_proto(&self, cx: &mut ModelContext) -> proto::Worktree { let id = cx.model_id() as u64; let snapshot = self.snapshot(); let root_name = self.root_name.clone(); - async move { - proto::Worktree { - id, - root_name, - entries: snapshot - .entries_by_path - .cursor::<()>() - .filter(|e| !e.is_ignored) - .map(Into::into) - .collect(), - } + proto::Worktree { + id, + root_name, + entries: snapshot + .entries_by_path + .cursor::<()>() + .filter(|e| !e.is_ignored) + .map(Into::into) + .collect(), } } } @@ -1443,7 +1436,7 @@ impl RemoteWorktree { self.snapshot.clone() } - fn update_from_remote( + pub fn update_from_remote( &mut self, envelope: TypedEnvelope, cx: &mut ModelContext, @@ -1459,12 +1452,7 @@ impl RemoteWorktree { Ok(()) } - pub fn remove_collaborator( - &mut self, - peer_id: PeerId, - replica_id: ReplicaId, - cx: &mut ModelContext, - ) { + pub fn remove_collaborator(&mut self, replica_id: ReplicaId, cx: &mut ModelContext) { for (_, buffer) in &self.open_buffers { if let Some(buffer) = buffer.upgrade(cx) { 
buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx)); @@ -3058,172 +3046,172 @@ mod tests { assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text())); } - #[gpui::test] - async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) { - let dir = temp_tree(json!({ - "a": { - "file1": "", - "file2": "", - "file3": "", - }, - "b": { - "c": { - "file4": "", - "file5": "", - } - } - })); - - let user_id = 5; - let mut client = Client::new(); - let server = FakeServer::for_client(user_id, &mut client, &cx).await; - let user_store = server.build_user_store(client.clone(), &mut cx).await; - let tree = Worktree::open_local( - client, - user_store.clone(), - dir.path(), - Arc::new(RealFs), - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { - let buffer = tree.update(cx, |tree, cx| tree.open_buffer(path, cx)); - async move { buffer.await.unwrap() } - }; - let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| { - tree.read_with(cx, |tree, _| { - tree.entry_for_path(path) - .expect(&format!("no entry for path {}", path)) - .id - }) - }; - - let buffer2 = buffer_for_path("a/file2", &mut cx).await; - let buffer3 = buffer_for_path("a/file3", &mut cx).await; - let buffer4 = buffer_for_path("b/c/file4", &mut cx).await; - let buffer5 = buffer_for_path("b/c/file5", &mut cx).await; - - let file2_id = id_for_path("a/file2", &cx); - let file3_id = id_for_path("a/file3", &cx); - let file4_id = id_for_path("b/c/file4", &cx); - - // Wait for the initial scan. - cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - .await; - - // Create a remote copy of this worktree. - let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot()); - let worktree_id = 1; - let proto_message = tree.update(&mut cx, |tree, cx| tree.as_local().unwrap().to_proto(cx)); - let open_worktree = server.receive::().await.unwrap(); - server - .respond( - open_worktree.receipt(), - proto::OpenWorktreeResponse { worktree_id: 1 }, - ) - .await; - - let remote = Worktree::remote( - proto::JoinWorktreeResponse { - worktree: Some(proto_message.await), - replica_id: 1, - collaborators: Vec::new(), - }, - Client::new(), - user_store, - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - cx.read(|cx| { - assert!(!buffer2.read(cx).is_dirty()); - assert!(!buffer3.read(cx).is_dirty()); - assert!(!buffer4.read(cx).is_dirty()); - assert!(!buffer5.read(cx).is_dirty()); - }); - - // Rename and delete files and directories. 
- tree.flush_fs_events(&cx).await; - std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); - std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); - std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); - std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); - tree.flush_fs_events(&cx).await; - - let expected_paths = vec![ - "a", - "a/file1", - "a/file2.new", - "b", - "d", - "d/file3", - "d/file4", - ]; - - cx.read(|app| { - assert_eq!( - tree.read(app) - .paths() - .map(|p| p.to_str().unwrap()) - .collect::>(), - expected_paths - ); - - assert_eq!(id_for_path("a/file2.new", &cx), file2_id); - assert_eq!(id_for_path("d/file3", &cx), file3_id); - assert_eq!(id_for_path("d/file4", &cx), file4_id); - - assert_eq!( - buffer2.read(app).file().unwrap().path().as_ref(), - Path::new("a/file2.new") - ); - assert_eq!( - buffer3.read(app).file().unwrap().path().as_ref(), - Path::new("d/file3") - ); - assert_eq!( - buffer4.read(app).file().unwrap().path().as_ref(), - Path::new("d/file4") - ); - assert_eq!( - buffer5.read(app).file().unwrap().path().as_ref(), - Path::new("b/c/file5") - ); - - assert!(!buffer2.read(app).file().unwrap().is_deleted()); - assert!(!buffer3.read(app).file().unwrap().is_deleted()); - assert!(!buffer4.read(app).file().unwrap().is_deleted()); - assert!(buffer5.read(app).file().unwrap().is_deleted()); - }); - - // Update the remote worktree. Check that it becomes consistent with the - // local worktree. - remote.update(&mut cx, |remote, cx| { - let update_message = - tree.read(cx) - .snapshot() - .build_update(&initial_snapshot, worktree_id, true); - remote - .as_remote_mut() - .unwrap() - .snapshot - .apply_update(update_message) - .unwrap(); - - assert_eq!( - remote - .paths() - .map(|p| p.to_str().unwrap()) - .collect::>(), - expected_paths - ); - }); - } + // #[gpui::test] + // async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) { + // let dir = temp_tree(json!({ + // "a": { + // "file1": "", + // "file2": "", + // "file3": "", + // }, + // "b": { + // "c": { + // "file4": "", + // "file5": "", + // } + // } + // })); + + // let user_id = 5; + // let mut client = Client::new(); + // let server = FakeServer::for_client(user_id, &mut client, &cx).await; + // let user_store = server.build_user_store(client.clone(), &mut cx).await; + // let tree = Worktree::open_local( + // client, + // user_store.clone(), + // dir.path(), + // Arc::new(RealFs), + // Default::default(), + // &mut cx.to_async(), + // ) + // .await + // .unwrap(); + + // let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { + // let buffer = tree.update(cx, |tree, cx| tree.open_buffer(path, cx)); + // async move { buffer.await.unwrap() } + // }; + // let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| { + // tree.read_with(cx, |tree, _| { + // tree.entry_for_path(path) + // .expect(&format!("no entry for path {}", path)) + // .id + // }) + // }; + + // let buffer2 = buffer_for_path("a/file2", &mut cx).await; + // let buffer3 = buffer_for_path("a/file3", &mut cx).await; + // let buffer4 = buffer_for_path("b/c/file4", &mut cx).await; + // let buffer5 = buffer_for_path("b/c/file5", &mut cx).await; + + // let file2_id = id_for_path("a/file2", &cx); + // let file3_id = id_for_path("a/file3", &cx); + // let file4_id = id_for_path("b/c/file4", &cx); + + // // Wait for the initial scan. 
+ // cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + // .await; + + // // Create a remote copy of this worktree. + // let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot()); + // let worktree_id = 1; + // let proto_message = tree.update(&mut cx, |tree, cx| tree.as_local().unwrap().to_proto(cx)); + // let open_worktree = server.receive::().await.unwrap(); + // server + // .respond( + // open_worktree.receipt(), + // proto::OpenWorktreeResponse { worktree_id: 1 }, + // ) + // .await; + + // let remote = Worktree::remote( + // proto::JoinWorktreeResponse { + // worktree: Some(proto_message.await), + // replica_id: 1, + // collaborators: Vec::new(), + // }, + // Client::new(), + // user_store, + // Default::default(), + // &mut cx.to_async(), + // ) + // .await + // .unwrap(); + + // cx.read(|cx| { + // assert!(!buffer2.read(cx).is_dirty()); + // assert!(!buffer3.read(cx).is_dirty()); + // assert!(!buffer4.read(cx).is_dirty()); + // assert!(!buffer5.read(cx).is_dirty()); + // }); + + // // Rename and delete files and directories. + // tree.flush_fs_events(&cx).await; + // std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); + // std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); + // std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); + // std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); + // tree.flush_fs_events(&cx).await; + + // let expected_paths = vec![ + // "a", + // "a/file1", + // "a/file2.new", + // "b", + // "d", + // "d/file3", + // "d/file4", + // ]; + + // cx.read(|app| { + // assert_eq!( + // tree.read(app) + // .paths() + // .map(|p| p.to_str().unwrap()) + // .collect::>(), + // expected_paths + // ); + + // assert_eq!(id_for_path("a/file2.new", &cx), file2_id); + // assert_eq!(id_for_path("d/file3", &cx), file3_id); + // assert_eq!(id_for_path("d/file4", &cx), file4_id); + + // assert_eq!( + // buffer2.read(app).file().unwrap().path().as_ref(), + // Path::new("a/file2.new") + // ); + // assert_eq!( + // buffer3.read(app).file().unwrap().path().as_ref(), + // Path::new("d/file3") + // ); + // assert_eq!( + // buffer4.read(app).file().unwrap().path().as_ref(), + // Path::new("d/file4") + // ); + // assert_eq!( + // buffer5.read(app).file().unwrap().path().as_ref(), + // Path::new("b/c/file5") + // ); + + // assert!(!buffer2.read(app).file().unwrap().is_deleted()); + // assert!(!buffer3.read(app).file().unwrap().is_deleted()); + // assert!(!buffer4.read(app).file().unwrap().is_deleted()); + // assert!(buffer5.read(app).file().unwrap().is_deleted()); + // }); + + // // Update the remote worktree. Check that it becomes consistent with the + // // local worktree. 
+ // remote.update(&mut cx, |remote, cx| { + // let update_message = + // tree.read(cx) + // .snapshot() + // .build_update(&initial_snapshot, worktree_id, true); + // remote + // .as_remote_mut() + // .unwrap() + // .snapshot + // .apply_update(update_message) + // .unwrap(); + + // assert_eq!( + // remote + // .paths() + // .map(|p| p.to_str().unwrap()) + // .collect::>(), + // expected_paths + // ); + // }); + // } #[gpui::test] async fn test_rescan_with_gitignore(mut cx: gpui::TestAppContext) { @@ -3277,61 +3265,61 @@ mod tests { }); } - #[gpui::test] - async fn test_open_and_share_worktree(mut cx: gpui::TestAppContext) { - let user_id = 100; - let mut client = Client::new(); - let server = FakeServer::for_client(user_id, &mut client, &cx).await; - let user_store = server.build_user_store(client.clone(), &mut cx).await; - - let fs = Arc::new(FakeFs::new()); - fs.insert_tree( - "/path", - json!({ - "to": { - "the-dir": { - ".zed.toml": r#"collaborators = ["friend-1", "friend-2"]"#, - "a.txt": "a-contents", - }, - }, - }), - ) - .await; - - let worktree = Worktree::open_local( - client.clone(), - user_store, - "/path/to/the-dir".as_ref(), - fs, - Default::default(), - &mut cx.to_async(), - ) - .await - .unwrap(); - - let open_worktree = server.receive::().await.unwrap(); - assert_eq!( - open_worktree.payload, - proto::OpenWorktree { - root_name: "the-dir".to_string(), - authorized_logins: vec!["friend-1".to_string(), "friend-2".to_string()], - } - ); - - server - .respond( - open_worktree.receipt(), - proto::OpenWorktreeResponse { worktree_id: 5 }, - ) - .await; - let remote_id = worktree - .update(&mut cx, |tree, _| tree.as_local().unwrap().next_remote_id()) - .await; - assert_eq!(remote_id, Some(5)); - - cx.update(move |_| drop(worktree)); - server.receive::().await.unwrap(); - } + // #[gpui::test] + // async fn test_open_and_share_worktree(mut cx: gpui::TestAppContext) { + // let user_id = 100; + // let mut client = Client::new(); + // let server = FakeServer::for_client(user_id, &mut client, &cx).await; + // let user_store = server.build_user_store(client.clone(), &mut cx).await; + + // let fs = Arc::new(FakeFs::new()); + // fs.insert_tree( + // "/path", + // json!({ + // "to": { + // "the-dir": { + // ".zed.toml": r#"collaborators = ["friend-1", "friend-2"]"#, + // "a.txt": "a-contents", + // }, + // }, + // }), + // ) + // .await; + + // let worktree = Worktree::open_local( + // client.clone(), + // user_store, + // "/path/to/the-dir".as_ref(), + // fs, + // Default::default(), + // &mut cx.to_async(), + // ) + // .await + // .unwrap(); + + // let open_worktree = server.receive::().await.unwrap(); + // assert_eq!( + // open_worktree.payload, + // proto::OpenWorktree { + // root_name: "the-dir".to_string(), + // authorized_logins: vec!["friend-1".to_string(), "friend-2".to_string()], + // } + // ); + + // server + // .respond( + // open_worktree.receipt(), + // proto::OpenWorktreeResponse { worktree_id: 5 }, + // ) + // .await; + // let remote_id = worktree + // .update(&mut cx, |tree, _| tree.as_local().unwrap().next_remote_id()) + // .await; + // assert_eq!(remote_id, Some(5)); + + // cx.update(move |_| drop(worktree)); + // server.receive::().await.unwrap(); + // } #[gpui::test] async fn test_buffer_deduping(mut cx: gpui::TestAppContext) { @@ -4087,7 +4075,7 @@ mod tests { let update = scanner .snapshot() - .build_update(&prev_snapshot, 0, include_ignored); + .build_update(&prev_snapshot, 0, 0, include_ignored); prev_snapshot.apply_update(update).unwrap(); assert_eq!( 
prev_snapshot.to_vec(true), diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 53f1226e72f46558300f11768b044bd2016ebeb2..4a0081ce7fd7cbbdc7679e8c7c0f9a57717f0708 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -96,9 +96,8 @@ message LeaveProject { message RegisterWorktree { uint64 project_id = 1; - uint64 worktree_id = 2; - string root_name = 3; - repeated string authorized_logins = 4; + Worktree worktree = 2; + repeated string authorized_logins = 3; } message UnregisterWorktree { From 614ee4eac7b71a50524175278b9227cba4e4389e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 11:36:59 -0800 Subject: [PATCH 141/196] Send worktree info only when sharing worktree Co-Authored-By: Antonio Scandurra Co-Authored-By: Nathan Sobo --- crates/contacts_panel/src/contacts_panel.rs | 107 ++++---------- crates/diagnostics/src/diagnostics.rs | 2 +- crates/gpui/src/executor.rs | 8 ++ crates/project/src/project.rs | 88 +++++++----- crates/project/src/worktree.rs | 146 +++++++++++--------- crates/project_panel/src/project_panel.rs | 5 +- crates/rpc/proto/zed.proto | 7 +- crates/rpc/src/proto.rs | 3 +- crates/theme/src/theme.rs | 8 +- crates/workspace/src/workspace.rs | 34 ++--- 10 files changed, 200 insertions(+), 208 deletions(-) diff --git a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index 8cf8b9191b6f478dd2d96fe4276af7208003fab4..e3db1931afc9b219aef1898cb78f235dc4becd89 100644 --- a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -11,16 +11,10 @@ use postage::watch; use theme::Theme; use workspace::{Settings, Workspace}; -action!(JoinWorktree, u64); -action!(LeaveWorktree, u64); -action!(ShareWorktree, u64); -action!(UnshareWorktree, u64); +action!(JoinProject, u64); pub fn init(cx: &mut MutableAppContext) { - cx.add_action(ContactsPanel::share_worktree); - cx.add_action(ContactsPanel::unshare_worktree); - cx.add_action(ContactsPanel::join_worktree); - cx.add_action(ContactsPanel::leave_worktree); + cx.add_action(ContactsPanel::join_project); } pub struct ContactsPanel { @@ -63,44 +57,8 @@ impl ContactsPanel { } } - fn share_worktree( - workspace: &mut Workspace, - action: &ShareWorktree, - cx: &mut ViewContext, - ) { - workspace - .project() - .update(cx, |p, cx| p.share_worktree(action.0, cx)); - } - - fn unshare_worktree( - workspace: &mut Workspace, - action: &UnshareWorktree, - cx: &mut ViewContext, - ) { - workspace - .project() - .update(cx, |p, cx| p.unshare_worktree(action.0, cx)); - } - - fn join_worktree( - workspace: &mut Workspace, - action: &JoinWorktree, - cx: &mut ViewContext, - ) { - workspace - .project() - .update(cx, |p, cx| p.add_remote_worktree(action.0, cx).detach()); - } - - fn leave_worktree( - workspace: &mut Workspace, - action: &LeaveWorktree, - cx: &mut ViewContext, - ) { - workspace - .project() - .update(cx, |p, cx| p.close_remote_worktree(action.0, cx)); + fn join_project(_: &mut Workspace, _: &JoinProject, _: &mut ViewContext) { + todo!(); } fn update_contacts(&mut self, _: ModelHandle, cx: &mut ViewContext) { @@ -116,16 +74,12 @@ impl ContactsPanel { cx: &mut LayoutContext, ) -> ElementBox { let theme = &theme.contacts_panel; - let worktree_count = collaborator.worktrees.len(); + let project_count = collaborator.projects.len(); let font_cache = cx.font_cache(); - let line_height = theme.unshared_worktree.name.text.line_height(font_cache); - let cap_height = 
theme.unshared_worktree.name.text.cap_height(font_cache); - let baseline_offset = theme - .unshared_worktree - .name - .text - .baseline_offset(font_cache) - + (theme.unshared_worktree.height - line_height) / 2.; + let line_height = theme.unshared_project.name.text.line_height(font_cache); + let cap_height = theme.unshared_project.name.text.cap_height(font_cache); + let baseline_offset = theme.unshared_project.name.text.baseline_offset(font_cache) + + (theme.unshared_project.height - line_height) / 2.; let tree_branch_width = theme.tree_branch_width; let tree_branch_color = theme.tree_branch_color; let host_avatar_height = theme @@ -161,11 +115,11 @@ impl ContactsPanel { ) .with_children( collaborator - .worktrees + .projects .iter() .enumerate() - .map(|(ix, worktree)| { - let worktree_id = worktree.id; + .map(|(ix, project)| { + let project_id = project.id; Flex::row() .with_child( @@ -182,7 +136,7 @@ impl ContactsPanel { vec2f(start_x, start_y), vec2f( start_x + tree_branch_width, - if ix + 1 == worktree_count { + if ix + 1 == project_count { end_y } else { bounds.max_y() @@ -210,28 +164,27 @@ impl ContactsPanel { .with_child({ let is_host = Some(collaborator.user.id) == current_user_id; let is_guest = !is_host - && worktree + && project .guests .iter() .any(|guest| Some(guest.id) == current_user_id); - let is_shared = worktree.is_shared; + let is_shared = project.is_shared; MouseEventHandler::new::( - worktree_id as usize, + project_id as usize, cx, |mouse_state, _| { - let style = match (worktree.is_shared, mouse_state.hovered) - { - (false, false) => &theme.unshared_worktree, - (false, true) => &theme.hovered_unshared_worktree, - (true, false) => &theme.shared_worktree, - (true, true) => &theme.hovered_shared_worktree, + let style = match (project.is_shared, mouse_state.hovered) { + (false, false) => &theme.unshared_project, + (false, true) => &theme.hovered_unshared_project, + (true, false) => &theme.shared_project, + (true, true) => &theme.hovered_shared_project, }; Flex::row() .with_child( Label::new( - worktree.root_name.clone(), + project.worktree_root_names.join(", "), style.name.text.clone(), ) .aligned() @@ -240,7 +193,7 @@ impl ContactsPanel { .with_style(style.name.container) .boxed(), ) - .with_children(worktree.guests.iter().filter_map( + .with_children(project.guests.iter().filter_map( |participant| { participant.avatar.clone().map(|avatar| { Image::new(avatar) @@ -268,23 +221,15 @@ impl ContactsPanel { CursorStyle::Arrow }) .on_click(move |cx| { - if is_shared { - if is_host { - cx.dispatch_action(UnshareWorktree(worktree_id)); - } else if is_guest { - cx.dispatch_action(LeaveWorktree(worktree_id)); - } else { - cx.dispatch_action(JoinWorktree(worktree_id)) - } - } else if is_host { - cx.dispatch_action(ShareWorktree(worktree_id)); + if !is_host && !is_guest { + cx.dispatch_action(JoinProject(project_id)) } }) .expanded(1.0) .boxed() }) .constrained() - .with_height(theme.unshared_worktree.height) + .with_height(theme.unshared_project.height) .boxed() }), ) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index f39803f1482cc027fe3b2422f8b1be3e36242d04..def194b72474fd06a33a5edf1d5f914e6a5a56f1 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -213,7 +213,7 @@ impl workspace::Item for ProjectDiagnostics { }) .detach(); - ProjectDiagnosticsEditor::new(project.read(cx).replica_id(cx), settings, cx) + ProjectDiagnosticsEditor::new(project.read(cx).replica_id(), settings, cx) } fn 
project_path(&self) -> Option { diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c5f976e6f53363143348f56871df13a0bd67672a..23b870c11f09539c44a3ad0efb629420cee3be46 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -54,6 +54,7 @@ type AnyLocalTask = async_task::Task>; #[must_use] pub enum Task { + Ready(Option), Local { any_task: AnyLocalTask, result_type: PhantomData, @@ -594,6 +595,10 @@ pub fn deterministic(seed: u64) -> (Rc, Arc) { } impl Task { + pub fn ready(value: T) -> Self { + Self::Ready(Some(value)) + } + fn local(any_task: AnyLocalTask) -> Self { Self::Local { any_task, @@ -603,6 +608,7 @@ impl Task { pub fn detach(self) { match self { + Task::Ready(_) => {} Task::Local { any_task, .. } => any_task.detach(), Task::Send { any_task, .. } => any_task.detach(), } @@ -621,6 +627,7 @@ impl Task { impl fmt::Debug for Task { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { + Task::Ready(value) => value.fmt(f), Task::Local { any_task, .. } => any_task.fmt(f), Task::Send { any_task, .. } => any_task.fmt(f), } @@ -632,6 +639,7 @@ impl Future for Task { fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { match unsafe { self.get_unchecked_mut() } { + Task::Ready(value) => Poll::Ready(value.take().unwrap()), Task::Local { any_task, .. } => { any_task.poll(cx).map(|value| *value.downcast().unwrap()) } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 2ee742af7e99f0013de729f6959915ca4f19da0e..0fd26f4797ce174d0049c0f6708009b92fac477f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -213,7 +213,8 @@ impl Project { subscriptions: vec![ client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), - client.subscribe_to_entity(remote_id, cx, Self::handle_register_worktree), + client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree), + client.subscribe_to_entity(remote_id, cx, Self::handle_unregister_worktree), client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), @@ -231,14 +232,6 @@ impl Project { *remote_id_tx.borrow_mut() = remote_id; } - for worktree in &self.worktrees { - worktree.update(cx, |worktree, _| { - if let Some(worktree) = worktree.as_local_mut() { - worktree.set_project_remote_id(remote_id); - } - }); - } - self.subscriptions.clear(); if let Some(remote_id) = remote_id { self.subscriptions.extend([ @@ -307,7 +300,11 @@ impl Project { this.update(&mut cx, |this, cx| { for worktree in &this.worktrees { worktree.update(cx, |worktree, cx| { - worktree.as_local_mut().unwrap().share(cx).detach(); + worktree + .as_local_mut() + .unwrap() + .share(remote_id, cx) + .detach(); }); } }); @@ -327,6 +324,13 @@ impl Project { } } + fn is_shared(&self) -> bool { + match &self.client_state { + ProjectClientState::Local { is_shared, .. } => *is_shared, + ProjectClientState::Remote { .. 
} => false, + } + } + pub fn add_local_worktree( &mut self, abs_path: &Path, @@ -337,32 +341,35 @@ impl Project { let user_store = self.user_store.clone(); let languages = self.languages.clone(); let path = Arc::from(abs_path); - cx.spawn(|this, mut cx| async move { + cx.spawn(|project, mut cx| async move { let worktree = Worktree::open_local(client.clone(), user_store, path, fs, languages, &mut cx) .await?; - this.update(&mut cx, |this, cx| { - if let Some(project_id) = this.remote_id() { - worktree.update(cx, |worktree, cx| { - let worktree = worktree.as_local_mut().unwrap(); - worktree.set_project_remote_id(Some(project_id)); - let serialized_worktree = worktree.to_proto(cx); - let authorized_logins = worktree.authorized_logins(); - cx.foreground() - .spawn(async move { - client - .request(proto::RegisterWorktree { - project_id, - worktree: Some(serialized_worktree), - authorized_logins, - }) - .log_err(); - }) - .detach(); - }); - } - this.add_worktree(worktree.clone(), cx); + + let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| { + project.add_worktree(worktree.clone(), cx); + (project.remote_id(), project.is_shared()) }); + + if let Some(project_id) = remote_project_id { + let register_message = worktree.update(&mut cx, |worktree, _| { + let worktree = worktree.as_local_mut().unwrap(); + proto::RegisterWorktree { + project_id, + root_name: worktree.root_name().to_string(), + authorized_logins: worktree.authorized_logins(), + } + }); + client.request(register_message).await?; + if is_shared { + worktree + .update(&mut cx, |worktree, cx| { + worktree.as_local_mut().unwrap().share(project_id, cx) + }) + .await?; + } + } + Ok(worktree) }) } @@ -476,9 +483,9 @@ impl Project { Ok(()) } - fn handle_register_worktree( + fn handle_share_worktree( &mut self, - envelope: TypedEnvelope, + envelope: TypedEnvelope, client: Arc, cx: &mut ModelContext, ) -> Result<()> { @@ -505,6 +512,19 @@ impl Project { Ok(()) } + fn handle_unregister_worktree( + &mut self, + envelope: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + self.worktrees.retain(|worktree| { + worktree.read(cx).as_remote().unwrap().remote_id() != envelope.payload.worktree_id + }); + cx.notify(); + Ok(()) + } + fn handle_update_worktree( &mut self, envelope: TypedEnvelope, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 44259f9aeddeae9dfab7f941ea2db33d31cf5060..0ec03af08d0bb18473381dce90e99e7ce24c3055 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -208,7 +208,7 @@ impl Worktree { } Worktree::Remote(RemoteWorktree { - project_remote_id, + project_id: project_remote_id, remote_id, replica_id, snapshot, @@ -236,6 +236,14 @@ impl Worktree { } } + pub fn as_remote(&self) -> Option<&RemoteWorktree> { + if let Worktree::Remote(worktree) = self { + Some(worktree) + } else { + None + } + } + pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> { if let Worktree::Local(worktree) = self { Some(worktree) @@ -483,8 +491,10 @@ impl Worktree { let sender_id = envelope.original_sender_id()?; let this = self.as_local().unwrap(); let project_id = this - .project_remote_id - .ok_or_else(|| anyhow!("can't save buffer while disconnected"))?; + .share + .as_ref() + .ok_or_else(|| anyhow!("can't save buffer while disconnected"))? 
+ .project_id; let buffer = this .shared_buffers @@ -756,13 +766,12 @@ impl Worktree { operation: Operation, cx: &mut ModelContext, ) { - if let Some((rpc, project_id)) = match self { + if let Some((project_id, rpc)) = match self { Worktree::Local(worktree) => worktree - .project_remote_id - .map(|id| (worktree.client.clone(), id)), - Worktree::Remote(worktree) => { - Some((worktree.client.clone(), worktree.project_remote_id)) - } + .share + .as_ref() + .map(|share| (share.project_id, worktree.client.clone())), + Worktree::Remote(worktree) => Some((worktree.project_id, worktree.client.clone())), } { cx.spawn(|worktree, mut cx| async move { if let Err(error) = rpc @@ -809,7 +818,6 @@ pub struct LocalWorktree { background_snapshot: Arc>, last_scan_state_rx: watch::Receiver, _background_scanner_task: Option>, - project_remote_id: Option, poll_task: Option>, share: Option, loading_buffers: LoadingBuffers, @@ -826,11 +834,12 @@ pub struct LocalWorktree { } struct ShareState { + project_id: u64, snapshots_tx: Sender, } pub struct RemoteWorktree { - project_remote_id: u64, + project_id: u64, remote_id: u64, snapshot: Snapshot, snapshot_rx: watch::Receiver, @@ -913,7 +922,6 @@ impl LocalWorktree { let tree = Self { snapshot: snapshot.clone(), config, - project_remote_id: None, background_snapshot: Arc::new(Mutex::new(snapshot)), last_scan_state_rx, _background_scanner_task: None, @@ -965,10 +973,6 @@ impl LocalWorktree { Ok((tree, scan_states_tx)) } - pub fn set_project_remote_id(&mut self, id: Option) { - self.project_remote_id = id; - } - pub fn authorized_logins(&self) -> Vec { self.config.collaborators.clone() } @@ -1244,63 +1248,54 @@ impl LocalWorktree { }) } - pub fn share(&mut self, cx: &mut ModelContext) -> Task> { + pub fn share( + &mut self, + project_id: u64, + cx: &mut ModelContext, + ) -> Task> { + if self.share.is_some() { + return Task::ready(Ok(())); + } + let snapshot = self.snapshot(); let rpc = self.client.clone(); - let project_id = self.project_remote_id; let worktree_id = cx.model_id() as u64; - cx.spawn(|this, mut cx| async move { - let project_id = project_id.ok_or_else(|| anyhow!("no project id"))?; + let (snapshots_to_send_tx, snapshots_to_send_rx) = smol::channel::unbounded::(); + self.share = Some(ShareState { + project_id, + snapshots_tx: snapshots_to_send_tx, + }); - let (snapshots_to_send_tx, snapshots_to_send_rx) = - smol::channel::unbounded::(); - cx.background() - .spawn({ - let rpc = rpc.clone(); - async move { - let mut prev_snapshot = snapshot; - while let Ok(snapshot) = snapshots_to_send_rx.recv().await { - let message = snapshot.build_update( - &prev_snapshot, - project_id, - worktree_id, - false, - ); - match rpc.send(message).await { - Ok(()) => prev_snapshot = snapshot, - Err(err) => log::error!("error sending snapshot diff {}", err), - } + cx.background() + .spawn({ + let rpc = rpc.clone(); + let snapshot = snapshot.clone(); + async move { + let mut prev_snapshot = snapshot; + while let Ok(snapshot) = snapshots_to_send_rx.recv().await { + let message = + snapshot.build_update(&prev_snapshot, project_id, worktree_id, false); + match rpc.send(message).await { + Ok(()) => prev_snapshot = snapshot, + Err(err) => log::error!("error sending snapshot diff {}", err), } } - }) - .detach(); + } + }) + .detach(); - this.update(&mut cx, |worktree, _| { - let worktree = worktree.as_local_mut().unwrap(); - worktree.share = Some(ShareState { - snapshots_tx: snapshots_to_send_tx, - }); - }); + let share_message = cx.background().spawn(async move { + proto::ShareWorktree 
{ + project_id, + worktree: Some(snapshot.to_proto()), + } + }); + cx.foreground().spawn(async move { + rpc.request(share_message.await).await?; Ok(()) }) } - - pub fn to_proto(&self, cx: &mut ModelContext) -> proto::Worktree { - let id = cx.model_id() as u64; - let snapshot = self.snapshot(); - let root_name = self.root_name.clone(); - proto::Worktree { - id, - root_name, - entries: snapshot - .entries_by_path - .cursor::<()>() - .filter(|e| !e.is_ignored) - .map(Into::into) - .collect(), - } - } } fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result { @@ -1339,6 +1334,10 @@ impl fmt::Debug for LocalWorktree { } impl RemoteWorktree { + pub fn remote_id(&self) -> u64 { + self.remote_id + } + fn get_open_buffer( &mut self, path: &Path, @@ -1368,7 +1367,7 @@ impl RemoteWorktree { ) -> Task>> { let rpc = self.client.clone(); let replica_id = self.replica_id; - let project_id = self.project_remote_id; + let project_id = self.project_id; let remote_worktree_id = self.remote_id; let root_path = self.snapshot.abs_path.clone(); let path: Arc = Arc::from(path); @@ -1481,6 +1480,20 @@ impl Snapshot { self.id } + pub fn to_proto(&self) -> proto::Worktree { + let root_name = self.root_name.clone(); + proto::Worktree { + id: self.id as u64, + root_name, + entries: self + .entries_by_path + .cursor::<()>() + .filter(|e| !e.is_ignored) + .map(Into::into) + .collect(), + } + } + pub fn build_update( &self, other: &Self, @@ -1540,6 +1553,7 @@ impl Snapshot { proto::UpdateWorktree { project_id, worktree_id, + root_name: self.root_name().to_string(), updated_entries, removed_entries, } @@ -1902,7 +1916,7 @@ impl language::File for File { self.worktree.update(cx, |worktree, cx| match worktree { Worktree::Local(worktree) => { let rpc = worktree.client.clone(); - let project_id = worktree.project_remote_id; + let project_id = worktree.share.as_ref().map(|share| share.project_id); let save = worktree.save(self.path.clone(), text, cx); cx.background().spawn(async move { let entry = save.await?; @@ -1921,7 +1935,7 @@ impl language::File for File { } Worktree::Remote(worktree) => { let rpc = worktree.client.clone(); - let project_id = worktree.project_remote_id; + let project_id = worktree.project_id; cx.foreground().spawn(async move { let response = rpc .request(proto::SaveBuffer { @@ -1961,7 +1975,7 @@ impl language::File for File { let worktree_id = self.worktree.id() as u64; self.worktree.update(cx, |worktree, cx| { if let Worktree::Remote(worktree) = worktree { - let project_id = worktree.project_remote_id; + let project_id = worktree.project_id; let rpc = worktree.client.clone(); cx.background() .spawn(async move { diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index d8ce1df3c7ee2af1a713cdd17a048ce12f862cfc..c96e478c26abf0a09e91ec32257b44fceea67d6c 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -617,12 +617,13 @@ mod tests { ) .await; - let project = cx.add_model(|_| { - Project::new( + let project = cx.add_model(|cx| { + Project::local( params.languages.clone(), params.client.clone(), params.user_store.clone(), params.fs.clone(), + cx, ) }); let root1 = project diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 4a0081ce7fd7cbbdc7679e8c7c0f9a57717f0708..b86a4c1e301e563b73fd67f40857d466c896b321 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -96,7 +96,7 @@ message LeaveProject { message RegisterWorktree { uint64 project_id = 1; - Worktree 
worktree = 2; + string root_name = 2; repeated string authorized_logins = 3; } @@ -113,8 +113,9 @@ message ShareWorktree { message UpdateWorktree { uint64 project_id = 1; uint64 worktree_id = 2; - repeated Entry updated_entries = 3; - repeated uint64 removed_entries = 4; + string root_name = 3; + repeated Entry updated_entries = 4; + repeated uint64 removed_entries = 5; } message AddProjectCollaborator { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 8eb62d8ce0e8347de937c0969d8e52a9b73a35e3..de338fe43fa88d05353764c7d6adf0031d76e034 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -152,6 +152,7 @@ messages!( ShareWorktree, UnregisterProject, UnregisterWorktree, + UnshareProject, UpdateBuffer, UpdateContacts, UpdateWorktree, @@ -183,7 +184,7 @@ entity_messages!( OpenBuffer, CloseBuffer, SaveBuffer, - RegisterWorktree, + ShareWorktree, UnregisterWorktree, UpdateBuffer, UpdateWorktree, diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 6f685ce70d2118c6068ddcf26ff90239815d4de3..6fca6966896c2290f2bc2ed2a7c69b15ec50b872 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -155,10 +155,10 @@ pub struct ContactsPanel { pub host_username: ContainedText, pub tree_branch_width: f32, pub tree_branch_color: Color, - pub shared_worktree: WorktreeRow, - pub hovered_shared_worktree: WorktreeRow, - pub unshared_worktree: WorktreeRow, - pub hovered_unshared_worktree: WorktreeRow, + pub shared_project: WorktreeRow, + pub hovered_shared_project: WorktreeRow, + pub unshared_project: WorktreeRow, + pub hovered_unshared_project: WorktreeRow, } #[derive(Deserialize, Default)] diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 6f0aa7503295625a24342acea2a08494647c56f2..090aae5567707390580b9e0d9a0ff197fc1acf71 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -358,12 +358,13 @@ pub struct Workspace { impl Workspace { pub fn new(params: &WorkspaceParams, cx: &mut ViewContext) -> Self { - let project = cx.add_model(|_| { - Project::new( + let project = cx.add_model(|cx| { + Project::local( params.languages.clone(), params.client.clone(), params.user_store.clone(), params.fs.clone(), + cx, ) }); cx.observe(&project, |_, _, cx| cx.notify()).detach(); @@ -988,24 +989,25 @@ impl Workspace { } fn render_collaborators(&self, theme: &Theme, cx: &mut RenderContext) -> Vec { - let mut elements = Vec::new(); - if let Some(active_worktree) = self.project.read(cx).active_worktree() { - let collaborators = active_worktree - .read(cx) - .collaborators() - .values() - .cloned() - .collect::>(); - for collaborator in collaborators { - elements.push(self.render_avatar( + let mut collaborators = self + .project + .read(cx) + .collaborators() + .values() + .cloned() + .collect::>(); + collaborators.sort_unstable_by_key(|collaborator| collaborator.replica_id); + collaborators + .into_iter() + .map(|collaborator| { + self.render_avatar( Some(&collaborator.user), Some(collaborator.replica_id), theme, cx, - )); - } - } - elements + ) + }) + .collect() } fn render_avatar( From 55910c0d79fcbf83eee578ebdd1cb852d82e358a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 16:30:29 -0800 Subject: [PATCH 142/196] Get the server and integration tests compiling --- crates/project/src/project.rs | 189 +++-- crates/project_panel/src/project_panel.rs | 8 +- crates/rpc/proto/zed.proto | 5 +- crates/rpc/src/proto.rs | 1 + crates/server/src/db.rs | 4 +- crates/server/src/rpc.rs | 
904 ++++++++++++---------- crates/server/src/rpc/store.rs | 360 +++++---- crates/workspace/src/workspace.rs | 16 +- 8 files changed, 831 insertions(+), 656 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0fd26f4797ce174d0049c0f6708009b92fac477f..e02b109b52a08f04335ea9eea84259e669c380f3 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8,7 +8,9 @@ use clock::ReplicaId; use collections::HashMap; use futures::Future; use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; -use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{ + AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, +}; use language::{Buffer, DiagnosticEntry, LanguageRegistry}; use lsp::DiagnosticSeverity; use postage::{prelude::Stream, watch}; @@ -42,6 +44,7 @@ enum ProjectClientState { _maintain_remote_id_task: Task>, }, Remote { + sharing_has_stopped: bool, remote_id: u64, replica_id: ReplicaId, }, @@ -106,59 +109,61 @@ pub struct ProjectEntry { impl Project { pub fn local( - languages: Arc, client: Arc, user_store: ModelHandle, + languages: Arc, fs: Arc, - cx: &mut ModelContext, - ) -> Self { - let (remote_id_tx, remote_id_rx) = watch::channel(); - let _maintain_remote_id_task = cx.spawn_weak({ - let rpc = client.clone(); - move |this, mut cx| { - async move { - let mut status = rpc.status(); - while let Some(status) = status.recv().await { - if let Some(this) = this.upgrade(&cx) { - let remote_id = if let client::Status::Connected { .. } = status { - let response = rpc.request(proto::RegisterProject {}).await?; - Some(response.project_id) - } else { - None - }; - this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx)); + cx: &mut MutableAppContext, + ) -> ModelHandle { + cx.add_model(|cx: &mut ModelContext| { + let (remote_id_tx, remote_id_rx) = watch::channel(); + let _maintain_remote_id_task = cx.spawn_weak({ + let rpc = client.clone(); + move |this, mut cx| { + async move { + let mut status = rpc.status(); + while let Some(status) = status.recv().await { + if let Some(this) = this.upgrade(&cx) { + let remote_id = if let client::Status::Connected { .. 
} = status { + let response = rpc.request(proto::RegisterProject {}).await?; + Some(response.project_id) + } else { + None + }; + this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx)); + } } + Ok(()) } - Ok(()) + .log_err() } - .log_err() - } - }); + }); - Self { - worktrees: Default::default(), - collaborators: Default::default(), - client_state: ProjectClientState::Local { - is_shared: false, - remote_id_tx, - remote_id_rx, - _maintain_remote_id_task, - }, - subscriptions: Vec::new(), - active_worktree: None, - active_entry: None, - languages, - client, - user_store, - fs, - } + Self { + worktrees: Default::default(), + collaborators: Default::default(), + client_state: ProjectClientState::Local { + is_shared: false, + remote_id_tx, + remote_id_rx, + _maintain_remote_id_task, + }, + subscriptions: Vec::new(), + active_worktree: None, + active_entry: None, + languages, + client, + user_store, + fs, + } + }) } - pub async fn open_remote( + pub async fn remote( remote_id: u64, - languages: Arc, client: Arc, user_store: ModelHandle, + languages: Arc, fs: Arc, cx: &mut AsyncAppContext, ) -> Result> { @@ -211,6 +216,7 @@ impl Project { user_store, fs, subscriptions: vec![ + client.subscribe_to_entity(remote_id, cx, Self::handle_unshare_project), client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), client.subscribe_to_entity(remote_id, cx, Self::handle_share_worktree), @@ -221,6 +227,7 @@ impl Project { ], client, client_state: ProjectClientState::Remote { + sharing_has_stopped: false, remote_id, replica_id, }, @@ -252,6 +259,27 @@ impl Project { } } + pub fn next_remote_id(&self) -> impl Future { + let mut id = None; + let mut watch = None; + match &self.client_state { + ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()), + ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id), + } + + async move { + if let Some(id) = id { + return id; + } + let mut watch = watch.unwrap(); + loop { + if let Some(Some(id)) = watch.recv().await { + return id; + } + } + } + } + pub fn replica_id(&self) -> ReplicaId { match &self.client_state { ProjectClientState::Local { .. } => 0, @@ -277,7 +305,7 @@ impl Project { pub fn share(&self, cx: &mut ModelContext) -> Task> { let rpc = self.client.clone(); cx.spawn(|this, mut cx| async move { - let remote_id = this.update(&mut cx, |this, _| { + let project_id = this.update(&mut cx, |this, _| { if let ProjectClientState::Local { is_shared, remote_id_rx, @@ -285,25 +313,22 @@ impl Project { } = &mut this.client_state { *is_shared = true; - Ok(*remote_id_rx.borrow()) + remote_id_rx + .borrow() + .ok_or_else(|| anyhow!("no project id")) } else { Err(anyhow!("can't share a remote project")) } })?; - let remote_id = remote_id.ok_or_else(|| anyhow!("no project id"))?; - rpc.send(proto::ShareProject { - project_id: remote_id, - }) - .await?; - + rpc.send(proto::ShareProject { project_id }).await?; this.update(&mut cx, |this, cx| { for worktree in &this.worktrees { worktree.update(cx, |worktree, cx| { worktree .as_local_mut() .unwrap() - .share(remote_id, cx) + .share(project_id, cx) .detach(); }); } @@ -312,6 +337,41 @@ impl Project { }) } + pub fn unshare(&self, cx: &mut ModelContext) -> Task> { + let rpc = self.client.clone(); + cx.spawn(|this, mut cx| async move { + let project_id = this.update(&mut cx, |this, _| { + if let ProjectClientState::Local { + is_shared, + remote_id_rx, + .. 
+ } = &mut this.client_state + { + *is_shared = true; + remote_id_rx + .borrow() + .ok_or_else(|| anyhow!("no project id")) + } else { + Err(anyhow!("can't share a remote project")) + } + })?; + + rpc.send(proto::UnshareProject { project_id }).await?; + + Ok(()) + }) + } + + pub fn is_read_only(&self) -> bool { + match &self.client_state { + ProjectClientState::Local { .. } => false, + ProjectClientState::Remote { + sharing_has_stopped, + .. + } => *sharing_has_stopped, + } + } + pub fn open_buffer( &self, path: ProjectPath, @@ -333,14 +393,14 @@ impl Project { pub fn add_local_worktree( &mut self, - abs_path: &Path, + abs_path: impl AsRef, cx: &mut ModelContext, ) -> Task>> { let fs = self.fs.clone(); let client = self.client.clone(); let user_store = self.user_store.clone(); let languages = self.languages.clone(); - let path = Arc::from(abs_path); + let path = Arc::from(abs_path.as_ref()); cx.spawn(|project, mut cx| async move { let worktree = Worktree::open_local(client.clone(), user_store, path, fs, languages, &mut cx) @@ -352,10 +412,12 @@ impl Project { }); if let Some(project_id) = remote_project_id { + let worktree_id = worktree.id() as u64; let register_message = worktree.update(&mut cx, |worktree, _| { let worktree = worktree.as_local_mut().unwrap(); proto::RegisterWorktree { project_id, + worktree_id, root_name: worktree.root_name().to_string(), authorized_logins: worktree.authorized_logins(), } @@ -432,6 +494,25 @@ impl Project { // RPC message handlers + fn handle_unshare_project( + &mut self, + _: TypedEnvelope, + _: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + if let ProjectClientState::Remote { + sharing_has_stopped, + .. + } = &mut self.client_state + { + *sharing_has_stopped = true; + cx.notify(); + Ok(()) + } else { + unreachable!() + } + } + fn handle_add_collaborator( &mut self, mut envelope: TypedEnvelope, @@ -812,6 +893,6 @@ mod tests { let client = client::Client::new(); let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) }); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); - cx.add_model(|cx| Project::local(languages, client, user_store, fs, cx)) + cx.update(|cx| Project::local(client, user_store, languages, fs, cx)) } } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index c96e478c26abf0a09e91ec32257b44fceea67d6c..bf8a1c418a4b06d68121c819efabde4e9880e01e 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -617,18 +617,18 @@ mod tests { ) .await; - let project = cx.add_model(|cx| { + let project = cx.update(|cx| { Project::local( - params.languages.clone(), params.client.clone(), params.user_store.clone(), + params.languages.clone(), params.fs.clone(), cx, ) }); let root1 = project .update(&mut cx, |project, cx| { - project.add_local_worktree("/root1".as_ref(), cx) + project.add_local_worktree("/root1", cx) }) .await .unwrap(); @@ -637,7 +637,7 @@ mod tests { .await; let root2 = project .update(&mut cx, |project, cx| { - project.add_local_worktree("/root2".as_ref(), cx) + project.add_local_worktree("/root2", cx) }) .await .unwrap(); diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index b86a4c1e301e563b73fd67f40857d466c896b321..d8fa9bc8e5c6c02df499ab81c26aacfd5450942c 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -96,8 +96,9 @@ message LeaveProject { message RegisterWorktree { uint64 project_id = 1; - string root_name = 2; - repeated 
string authorized_logins = 3; + uint64 worktree_id = 2; + string root_name = 3; + repeated string authorized_logins = 4; } message UnregisterWorktree { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index de338fe43fa88d05353764c7d6adf0031d76e034..5b328c02eced78fadf717f2e8297a3fb4b6ba826 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -186,6 +186,7 @@ entity_messages!( SaveBuffer, ShareWorktree, UnregisterWorktree, + UnshareProject, UpdateBuffer, UpdateWorktree, ); diff --git a/crates/server/src/db.rs b/crates/server/src/db.rs index ebc861be03d5ddaaa6a85165812a60a9f4494ac5..e3267bad0e7aa7ac1f725649d82e7d4d7ee72bf2 100644 --- a/crates/server/src/db.rs +++ b/crates/server/src/db.rs @@ -443,7 +443,9 @@ impl Db { macro_rules! id_type { ($name:ident) => { - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, sqlx::Type, Serialize)] + #[derive( + Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, sqlx::Type, Serialize, + )] #[sqlx(transparent)] #[serde(transparent)] pub struct $name(pub i32); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index faed6e5a39fb0a923d16c3c6f17bdc9f4d8d36b3..2e60f014367ce6315efa38d84cdaf31b2838ed33 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -43,6 +43,7 @@ pub struct Server { const MESSAGE_COUNT_PER_PAGE: usize = 100; const MAX_MESSAGE_LEN: usize = 1024; +const NO_SUCH_PROJECT: &'static str = "no such project"; impl Server { pub fn new( @@ -60,12 +61,15 @@ impl Server { server .add_handler(Server::ping) + .add_handler(Server::register_project) + .add_handler(Server::unregister_project) + .add_handler(Server::share_project) + .add_handler(Server::unshare_project) + .add_handler(Server::join_project) + .add_handler(Server::leave_project) .add_handler(Server::register_worktree) .add_handler(Server::unregister_worktree) .add_handler(Server::share_worktree) - .add_handler(Server::unshare_worktree) - .add_handler(Server::join_worktree) - .add_handler(Server::leave_worktree) .add_handler(Server::update_worktree) .add_handler(Server::open_buffer) .add_handler(Server::close_buffer) @@ -207,162 +211,85 @@ impl Server { Ok(()) } - async fn register_worktree( + async fn register_project( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { - let receipt = request.receipt(); - let host_user_id = self.state().user_id_for_connection(request.sender_id)?; - - let mut contact_user_ids = HashSet::default(); - contact_user_ids.insert(host_user_id); - for github_login in request.payload.authorized_logins { - match self.app_state.db.create_user(&github_login, false).await { - Ok(contact_user_id) => { - contact_user_ids.insert(contact_user_id); - } - Err(err) => { - let message = err.to_string(); - self.peer - .respond_with_error(receipt, proto::Error { message }) - .await?; - return Ok(()); - } - } - } - - let contact_user_ids = contact_user_ids.into_iter().collect::>(); - let ok = self.state_mut().register_worktree( - request.project_id, - request.worktree_id, - Worktree { - authorized_user_ids: contact_user_ids.clone(), - root_name: request.payload.root_name, - }, - ); - - if ok { - self.peer.respond(receipt, proto::Ack {}).await?; - self.update_contacts_for_users(&contact_user_ids).await?; - } else { - self.peer - .respond_with_error( - receipt, - proto::Error { - message: "no such project".to_string(), - }, - ) - .await?; - } - + let mut state = self.state_mut(); + let user_id = state.user_id_for_connection(request.sender_id)?; + 
state.register_project(request.sender_id, user_id); Ok(()) } - async fn unregister_worktree( + async fn unregister_project( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { - let project_id = request.payload.project_id; - let worktree_id = request.payload.worktree_id; - let worktree = - self.state_mut() - .unregister_worktree(project_id, worktree_id, request.sender_id)?; - - if let Some(share) = worktree.share { - broadcast( - request.sender_id, - share.guests.keys().copied().collect(), - |conn_id| { - self.peer.send( - conn_id, - proto::UnregisterWorktree { - project_id, - worktree_id, - }, - ) - }, - ) - .await?; - } - self.update_contacts_for_users(&worktree.authorized_user_ids) - .await?; + self.state_mut() + .unregister_project(request.payload.project_id, request.sender_id); Ok(()) } - async fn share_worktree( + async fn share_project( mut self: Arc, - mut request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { - let worktree = request - .payload - .worktree - .as_mut() - .ok_or_else(|| anyhow!("missing worktree"))?; - let entries = mem::take(&mut worktree.entries) - .into_iter() - .map(|entry| (entry.id, entry)) - .collect(); - - let contact_user_ids = - self.state_mut() - .share_worktree(worktree.id, request.sender_id, entries); - if let Some(contact_user_ids) = contact_user_ids { - self.peer - .respond(request.receipt(), proto::ShareWorktreeResponse {}) - .await?; - self.update_contacts_for_users(&contact_user_ids).await?; - } else { - self.peer - .respond_with_error( - request.receipt(), - proto::Error { - message: "no such worktree".to_string(), - }, - ) - .await?; - } + self.state_mut() + .share_project(request.payload.project_id, request.sender_id); Ok(()) } - async fn unshare_worktree( + async fn unshare_project( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { - let worktree_id = request.payload.worktree_id; - let worktree = self + let project_id = request.payload.project_id; + let project = self .state_mut() - .unshare_worktree(worktree_id, request.sender_id)?; + .unshare_project(project_id, request.sender_id)?; - broadcast(request.sender_id, worktree.connection_ids, |conn_id| { + broadcast(request.sender_id, project.connection_ids, |conn_id| { self.peer - .send(conn_id, proto::UnshareWorktree { worktree_id }) + .send(conn_id, proto::UnshareProject { project_id }) }) .await?; - self.update_contacts_for_users(&worktree.authorized_user_ids) + self.update_contacts_for_users(&project.authorized_user_ids) .await?; Ok(()) } - async fn join_worktree( + async fn join_project( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { - let worktree_id = request.payload.worktree_id; + let project_id = request.payload.project_id; let user_id = self.state().user_id_for_connection(request.sender_id)?; let response_data = self .state_mut() - .join_worktree(request.sender_id, user_id, worktree_id) + .join_project(request.sender_id, user_id, project_id) .and_then(|joined| { - let share = joined.worktree.share()?; + let share = joined.project.share()?; let peer_count = share.guests.len(); let mut collaborators = Vec::with_capacity(peer_count); collaborators.push(proto::Collaborator { - peer_id: joined.worktree.host_connection_id.0, + peer_id: joined.project.host_connection_id.0, replica_id: 0, - user_id: joined.worktree.host_user_id.to_proto(), + user_id: joined.project.host_user_id.to_proto(), }); + let worktrees = joined + .project + .worktrees + .values() 
+ .filter_map(|worktree| { + worktree.share.as_ref().map(|share| proto::Worktree { + id: project_id, + root_name: worktree.root_name.clone(), + entries: share.entries.values().cloned().collect(), + }) + }) + .collect(); for (peer_conn_id, (peer_replica_id, peer_user_id)) in &share.guests { if *peer_conn_id != request.sender_id { collaborators.push(proto::Collaborator { @@ -372,17 +299,13 @@ impl Server { }); } } - let response = proto::JoinWorktreeResponse { - worktree: Some(proto::Worktree { - id: worktree_id, - root_name: joined.worktree.root_name.clone(), - entries: share.entries.values().cloned().collect(), - }), + let response = proto::JoinProjectResponse { + worktrees, replica_id: joined.replica_id as u32, collaborators, }; - let connection_ids = joined.worktree.connection_ids(); - let contact_user_ids = joined.worktree.authorized_user_ids.clone(); + let connection_ids = joined.project.connection_ids(); + let contact_user_ids = joined.project.authorized_user_ids(); Ok((response, connection_ids, contact_user_ids)) }); @@ -391,8 +314,8 @@ impl Server { broadcast(request.sender_id, connection_ids, |conn_id| { self.peer.send( conn_id, - proto::AddCollaborator { - worktree_id, + proto::AddProjectCollaborator { + project_id: project_id, collaborator: Some(proto::Collaborator { peer_id: request.sender_id.0, replica_id: response.replica_id, @@ -420,19 +343,19 @@ impl Server { Ok(()) } - async fn leave_worktree( + async fn leave_project( mut self: Arc, - request: TypedEnvelope, + request: TypedEnvelope, ) -> tide::Result<()> { let sender_id = request.sender_id; - let worktree_id = request.payload.worktree_id; - let worktree = self.state_mut().leave_worktree(sender_id, worktree_id); + let project_id = request.payload.project_id; + let worktree = self.state_mut().leave_project(sender_id, project_id); if let Some(worktree) = worktree { broadcast(sender_id, worktree.connection_ids, |conn_id| { self.peer.send( conn_id, - proto::RemoveCollaborator { - worktree_id, + proto::RemoveProjectCollaborator { + project_id, peer_id: sender_id.0, }, ) @@ -444,16 +367,133 @@ impl Server { Ok(()) } + async fn register_worktree( + mut self: Arc, + request: TypedEnvelope, + ) -> tide::Result<()> { + let receipt = request.receipt(); + let host_user_id = self.state().user_id_for_connection(request.sender_id)?; + + let mut contact_user_ids = HashSet::default(); + contact_user_ids.insert(host_user_id); + for github_login in request.payload.authorized_logins { + match self.app_state.db.create_user(&github_login, false).await { + Ok(contact_user_id) => { + contact_user_ids.insert(contact_user_id); + } + Err(err) => { + let message = err.to_string(); + self.peer + .respond_with_error(receipt, proto::Error { message }) + .await?; + return Ok(()); + } + } + } + + let contact_user_ids = contact_user_ids.into_iter().collect::>(); + let ok = self.state_mut().register_worktree( + request.payload.project_id, + request.payload.worktree_id, + Worktree { + authorized_user_ids: contact_user_ids.clone(), + root_name: request.payload.root_name, + share: None, + }, + ); + + if ok { + self.peer.respond(receipt, proto::Ack {}).await?; + self.update_contacts_for_users(&contact_user_ids).await?; + } else { + self.peer + .respond_with_error( + receipt, + proto::Error { + message: NO_SUCH_PROJECT.to_string(), + }, + ) + .await?; + } + + Ok(()) + } + + async fn unregister_worktree( + mut self: Arc, + request: TypedEnvelope, + ) -> tide::Result<()> { + let project_id = request.payload.project_id; + let worktree_id = 
request.payload.worktree_id; + let (worktree, guest_connection_ids) = + self.state_mut() + .unregister_worktree(project_id, worktree_id, request.sender_id)?; + + broadcast(request.sender_id, guest_connection_ids, |conn_id| { + self.peer.send( + conn_id, + proto::UnregisterWorktree { + project_id, + worktree_id, + }, + ) + }) + .await?; + self.update_contacts_for_users(&worktree.authorized_user_ids) + .await?; + Ok(()) + } + + async fn share_worktree( + mut self: Arc, + mut request: TypedEnvelope, + ) -> tide::Result<()> { + let worktree = request + .payload + .worktree + .as_mut() + .ok_or_else(|| anyhow!("missing worktree"))?; + let entries = mem::take(&mut worktree.entries) + .into_iter() + .map(|entry| (entry.id, entry)) + .collect(); + + let contact_user_ids = self.state_mut().share_worktree( + request.payload.project_id, + worktree.id, + request.sender_id, + entries, + ); + if let Some(contact_user_ids) = contact_user_ids { + self.peer.respond(request.receipt(), proto::Ack {}).await?; + self.update_contacts_for_users(&contact_user_ids).await?; + } else { + self.peer + .respond_with_error( + request.receipt(), + proto::Error { + message: "no such worktree".to_string(), + }, + ) + .await?; + } + Ok(()) + } + async fn update_worktree( mut self: Arc, request: TypedEnvelope, ) -> tide::Result<()> { - let connection_ids = self.state_mut().update_worktree( - request.sender_id, - request.payload.worktree_id, - &request.payload.removed_entries, - &request.payload.updated_entries, - )?; + let connection_ids = self + .state_mut() + .update_worktree( + request.sender_id, + request.payload.project_id, + request.payload.worktree_id, + &request.payload.removed_entries, + &request.payload.updated_entries, + ) + .ok_or_else(|| anyhow!("no such worktree"))?; broadcast(request.sender_id, connection_ids, |connection_id| { self.peer @@ -471,7 +511,9 @@ impl Server { let receipt = request.receipt(); let host_connection_id = self .state() - .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?; + .read_project(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))? + .host_connection_id; let response = self .peer .forward_request(request.sender_id, host_connection_id, request.payload) @@ -486,7 +528,9 @@ impl Server { ) -> tide::Result<()> { let host_connection_id = self .state() - .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?; + .read_project(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))? 
+ .host_connection_id; self.peer .forward_send(request.sender_id, host_connection_id, request.payload) .await?; @@ -501,10 +545,11 @@ impl Server { let guests; { let state = self.state(); - host = state - .worktree_host_connection_id(request.sender_id, request.payload.worktree_id)?; - guests = state - .worktree_guest_connection_ids(request.sender_id, request.payload.worktree_id)?; + let project = state + .read_project(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; + host = project.host_connection_id; + guests = project.guest_connection_ids() } let sender = request.sender_id; @@ -536,7 +581,8 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .worktree_connection_ids(request.sender_id, request.payload.worktree_id)?; + .project_connection_ids(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -552,7 +598,8 @@ impl Server { ) -> tide::Result<()> { let receiver_ids = self .state() - .worktree_connection_ids(request.sender_id, request.payload.worktree_id)?; + .project_connection_ids(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!(NO_SUCH_PROJECT))?; broadcast(request.sender_id, receiver_ids, |connection_id| { self.peer .forward_send(request.sender_id, connection_id, request.payload.clone()) @@ -959,7 +1006,6 @@ mod tests { self, test::FakeHttpClient, Channel, ChannelDetails, ChannelList, Client, Credentials, EstablishConnectionError, UserStore, }, - contacts_panel::JoinWorktree, editor::{Editor, EditorSettings, Input, MultiBuffer}, fs::{FakeFs, Fs as _}, language::{ @@ -967,25 +1013,22 @@ mod tests { LanguageRegistry, LanguageServerConfig, Point, }, lsp, - project::{ProjectPath, Worktree}, - test::test_app_state, - workspace::Workspace, + project::Project, }; #[gpui::test] - async fn test_share_worktree(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_share_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { let (window_b, _) = cx_b.add_window(|_| EmptyView); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); + cx_a.foreground().forbid_parking(); // Connect to a server as 2 clients. 
let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - cx_a.foreground().forbid_parking(); - - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/a", json!({ @@ -995,47 +1038,56 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/a".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Join that worktree as client B, and see that a guest has joined as client A. - let worktree_b = Worktree::open_remote( + // Join that project as client B, and see that a guest has joined as client A. + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); + let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone()); - let replica_id_b = worktree_b.read_with(&cx_b, |tree, _| { + let replica_id_b = project_b.read_with(&cx_b, |project, _| { assert_eq!( - tree.collaborators() + project + .collaborators() .get(&client_a.peer_id) .unwrap() .user .github_login, "user_a" ); - tree.replica_id() + project.replica_id() }); - worktree_a + project_a .condition(&cx_a, |tree, _| { tree.collaborators() .get(&client_b.peer_id) @@ -1093,30 +1145,24 @@ mod tests { // Dropping the worktree removes client B from client A's collaborators. cx_b.update(move |_| drop(worktree_b)); - worktree_a - .condition(&cx_a, |tree, _| tree.collaborators().is_empty()) + project_a + .condition(&cx_a, |project, _| project.collaborators().is_empty()) .await; } #[gpui::test] - async fn test_unshare_worktree(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { + async fn test_unshare_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { cx_b.update(zed::contacts_panel::init); - let mut app_state_a = cx_a.update(test_app_state); - let mut app_state_b = cx_b.update(test_app_state); + let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 2 clients. 
let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - Arc::get_mut(&mut app_state_a).unwrap().client = client_a.clone(); - Arc::get_mut(&mut app_state_a).unwrap().user_store = client_a.user_store.clone(); - Arc::get_mut(&mut app_state_b).unwrap().client = client_b.clone(); - Arc::get_mut(&mut app_state_b).unwrap().user_store = client_b.user_store.clone(); - cx_a.foreground().forbid_parking(); - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/a", json!({ @@ -1126,71 +1172,55 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - app_state_a.client.clone(), - app_state_a.user_store.clone(), - "/a".as_ref(), - fs, - app_state_a.languages.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - - let remote_worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - let (window_b, workspace_b) = - cx_b.add_window(|cx| Workspace::new(&app_state_b.as_ref().into(), cx)); - cx_b.update(|cx| { - cx.dispatch_action( - window_b, - vec![workspace_b.id()], - &JoinWorktree(remote_worktree_id), - ); - }); - workspace_b - .condition(&cx_b, |workspace, cx| workspace.worktrees(cx).len() == 1) - .await; + // Join that project as client B + let project_b = Project::remote( + project_id, + client_b.clone(), + client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), + &mut cx_b.to_async(), + ) + .await + .unwrap(); - let local_worktree_id_b = workspace_b.read_with(&cx_b, |workspace, cx| { - let active_pane = workspace.active_pane().read(cx); - assert!(active_pane.active_item().is_none()); - workspace.worktrees(cx).first().unwrap().id() - }); - workspace_b - .update(&mut cx_b, |workspace, cx| { - workspace.open_entry( - ProjectPath { - worktree_id: local_worktree_id_b, - path: Path::new("a.txt").into(), - }, - cx, - ) - }) - .unwrap() + let worktree_b = project_b.read_with(&cx_b, |p, _| p.worktrees()[0].clone()); + worktree_b + .update(&mut cx_b, |tree, cx| tree.open_buffer("a.txt", cx)) .await .unwrap(); - workspace_b.read_with(&cx_b, |workspace, cx| { - let active_pane = workspace.active_pane().read(cx); - assert!(active_pane.active_item().is_some()); - }); - worktree_a.update(&mut cx_a, |tree, cx| { - tree.as_local_mut().unwrap().unshare(cx); - }); - workspace_b - .condition(&cx_b, |workspace, cx| workspace.worktrees(cx).len() == 0) + project_a + .update(&mut cx_a, |project, cx| project.unshare(cx)) + .await + .unwrap(); + project_b + .condition(&mut cx_b, |project, _| project.is_read_only()) .await; - workspace_b.read_with(&cx_b, |workspace, cx| { - let active_pane = workspace.active_pane().read(cx); - assert!(active_pane.active_item().is_none()); - }); } #[gpui::test] @@ -1201,6 +1231,7 @@ mod tests { ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = 
Arc::new(FakeFs::new()); // Connect to a server as 3 clients. let mut server = TestServer::start().await; @@ -1208,8 +1239,6 @@ mod tests { let client_b = server.create_client(&mut cx_b, "user_b").await; let client_c = server.create_client(&mut cx_c, "user_c").await; - let fs = Arc::new(FakeFs::new()); - // Share a worktree as client A. fs.insert_tree( "/a", @@ -1220,46 +1249,55 @@ mod tests { }), ) .await; - - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/a".as_ref(), - fs.clone(), - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); // Join that worktree as clients B and C. - let worktree_b = Worktree::open_remote( + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); - let worktree_c = Worktree::open_remote( + let project_c = Project::remote( + project_id, client_c.clone(), - worktree_id, - lang_registry.clone(), client_c.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_c.to_async(), ) .await .unwrap(); // Open and edit a buffer as both guests B and C. + let worktree_b = project_b.read_with(&cx_b, |p, _| p.worktrees()[0].clone()); + let worktree_c = project_c.read_with(&cx_c, |p, _| p.worktrees()[0].clone()); let buffer_b = worktree_b .update(&mut cx_b, |tree, cx| tree.open_buffer("file1", cx)) .await @@ -1343,14 +1381,14 @@ mod tests { async fn test_buffer_conflict_after_save(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 2 clients. 
let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/dir", json!({ @@ -1360,35 +1398,44 @@ mod tests { ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/dir".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/dir", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Join that worktree as client B, and see that a guest has joined as client A. - let worktree_b = Worktree::open_remote( + // Join that project as client B + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); + let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone()); + // Open a buffer as client B let buffer_b = worktree_b .update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx)) .await @@ -1430,14 +1477,14 @@ mod tests { ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 2 clients. let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/dir", json!({ @@ -1446,44 +1493,56 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/dir".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/dir", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Join that worktree as client B, and see that a guest has joined as client A. 
- let worktree_b = Worktree::open_remote( + // Join that project as client B + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); + let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone()); + // Open a buffer as client A let buffer_a = worktree_a .update(&mut cx_a, |tree, cx| tree.open_buffer("a.txt", cx)) .await .unwrap(); + + // Start opening the same buffer as client B let buffer_b = cx_b .background() .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))); - task::yield_now().await; + + // Edit the buffer as client A while client B is still opening it. buffer_a.update(&mut cx_a, |buf, cx| buf.edit([0..0], "z", cx)); let text = buffer_a.read_with(&cx_a, |buf, _| buf.text()); @@ -1498,14 +1557,14 @@ mod tests { ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 2 clients. let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/dir", json!({ @@ -1514,45 +1573,58 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/dir".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/dir", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Join that worktree as client B, and see that a guest has joined as client A. - let worktree_b = Worktree::open_remote( + // Join that project as client B + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); - worktree_a - .condition(&cx_a, |tree, _| tree.collaborators().len() == 1) + let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone()); + + // See that a guest has joined as client A. + project_a + .condition(&cx_a, |p, _| p.collaborators().len() == 1) .await; + // Begin opening a buffer as client B, but leave the project before the open completes. let buffer_b = cx_b .background() .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))); cx_b.update(|_| drop(worktree_b)); drop(buffer_b); - worktree_a - .condition(&cx_a, |tree, _| tree.collaborators().len() == 0) + + // See that the guest has left. 
+ project_a + .condition(&cx_a, |p, _| p.collaborators().len() == 0) .await; } @@ -1560,14 +1632,14 @@ mod tests { async fn test_peer_disconnection(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 2 clients. let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/a", json!({ @@ -1577,42 +1649,51 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/a".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Join that worktree as client B, and see that a guest has joined as client A. - let _worktree_b = Worktree::open_remote( + // Join that project as client B + let _project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); - worktree_a - .condition(&cx_a, |tree, _| tree.collaborators().len() == 1) + + // See that a guest has joined as client A. + project_a + .condition(&cx_a, |p, _| p.collaborators().len() == 1) .await; // Drop client B's connection and ensure client A observes client B leaving the worktree. client_b.disconnect(&cx_b.to_async()).await.unwrap(); - worktree_a - .condition(&cx_a, |tree, _| tree.collaborators().len() == 0) + project_a + .condition(&cx_a, |p, _| p.collaborators().len() == 0) .await; } @@ -1622,28 +1703,30 @@ mod tests { mut cx_b: TestAppContext, ) { cx_a.foreground().forbid_parking(); + let mut lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); + + // Set up a fake language server. let (language_server_config, mut fake_language_server) = LanguageServerConfig::fake(cx_a.background()).await; - let mut lang_registry = LanguageRegistry::new(); - lang_registry.add(Arc::new(Language::new( - LanguageConfig { - name: "Rust".to_string(), - path_suffixes: vec!["rs".to_string()], - language_server: Some(language_server_config), - ..Default::default() - }, - Some(tree_sitter_rust::language()), - ))); - - let lang_registry = Arc::new(lang_registry); + Arc::get_mut(&mut lang_registry) + .unwrap() + .add(Arc::new(Language::new( + LanguageConfig { + name: "Rust".to_string(), + path_suffixes: vec!["rs".to_string()], + language_server: Some(language_server_config), + ..Default::default() + }, + Some(tree_sitter_rust::language()), + ))); // Connect to a server as 2 clients. 
let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - // Share a local worktree as client A - let fs = Arc::new(FakeFs::new()); + // Share a project as client A fs.insert_tree( "/a", json!({ @@ -1653,25 +1736,31 @@ mod tests { }), ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/a".as_ref(), - fs, - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); worktree_a .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - // Cause language server to start. + // Cause the language server to start. let _ = cx_a .background() .spawn(worktree_a.update(&mut cx_a, |worktree, cx| { @@ -1706,15 +1795,17 @@ mod tests { .await; // Join the worktree as client B. - let worktree_b = Worktree::open_remote( + let project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await .unwrap(); + let worktree_b = project_b.update(&mut cx_b, |p, _| p.worktrees()[0].clone()); // Open the file with the errors. let buffer_b = cx_b @@ -2175,6 +2266,7 @@ mod tests { ) { cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); + let fs = Arc::new(FakeFs::new()); // Connect to a server as 3 clients. let mut server = TestServer::start().await; @@ -2182,8 +2274,6 @@ mod tests { let client_b = server.create_client(&mut cx_b, "user_b").await; let client_c = server.create_client(&mut cx_c, "user_c").await; - let fs = Arc::new(FakeFs::new()); - // Share a worktree as client A. 
fs.insert_tree( "/a", @@ -2193,16 +2283,22 @@ mod tests { ) .await; - let worktree_a = Worktree::open_local( - client_a.clone(), - client_a.user_store.clone(), - "/a".as_ref(), - fs.clone(), - lang_registry.clone(), - &mut cx_a.to_async(), - ) - .await - .unwrap(); + let project_a = cx_a.update(|cx| { + Project::local( + client_a.clone(), + client_a.user_store.clone(), + lang_registry.clone(), + fs.clone(), + cx, + ) + }); + let worktree_a = project_a + .update(&mut cx_a, |p, cx| p.add_local_worktree("/a", cx)) + .await + .unwrap(); + worktree_a + .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; client_a .user_store @@ -2223,16 +2319,20 @@ mod tests { }) .await; - let worktree_id = worktree_a - .update(&mut cx_a, |tree, cx| tree.as_local_mut().unwrap().share(cx)) + let project_id = project_a + .update(&mut cx_a, |project, _| project.next_remote_id()) + .await; + project_a + .update(&mut cx_a, |project, cx| project.share(cx)) .await .unwrap(); - let _worktree_b = Worktree::open_remote( + let _project_b = Project::remote( + project_id, client_b.clone(), - worktree_id, - lang_registry.clone(), client_b.user_store.clone(), + lang_registry.clone(), + fs.clone(), &mut cx_b.to_async(), ) .await @@ -2257,13 +2357,13 @@ mod tests { }) .await; - worktree_a - .condition(&cx_a, |worktree, _| { - worktree.collaborators().contains_key(&client_b.peer_id) + project_a + .condition(&cx_a, |project, _| { + project.collaborators().contains_key(&client_b.peer_id) }) .await; - cx_a.update(move |_| drop(worktree_a)); + cx_a.update(move |_| drop(project_a)); client_a .user_store .condition(&cx_a, |user_store, _| contacts(user_store) == vec![]) @@ -2283,12 +2383,12 @@ mod tests { .iter() .map(|contact| { let worktrees = contact - .worktrees + .projects .iter() - .map(|w| { + .map(|p| { ( - w.root_name.as_str(), - w.guests.iter().map(|p| p.github_login.as_str()).collect(), + p.worktree_root_names[0].as_str(), + p.guests.iter().map(|p| p.github_login.as_str()).collect(), ) }) .collect(); diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index 5b0b6d9554fb50000af4f6f5d2dad03fd510c23c..4ed6454afd1b50ff50b1f030abe4ef6e38343b9f 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -11,7 +11,7 @@ pub struct Store { projects: HashMap, visible_projects_by_user_id: HashMap>, channels: HashMap, - next_worktree_id: u64, + next_project_id: u64, } struct ConnectionState { @@ -24,19 +24,19 @@ pub struct Project { pub host_connection_id: ConnectionId, pub host_user_id: UserId, pub share: Option, - worktrees: HashMap, + pub worktrees: HashMap, } pub struct Worktree { pub authorized_user_ids: Vec, pub root_name: String, + pub share: Option, } #[derive(Default)] pub struct ProjectShare { pub guests: HashMap, pub active_replica_ids: HashSet, - pub worktrees: HashMap, } pub struct WorktreeShare { @@ -57,9 +57,9 @@ pub struct RemovedConnectionState { pub contact_ids: HashSet, } -pub struct JoinedWorktree<'a> { +pub struct JoinedProject<'a> { pub replica_id: ReplicaId, - pub worktree: &'a Worktree, + pub project: &'a Project, } pub struct UnsharedWorktree { @@ -67,7 +67,7 @@ pub struct UnsharedWorktree { pub authorized_user_ids: Vec, } -pub struct LeftWorktree { +pub struct LeftProject { pub connection_ids: Vec, pub authorized_user_ids: Vec, } @@ -114,17 +114,17 @@ impl Store { } let mut result = RemovedConnectionState::default(); - for worktree_id in connection.worktrees.clone() { - if let Ok(worktree) = self.unregister_worktree(worktree_id, 
connection_id) { - result - .contact_ids - .extend(worktree.authorized_user_ids.iter().copied()); - result.hosted_worktrees.insert(worktree_id, worktree); - } else if let Some(worktree) = self.leave_worktree(connection_id, worktree_id) { + for project_id in connection.projects.clone() { + if let Some((project, authorized_user_ids)) = + self.unregister_project(project_id, connection_id) + { + result.contact_ids.extend(authorized_user_ids); + result.hosted_projects.insert(project_id, project); + } else if let Some(project) = self.leave_project(connection_id, project_id) { result - .guest_worktree_ids - .insert(worktree_id, worktree.connection_ids); - result.contact_ids.extend(worktree.authorized_user_ids); + .guest_project_ids + .insert(project_id, project.connection_ids); + result.contact_ids.extend(project.authorized_user_ids); } } @@ -191,7 +191,7 @@ impl Store { let project = &self.projects[project_id]; let mut guests = HashSet::default(); - if let Ok(share) = worktree.share() { + if let Ok(share) = project.share() { for guest_connection_id in share.guests.keys() { if let Ok(user_id) = self.user_id_for_connection(*guest_connection_id) { guests.insert(user_id.to_proto()); @@ -200,6 +200,12 @@ impl Store { } if let Ok(host_user_id) = self.user_id_for_connection(project.host_connection_id) { + let mut worktree_root_names = project + .worktrees + .values() + .map(|worktree| worktree.root_name.clone()) + .collect::>(); + worktree_root_names.sort_unstable(); contacts .entry(host_user_id) .or_insert_with(|| proto::Contact { @@ -209,11 +215,7 @@ impl Store { .projects .push(proto::ProjectMetadata { id: *project_id, - worktree_root_names: project - .worktrees - .iter() - .map(|worktree| worktree.root_name.clone()) - .collect(), + worktree_root_names, is_shared: project.share.is_some(), guests: guests.into_iter().collect(), }); @@ -268,7 +270,20 @@ impl Store { } } - pub fn unregister_project(&mut self, project_id: u64) { + pub fn unregister_project( + &mut self, + project_id: u64, + connection_id: ConnectionId, + ) -> Option<(Project, Vec)> { + match self.projects.entry(project_id) { + hash_map::Entry::Occupied(e) => { + if e.get().host_connection_id != connection_id { + return None; + } + } + hash_map::Entry::Vacant(_) => return None, + } + todo!() } @@ -277,7 +292,7 @@ impl Store { project_id: u64, worktree_id: u64, acting_connection_id: ConnectionId, - ) -> tide::Result { + ) -> tide::Result<(Worktree, Vec)> { let project = self .projects .get_mut(&project_id) @@ -291,31 +306,25 @@ impl Store { .remove(&worktree_id) .ok_or_else(|| anyhow!("no such worktree"))?; - if let Some(connection) = self.connections.get_mut(&project.host_connection_id) { - connection.worktrees.remove(&worktree_id); - } - - if let Some(share) = &worktree.share { - for connection_id in share.guests.keys() { - if let Some(connection) = self.connections.get_mut(connection_id) { - connection.worktrees.remove(&worktree_id); - } - } + let mut guest_connection_ids = Vec::new(); + if let Some(share) = &project.share { + guest_connection_ids.extend(share.guests.keys()); } for authorized_user_id in &worktree.authorized_user_ids { - if let Some(visible_worktrees) = self - .visible_worktrees_by_user_id - .get_mut(&authorized_user_id) + if let Some(visible_projects) = + self.visible_projects_by_user_id.get_mut(authorized_user_id) { - visible_worktrees.remove(&worktree_id); + if !project.has_authorized_user_id(*authorized_user_id) { + visible_projects.remove(&project_id); + } } } #[cfg(test)] self.check_invariants(); - Ok(worktree) + 
Ok((worktree, guest_connection_ids)) } pub fn share_project(&mut self, project_id: u64, connection_id: ConnectionId) -> bool { @@ -328,47 +337,27 @@ impl Store { false } - pub fn share_worktree( + pub fn unshare_project( &mut self, project_id: u64, - worktree_id: u64, - connection_id: ConnectionId, - entries: HashMap, - ) -> Option> { - if let Some(project) = self.projects.get_mut(&project_id) { - if project.host_connection_id == connection_id { - if let Some(share) = project.share.as_mut() { - share - .worktrees - .insert(worktree_id, WorktreeShare { entries }); - return Some(project.authorized_user_ids()); - } - } - } - None - } - - pub fn unshare_worktree( - &mut self, - worktree_id: u64, acting_connection_id: ConnectionId, ) -> tide::Result { - let worktree = if let Some(worktree) = self.worktrees.get_mut(&worktree_id) { - worktree + let project = if let Some(project) = self.projects.get_mut(&project_id) { + project } else { - return Err(anyhow!("no such worktree"))?; + return Err(anyhow!("no such project"))?; }; - if worktree.host_connection_id != acting_connection_id { - return Err(anyhow!("not your worktree"))?; + if project.host_connection_id != acting_connection_id { + return Err(anyhow!("not your project"))?; } - let connection_ids = worktree.connection_ids(); - let authorized_user_ids = worktree.authorized_user_ids.clone(); - if let Some(share) = worktree.share.take() { + let connection_ids = project.connection_ids(); + let authorized_user_ids = project.authorized_user_ids(); + if let Some(share) = project.share.take() { for connection_id in share.guests.into_keys() { if let Some(connection) = self.connections.get_mut(&connection_id) { - connection.worktrees.remove(&worktree_id); + connection.projects.remove(&project_id); } } @@ -380,34 +369,51 @@ impl Store { authorized_user_ids, }) } else { - Err(anyhow!("worktree is not shared"))? + Err(anyhow!("project is not shared"))? 
+ } + } + + pub fn share_worktree( + &mut self, + project_id: u64, + worktree_id: u64, + connection_id: ConnectionId, + entries: HashMap, + ) -> Option> { + let project = self.projects.get_mut(&project_id)?; + let worktree = project.worktrees.get_mut(&worktree_id)?; + if project.host_connection_id == connection_id && project.share.is_some() { + worktree.share = Some(WorktreeShare { entries }); + Some(project.authorized_user_ids()) + } else { + None } } - pub fn join_worktree( + pub fn join_project( &mut self, connection_id: ConnectionId, user_id: UserId, - worktree_id: u64, - ) -> tide::Result { + project_id: u64, + ) -> tide::Result { let connection = self .connections .get_mut(&connection_id) .ok_or_else(|| anyhow!("no such connection"))?; - let worktree = self - .worktrees - .get_mut(&worktree_id) - .and_then(|worktree| { - if worktree.authorized_user_ids.contains(&user_id) { - Some(worktree) + let project = self + .projects + .get_mut(&project_id) + .and_then(|project| { + if project.has_authorized_user_id(user_id) { + Some(project) } else { None } }) - .ok_or_else(|| anyhow!("no such worktree"))?; + .ok_or_else(|| anyhow!("no such project"))?; - let share = worktree.share_mut()?; - connection.worktrees.insert(worktree_id); + let share = project.share_mut()?; + connection.projects.insert(project_id); let mut replica_id = 1; while share.active_replica_ids.contains(&replica_id) { @@ -419,33 +425,33 @@ impl Store { #[cfg(test)] self.check_invariants(); - Ok(JoinedWorktree { + Ok(JoinedProject { replica_id, - worktree: &self.worktrees[&worktree_id], + project: &self.projects[&project_id], }) } - pub fn leave_worktree( + pub fn leave_project( &mut self, connection_id: ConnectionId, - worktree_id: u64, - ) -> Option { - let worktree = self.worktrees.get_mut(&worktree_id)?; - let share = worktree.share.as_mut()?; + project_id: u64, + ) -> Option { + let project = self.projects.get_mut(&project_id)?; + let share = project.share.as_mut()?; let (replica_id, _) = share.guests.remove(&connection_id)?; share.active_replica_ids.remove(&replica_id); if let Some(connection) = self.connections.get_mut(&connection_id) { - connection.worktrees.remove(&worktree_id); + connection.projects.remove(&project_id); } - let connection_ids = worktree.connection_ids(); - let authorized_user_ids = worktree.authorized_user_ids.clone(); + let connection_ids = project.connection_ids(); + let authorized_user_ids = project.authorized_user_ids(); #[cfg(test)] self.check_invariants(); - Some(LeftWorktree { + Some(LeftProject { connection_ids, authorized_user_ids, }) @@ -454,115 +460,75 @@ impl Store { pub fn update_worktree( &mut self, connection_id: ConnectionId, + project_id: u64, worktree_id: u64, removed_entries: &[u64], updated_entries: &[proto::Entry], - ) -> tide::Result> { - let worktree = self.write_worktree(worktree_id, connection_id)?; - let share = worktree.share_mut()?; + ) -> Option> { + let project = self.write_project(project_id, connection_id)?; + let share = project.worktrees.get_mut(&worktree_id)?.share.as_mut()?; for entry_id in removed_entries { share.entries.remove(&entry_id); } for entry in updated_entries { share.entries.insert(entry.id, entry.clone()); } - Ok(worktree.connection_ids()) + Some(project.connection_ids()) } - pub fn worktree_host_connection_id( + pub fn project_connection_ids( &self, - connection_id: ConnectionId, - worktree_id: u64, - ) -> tide::Result { - Ok(self - .read_worktree(worktree_id, connection_id)? 
- .host_connection_id) - } - - pub fn worktree_guest_connection_ids( - &self, - connection_id: ConnectionId, - worktree_id: u64, - ) -> tide::Result> { - Ok(self - .read_worktree(worktree_id, connection_id)? - .share()? - .guests - .keys() - .copied() - .collect()) - } - - pub fn worktree_connection_ids( - &self, - connection_id: ConnectionId, - worktree_id: u64, - ) -> tide::Result> { - Ok(self - .read_worktree(worktree_id, connection_id)? - .connection_ids()) + project_id: u64, + acting_connection_id: ConnectionId, + ) -> Option> { + Some( + self.read_project(project_id, acting_connection_id)? + .connection_ids(), + ) } pub fn channel_connection_ids(&self, channel_id: ChannelId) -> Option> { Some(self.channels.get(&channel_id)?.connection_ids()) } - fn read_worktree( - &self, - worktree_id: u64, - connection_id: ConnectionId, - ) -> tide::Result<&Worktree> { - let worktree = self - .worktrees - .get(&worktree_id) - .ok_or_else(|| anyhow!("worktree not found"))?; - - if worktree.host_connection_id == connection_id - || worktree.share()?.guests.contains_key(&connection_id) + pub fn read_project(&self, project_id: u64, connection_id: ConnectionId) -> Option<&Project> { + let project = self.projects.get(&project_id)?; + if project.host_connection_id == connection_id + || project.share.as_ref()?.guests.contains_key(&connection_id) { - Ok(worktree) + Some(project) } else { - Err(anyhow!( - "{} is not a member of worktree {}", - connection_id, - worktree_id - ))? + None } } - fn write_worktree( + fn write_project( &mut self, - worktree_id: u64, + project_id: u64, connection_id: ConnectionId, - ) -> tide::Result<&mut Worktree> { - let worktree = self - .worktrees - .get_mut(&worktree_id) - .ok_or_else(|| anyhow!("worktree not found"))?; - - if worktree.host_connection_id == connection_id - || worktree - .share - .as_ref() - .map_or(false, |share| share.guests.contains_key(&connection_id)) + ) -> Option<&mut Project> { + let project = self.projects.get_mut(&project_id)?; + if project.host_connection_id == connection_id + || project.share.as_ref()?.guests.contains_key(&connection_id) { - Ok(worktree) + Some(project) } else { - Err(anyhow!( - "{} is not a member of worktree {}", - connection_id, - worktree_id - ))? 
+ None } } #[cfg(test)] fn check_invariants(&self) { for (connection_id, connection) in &self.connections { - for worktree_id in &connection.worktrees { - let worktree = &self.worktrees.get(&worktree_id).unwrap(); - if worktree.host_connection_id != *connection_id { - assert!(worktree.share().unwrap().guests.contains_key(connection_id)); + for project_id in &connection.projects { + let project = &self.projects.get(&project_id).unwrap(); + if project.host_connection_id != *connection_id { + assert!(project + .share + .as_ref() + .unwrap() + .guests + .contains_key(connection_id)); } } for channel_id in &connection.channels { @@ -585,22 +551,22 @@ impl Store { } } - for (worktree_id, worktree) in &self.worktrees { - let host_connection = self.connections.get(&worktree.host_connection_id).unwrap(); - assert!(host_connection.worktrees.contains(worktree_id)); + for (project_id, project) in &self.projects { + let host_connection = self.connections.get(&project.host_connection_id).unwrap(); + assert!(host_connection.projects.contains(project_id)); - for authorized_user_ids in &worktree.authorized_user_ids { - let visible_worktree_ids = self - .visible_worktrees_by_user_id - .get(authorized_user_ids) + for authorized_user_ids in project.authorized_user_ids() { + let visible_project_ids = self + .visible_projects_by_user_id + .get(&authorized_user_ids) .unwrap(); - assert!(visible_worktree_ids.contains(worktree_id)); + assert!(visible_project_ids.contains(project_id)); } - if let Some(share) = &worktree.share { + if let Some(share) = &project.share { for guest_connection_id in share.guests.keys() { let guest_connection = self.connections.get(guest_connection_id).unwrap(); - assert!(guest_connection.worktrees.contains(worktree_id)); + assert!(guest_connection.projects.contains(project_id)); } assert_eq!(share.active_replica_ids.len(), share.guests.len(),); assert_eq!( @@ -614,10 +580,10 @@ impl Store { } } - for (user_id, visible_worktree_ids) in &self.visible_worktrees_by_user_id { - for worktree_id in visible_worktree_ids { - let worktree = self.worktrees.get(worktree_id).unwrap(); - assert!(worktree.authorized_user_ids.contains(user_id)); + for (user_id, visible_project_ids) in &self.visible_projects_by_user_id { + for project_id in visible_project_ids { + let project = self.projects.get(project_id).unwrap(); + assert!(project.authorized_user_ids().contains(user_id)); } } @@ -630,7 +596,33 @@ impl Store { } } -impl Worktree { +impl Project { + pub fn has_authorized_user_id(&self, user_id: UserId) -> bool { + self.worktrees + .values() + .any(|worktree| worktree.authorized_user_ids.contains(&user_id)) + } + + pub fn authorized_user_ids(&self) -> Vec { + let mut ids = self + .worktrees + .values() + .flat_map(|worktree| worktree.authorized_user_ids.iter()) + .copied() + .collect::>(); + ids.sort_unstable(); + ids.dedup(); + ids + } + + pub fn guest_connection_ids(&self) -> Vec { + if let Some(share) = &self.share { + share.guests.keys().copied().collect() + } else { + Vec::new() + } + } + pub fn connection_ids(&self) -> Vec { if let Some(share) = &self.share { share diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 747a914ffe8ca8aaa55a91b0f792f5dd77fe2712..8818bbd5d1c3f49f91bc02c9181b0dc7dcd17aee 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -391,15 +391,13 @@ pub struct Workspace { impl Workspace { pub fn new(params: &WorkspaceParams, cx: &mut ViewContext) -> Self { - let project = cx.add_model(|cx| { - 
Project::local( - params.languages.clone(), - params.client.clone(), - params.user_store.clone(), - params.fs.clone(), - cx, - ) - }); + let project = Project::local( + params.client.clone(), + params.user_store.clone(), + params.languages.clone(), + params.fs.clone(), + cx, + ); cx.observe(&project, |_, _, cx| cx.notify()).detach(); let pane = cx.add_view(|_| Pane::new(params.settings.clone())); From 5d8d7de68d0fb6ee91ae0c8c2f640689288b7326 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 18:06:58 -0800 Subject: [PATCH 143/196] Fix accidental usages of local worktree id instead of remote id --- crates/project/src/worktree.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 0ec03af08d0bb18473381dce90e99e7ce24c3055..3810fef5843bbc1c69376455cf4077eadb6ee673 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -766,18 +766,25 @@ impl Worktree { operation: Operation, cx: &mut ModelContext, ) { - if let Some((project_id, rpc)) = match self { - Worktree::Local(worktree) => worktree - .share - .as_ref() - .map(|share| (share.project_id, worktree.client.clone())), - Worktree::Remote(worktree) => Some((worktree.project_id, worktree.client.clone())), + if let Some((project_id, worktree_id, rpc)) = match self { + Worktree::Local(worktree) => worktree.share.as_ref().map(|share| { + ( + share.project_id, + worktree.id() as u64, + worktree.client.clone(), + ) + }), + Worktree::Remote(worktree) => Some(( + worktree.project_id, + worktree.remote_id, + worktree.client.clone(), + )), } { cx.spawn(|worktree, mut cx| async move { if let Err(error) = rpc .request(proto::UpdateBuffer { project_id, - worktree_id: worktree.id() as u64, + worktree_id, buffer_id, operations: vec![language::proto::serialize_operation(&operation)], }) @@ -1972,10 +1979,10 @@ impl language::File for File { } fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext) { - let worktree_id = self.worktree.id() as u64; self.worktree.update(cx, |worktree, cx| { if let Worktree::Remote(worktree) = worktree { let project_id = worktree.project_id; + let worktree_id = worktree.remote_id; let rpc = worktree.client.clone(); cx.background() .spawn(async move { From a138955943e20433d5795189fa60b9d8ea74820e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 18:07:34 -0800 Subject: [PATCH 144/196] Fix logic for waiting for project's remote id --- crates/project/src/project.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e02b109b52a08f04335ea9eea84259e669c380f3..fa036c8419aea0206191025d5bc1445b5078db59 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -273,9 +273,11 @@ impl Project { } let mut watch = watch.unwrap(); loop { - if let Some(Some(id)) = watch.recv().await { + let id = *watch.borrow(); + if let Some(id) = id { return id; } + watch.recv().await; } } } From 870b73aa363661cafd9ecc274bea644d6d7142f5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 18:07:51 -0800 Subject: [PATCH 145/196] Send a LeaveProject message when dropping a remote project --- crates/project/src/project.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fa036c8419aea0206191025d5bc1445b5078db59..91bdb1e31ca75c4162ae4b8ef50dc41714fb9876 
100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -749,12 +749,24 @@ impl Entity for Project { type Event = Event; fn release(&mut self, cx: &mut gpui::MutableAppContext) { - if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state { - if let Some(project_id) = *remote_id_rx.borrow() { + match &self.client_state { + ProjectClientState::Local { remote_id_rx, .. } => { + if let Some(project_id) = *remote_id_rx.borrow() { + let rpc = self.client.clone(); + cx.spawn(|_| async move { + if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await { + log::error!("error unregistering project: {}", err); + } + }) + .detach(); + } + } + ProjectClientState::Remote { remote_id, .. } => { let rpc = self.client.clone(); + let project_id = *remote_id; cx.spawn(|_| async move { - if let Err(err) = rpc.send(proto::UnregisterProject { project_id }).await { - log::error!("error unregistering project: {}", err); + if let Err(err) = rpc.send(proto::LeaveProject { project_id }).await { + log::error!("error leaving project: {}", err); } }) .detach(); From 636931373ee371645cbb6c6177119ad19d8c538e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 18:08:06 -0800 Subject: [PATCH 146/196] Add missing RPC handlers for local projects --- crates/project/src/project.rs | 43 ++++++++++++++++++++++++++++++----- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 91bdb1e31ca75c4162ae4b8ef50dc41714fb9876..76aec72e907022c60312e3ab8cf858f950065ee8 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -241,13 +241,15 @@ impl Project { self.subscriptions.clear(); if let Some(remote_id) = remote_id { + let client = &self.client; self.subscriptions.extend([ - self.client - .subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), - self.client - .subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), - self.client - .subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), + client.subscribe_to_entity(remote_id, cx, Self::handle_open_buffer), + client.subscribe_to_entity(remote_id, cx, Self::handle_close_buffer), + client.subscribe_to_entity(remote_id, cx, Self::handle_add_collaborator), + client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), + client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), + client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), + client.subscribe_to_entity(remote_id, cx, Self::handle_buffer_saved), ]); } } @@ -637,6 +639,35 @@ impl Project { Ok(()) } + pub fn handle_open_buffer( + &mut self, + envelope: TypedEnvelope, + rpc: Arc, + cx: &mut ModelContext, + ) -> anyhow::Result<()> { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + return worktree.update(cx, |worktree, cx| { + worktree.handle_open_buffer(envelope, rpc, cx) + }); + } else { + Err(anyhow!("no such worktree")) + } + } + + pub fn handle_close_buffer( + &mut self, + envelope: TypedEnvelope, + rpc: Arc, + cx: &mut ModelContext, + ) -> anyhow::Result<()> { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + worktree.update(cx, |worktree, cx| { + worktree.handle_close_buffer(envelope, rpc, cx) + })?; + } + Ok(()) + } + pub fn handle_buffer_saved( &mut self, envelope: TypedEnvelope, From 788bb4a3682f0f85accceae7238a12307d742a2a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 20 Dec 2021 18:08:53 
-0800 Subject: [PATCH 147/196] Get some RPC integration tests passing --- crates/server/src/rpc.rs | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 2e60f014367ce6315efa38d84cdaf31b2838ed33..0237d4a1a8fc32a7881c2be25456db8093a845be 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -215,9 +215,17 @@ impl Server { mut self: Arc, request: TypedEnvelope, ) -> tide::Result<()> { - let mut state = self.state_mut(); - let user_id = state.user_id_for_connection(request.sender_id)?; - state.register_project(request.sender_id, user_id); + let project_id = { + let mut state = self.state_mut(); + let user_id = state.user_id_for_connection(request.sender_id)?; + state.register_project(request.sender_id, user_id) + }; + self.peer + .respond( + request.receipt(), + proto::RegisterProjectResponse { project_id }, + ) + .await?; Ok(()) } @@ -281,10 +289,10 @@ impl Server { let worktrees = joined .project .worktrees - .values() - .filter_map(|worktree| { + .iter() + .filter_map(|(id, worktree)| { worktree.share.as_ref().map(|share| proto::Worktree { - id: project_id, + id: *id, root_name: worktree.root_name.clone(), entries: share.entries.values().cloned().collect(), }) @@ -1062,7 +1070,7 @@ mod tests { .await .unwrap(); - // Join that project as client B, and see that a guest has joined as client A. + // Join that project as client B let project_b = Project::remote( project_id, client_b.clone(), @@ -1143,8 +1151,8 @@ mod tests { .condition(&cx_a, |tree, cx| !tree.has_open_buffer("b.txt", cx)) .await; - // Dropping the worktree removes client B from client A's collaborators. - cx_b.update(move |_| drop(worktree_b)); + // Dropping the client B's project removes client B from client A's collaborators. + cx_b.update(move |_| drop(project_b)); project_a .condition(&cx_a, |project, _| project.collaborators().is_empty()) .await; @@ -1155,12 +1163,12 @@ mod tests { cx_b.update(zed::contacts_panel::init); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = Arc::new(FakeFs::new()); + cx_a.foreground().forbid_parking(); // Connect to a server as 2 clients. let mut server = TestServer::start().await; let client_a = server.create_client(&mut cx_a, "user_a").await; let client_b = server.create_client(&mut cx_b, "user_b").await; - cx_a.foreground().forbid_parking(); // Share a project as client A fs.insert_tree( @@ -1224,14 +1232,14 @@ mod tests { } #[gpui::test] - async fn test_propagate_saves_and_fs_changes_in_shared_worktree( + async fn test_propagate_saves_and_fs_changes( mut cx_a: TestAppContext, mut cx_b: TestAppContext, mut cx_c: TestAppContext, ) { - cx_a.foreground().forbid_parking(); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = Arc::new(FakeFs::new()); + cx_a.foreground().forbid_parking(); // Connect to a server as 3 clients. 
let mut server = TestServer::start().await; From 4053d683d9b77604457a5814ec88d91ab14da7ed Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 08:35:08 +0100 Subject: [PATCH 148/196] Re-enable commented out worktree test --- crates/project/src/worktree.rs | 310 ++++++++++++++++----------------- 1 file changed, 149 insertions(+), 161 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 3810fef5843bbc1c69376455cf4077eadb6ee673..59ea1f612e9dd570b9d28789f4f241e93cc23bd6 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3067,172 +3067,160 @@ mod tests { assert_eq!(new_text, buffer.read_with(&cx, |buffer, _| buffer.text())); } - // #[gpui::test] - // async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) { - // let dir = temp_tree(json!({ - // "a": { - // "file1": "", - // "file2": "", - // "file3": "", - // }, - // "b": { - // "c": { - // "file4": "", - // "file5": "", - // } - // } - // })); + #[gpui::test] + async fn test_rescan_and_remote_updates(mut cx: gpui::TestAppContext) { + let dir = temp_tree(json!({ + "a": { + "file1": "", + "file2": "", + "file3": "", + }, + "b": { + "c": { + "file4": "", + "file5": "", + } + } + })); - // let user_id = 5; - // let mut client = Client::new(); - // let server = FakeServer::for_client(user_id, &mut client, &cx).await; - // let user_store = server.build_user_store(client.clone(), &mut cx).await; - // let tree = Worktree::open_local( - // client, - // user_store.clone(), - // dir.path(), - // Arc::new(RealFs), - // Default::default(), - // &mut cx.to_async(), - // ) - // .await - // .unwrap(); + let user_id = 5; + let mut client = Client::new(); + let server = FakeServer::for_client(user_id, &mut client, &cx).await; + let user_store = server.build_user_store(client.clone(), &mut cx).await; + let tree = Worktree::open_local( + client, + user_store.clone(), + dir.path(), + Arc::new(RealFs), + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); - // let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { - // let buffer = tree.update(cx, |tree, cx| tree.open_buffer(path, cx)); - // async move { buffer.await.unwrap() } - // }; - // let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| { - // tree.read_with(cx, |tree, _| { - // tree.entry_for_path(path) - // .expect(&format!("no entry for path {}", path)) - // .id - // }) - // }; - - // let buffer2 = buffer_for_path("a/file2", &mut cx).await; - // let buffer3 = buffer_for_path("a/file3", &mut cx).await; - // let buffer4 = buffer_for_path("b/c/file4", &mut cx).await; - // let buffer5 = buffer_for_path("b/c/file5", &mut cx).await; - - // let file2_id = id_for_path("a/file2", &cx); - // let file3_id = id_for_path("a/file3", &cx); - // let file4_id = id_for_path("b/c/file4", &cx); - - // // Wait for the initial scan. - // cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) - // .await; + let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| { + let buffer = tree.update(cx, |tree, cx| tree.open_buffer(path, cx)); + async move { buffer.await.unwrap() } + }; + let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| { + tree.read_with(cx, |tree, _| { + tree.entry_for_path(path) + .expect(&format!("no entry for path {}", path)) + .id + }) + }; - // // Create a remote copy of this worktree. 
- // let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot()); - // let worktree_id = 1; - // let proto_message = tree.update(&mut cx, |tree, cx| tree.as_local().unwrap().to_proto(cx)); - // let open_worktree = server.receive::().await.unwrap(); - // server - // .respond( - // open_worktree.receipt(), - // proto::OpenWorktreeResponse { worktree_id: 1 }, - // ) - // .await; + let buffer2 = buffer_for_path("a/file2", &mut cx).await; + let buffer3 = buffer_for_path("a/file3", &mut cx).await; + let buffer4 = buffer_for_path("b/c/file4", &mut cx).await; + let buffer5 = buffer_for_path("b/c/file5", &mut cx).await; - // let remote = Worktree::remote( - // proto::JoinWorktreeResponse { - // worktree: Some(proto_message.await), - // replica_id: 1, - // collaborators: Vec::new(), - // }, - // Client::new(), - // user_store, - // Default::default(), - // &mut cx.to_async(), - // ) - // .await - // .unwrap(); + let file2_id = id_for_path("a/file2", &cx); + let file3_id = id_for_path("a/file3", &cx); + let file4_id = id_for_path("b/c/file4", &cx); - // cx.read(|cx| { - // assert!(!buffer2.read(cx).is_dirty()); - // assert!(!buffer3.read(cx).is_dirty()); - // assert!(!buffer4.read(cx).is_dirty()); - // assert!(!buffer5.read(cx).is_dirty()); - // }); - - // // Rename and delete files and directories. - // tree.flush_fs_events(&cx).await; - // std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); - // std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); - // std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); - // std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); - // tree.flush_fs_events(&cx).await; - - // let expected_paths = vec![ - // "a", - // "a/file1", - // "a/file2.new", - // "b", - // "d", - // "d/file3", - // "d/file4", - // ]; - - // cx.read(|app| { - // assert_eq!( - // tree.read(app) - // .paths() - // .map(|p| p.to_str().unwrap()) - // .collect::>(), - // expected_paths - // ); - - // assert_eq!(id_for_path("a/file2.new", &cx), file2_id); - // assert_eq!(id_for_path("d/file3", &cx), file3_id); - // assert_eq!(id_for_path("d/file4", &cx), file4_id); - - // assert_eq!( - // buffer2.read(app).file().unwrap().path().as_ref(), - // Path::new("a/file2.new") - // ); - // assert_eq!( - // buffer3.read(app).file().unwrap().path().as_ref(), - // Path::new("d/file3") - // ); - // assert_eq!( - // buffer4.read(app).file().unwrap().path().as_ref(), - // Path::new("d/file4") - // ); - // assert_eq!( - // buffer5.read(app).file().unwrap().path().as_ref(), - // Path::new("b/c/file5") - // ); - - // assert!(!buffer2.read(app).file().unwrap().is_deleted()); - // assert!(!buffer3.read(app).file().unwrap().is_deleted()); - // assert!(!buffer4.read(app).file().unwrap().is_deleted()); - // assert!(buffer5.read(app).file().unwrap().is_deleted()); - // }); - - // // Update the remote worktree. Check that it becomes consistent with the - // // local worktree. - // remote.update(&mut cx, |remote, cx| { - // let update_message = - // tree.read(cx) - // .snapshot() - // .build_update(&initial_snapshot, worktree_id, true); - // remote - // .as_remote_mut() - // .unwrap() - // .snapshot - // .apply_update(update_message) - // .unwrap(); - - // assert_eq!( - // remote - // .paths() - // .map(|p| p.to_str().unwrap()) - // .collect::>(), - // expected_paths - // ); - // }); - // } + // Wait for the initial scan. 
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete()) + .await; + + // Create a remote copy of this worktree. + let initial_snapshot = tree.read_with(&cx, |tree, _| tree.snapshot()); + let remote = Worktree::remote( + 1, + 1, + initial_snapshot.to_proto(), + Client::new(), + user_store, + Default::default(), + &mut cx.to_async(), + ) + .await + .unwrap(); + + cx.read(|cx| { + assert!(!buffer2.read(cx).is_dirty()); + assert!(!buffer3.read(cx).is_dirty()); + assert!(!buffer4.read(cx).is_dirty()); + assert!(!buffer5.read(cx).is_dirty()); + }); + + // Rename and delete files and directories. + tree.flush_fs_events(&cx).await; + std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap(); + std::fs::remove_file(dir.path().join("b/c/file5")).unwrap(); + std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap(); + std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap(); + tree.flush_fs_events(&cx).await; + + let expected_paths = vec![ + "a", + "a/file1", + "a/file2.new", + "b", + "d", + "d/file3", + "d/file4", + ]; + + cx.read(|app| { + assert_eq!( + tree.read(app) + .paths() + .map(|p| p.to_str().unwrap()) + .collect::>(), + expected_paths + ); + + assert_eq!(id_for_path("a/file2.new", &cx), file2_id); + assert_eq!(id_for_path("d/file3", &cx), file3_id); + assert_eq!(id_for_path("d/file4", &cx), file4_id); + + assert_eq!( + buffer2.read(app).file().unwrap().path().as_ref(), + Path::new("a/file2.new") + ); + assert_eq!( + buffer3.read(app).file().unwrap().path().as_ref(), + Path::new("d/file3") + ); + assert_eq!( + buffer4.read(app).file().unwrap().path().as_ref(), + Path::new("d/file4") + ); + assert_eq!( + buffer5.read(app).file().unwrap().path().as_ref(), + Path::new("b/c/file5") + ); + + assert!(!buffer2.read(app).file().unwrap().is_deleted()); + assert!(!buffer3.read(app).file().unwrap().is_deleted()); + assert!(!buffer4.read(app).file().unwrap().is_deleted()); + assert!(buffer5.read(app).file().unwrap().is_deleted()); + }); + + // Update the remote worktree. Check that it becomes consistent with the + // local worktree. 
+ remote.update(&mut cx, |remote, cx| { + let update_message = + tree.read(cx) + .snapshot() + .build_update(&initial_snapshot, 1, 1, true); + remote + .as_remote_mut() + .unwrap() + .snapshot + .apply_update(update_message) + .unwrap(); + + assert_eq!( + remote + .paths() + .map(|p| p.to_str().unwrap()) + .collect::>(), + expected_paths + ); + }); + } #[gpui::test] async fn test_rescan_with_gitignore(mut cx: gpui::TestAppContext) { From 1e8ef8a4c10811bf0c467d5bb5e0bb0c062c1adc Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 09:49:13 +0100 Subject: [PATCH 149/196] Register local worktrees after acquiring a project remote id --- Cargo.lock | 1 + crates/project/src/project.rs | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 3b186a2893c53702d19d6f1f5cb1d5edc4aeb660..1544ef8302da558c1153e81434b1deae1217feae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5652,6 +5652,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "clock", "gpui", "language", "log", diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 76aec72e907022c60312e3ab8cf858f950065ee8..34e2e477046efce005ae783f520fca8eed84d20f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -130,6 +130,27 @@ impl Project { } else { None }; + + if let Some(project_id) = remote_id { + let mut registrations = Vec::new(); + this.read_with(&cx, |this, cx| { + for worktree in &this.worktrees { + let worktree_id = worktree.id() as u64; + let worktree = worktree.read(cx).as_local().unwrap(); + registrations.push(rpc.request( + proto::RegisterWorktree { + project_id, + worktree_id, + root_name: worktree.root_name().to_string(), + authorized_logins: worktree.authorized_logins(), + }, + )); + } + }); + for registration in registrations { + registration.await?; + } + } this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx)); } } From 40da3b233f36aa6aa0c4a67c806f644dbd141446 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 09:50:11 +0100 Subject: [PATCH 150/196] Get more integration tests passing --- crates/project/src/project.rs | 67 +++++++++++------------ crates/project/src/worktree.rs | 8 +-- crates/project_panel/src/project_panel.rs | 6 +- crates/rpc/src/proto.rs | 1 + crates/server/src/rpc.rs | 1 + crates/workspace/Cargo.toml | 1 + crates/workspace/src/workspace.rs | 45 ++++++--------- 7 files changed, 58 insertions(+), 71 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 34e2e477046efce005ae783f520fca8eed84d20f..b805006b936391c4f89675e621aad1935574e009 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -25,7 +25,6 @@ pub use worktree::*; pub struct Project { worktrees: Vec>, - active_worktree: Option, active_entry: Option, languages: Arc, client: Arc, @@ -170,7 +169,6 @@ impl Project { _maintain_remote_id_task, }, subscriptions: Vec::new(), - active_worktree: None, active_entry: None, languages, client, @@ -230,7 +228,6 @@ impl Project { Ok(cx.add_model(|cx| Self { worktrees, - active_worktree: None, active_entry: None, collaborators, languages, @@ -270,6 +267,7 @@ impl Project { client.subscribe_to_entity(remote_id, cx, Self::handle_remove_collaborator), client.subscribe_to_entity(remote_id, cx, Self::handle_update_worktree), client.subscribe_to_entity(remote_id, cx, Self::handle_update_buffer), + client.subscribe_to_entity(remote_id, cx, Self::handle_save_buffer), client.subscribe_to_entity(remote_id, 
cx, Self::handle_buffer_saved), ]); } @@ -320,10 +318,10 @@ impl Project { &self.worktrees } - pub fn worktree_for_id(&self, id: usize) -> Option> { + pub fn worktree_for_id(&self, id: usize, cx: &AppContext) -> Option> { self.worktrees .iter() - .find(|worktree| worktree.id() == id) + .find(|worktree| worktree.read(cx).id() == id) .cloned() } @@ -346,18 +344,19 @@ impl Project { } })?; - rpc.send(proto::ShareProject { project_id }).await?; + rpc.request(proto::ShareProject { project_id }).await?; + let mut tasks = Vec::new(); this.update(&mut cx, |this, cx| { for worktree in &this.worktrees { worktree.update(cx, |worktree, cx| { - worktree - .as_local_mut() - .unwrap() - .share(project_id, cx) - .detach(); + let worktree = worktree.as_local_mut().unwrap(); + tasks.push(worktree.share(project_id, cx)); }); } }); + for task in tasks { + task.await?; + } Ok(()) }) } @@ -402,7 +401,7 @@ impl Project { path: ProjectPath, cx: &mut ModelContext, ) -> Task>> { - if let Some(worktree) = self.worktree_for_id(path.worktree_id) { + if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) { worktree.update(cx, |worktree, cx| worktree.open_buffer(path.path, cx)) } else { cx.spawn(|_, _| async move { Err(anyhow!("no such worktree")) }) @@ -463,28 +462,13 @@ impl Project { fn add_worktree(&mut self, worktree: ModelHandle, cx: &mut ModelContext) { cx.observe(&worktree, |_, _, cx| cx.notify()).detach(); - if self.active_worktree.is_none() { - self.set_active_worktree(Some(worktree.id()), cx); - } self.worktrees.push(worktree); cx.notify(); } - fn set_active_worktree(&mut self, worktree_id: Option, cx: &mut ModelContext) { - if self.active_worktree != worktree_id { - self.active_worktree = worktree_id; - cx.notify(); - } - } - - pub fn active_worktree(&self) -> Option> { - self.active_worktree - .and_then(|worktree_id| self.worktree_for_id(worktree_id)) - } - pub fn set_active_path(&mut self, entry: Option, cx: &mut ModelContext) { let new_active_entry = entry.and_then(|project_path| { - let worktree = self.worktree_for_id(project_path.worktree_id)?; + let worktree = self.worktree_for_id(project_path.worktree_id, cx)?; let entry = worktree.read(cx).entry_for_path(project_path.path)?; Some(ProjectEntry { worktree_id: project_path.worktree_id, @@ -492,9 +476,6 @@ impl Project { }) }); if new_active_entry != self.active_entry { - if let Some(worktree_id) = new_active_entry.map(|e| e.worktree_id) { - self.set_active_worktree(Some(worktree_id), cx); - } self.active_entry = new_active_entry; cx.emit(Event::ActiveEntryChanged(new_active_entry)); } @@ -637,7 +618,7 @@ impl Project { _: Arc, cx: &mut ModelContext, ) -> Result<()> { - if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { worktree.update(cx, |worktree, cx| { let worktree = worktree.as_remote_mut().unwrap(); worktree.update_from_remote(envelope, cx) @@ -652,7 +633,7 @@ impl Project { _: Arc, cx: &mut ModelContext, ) -> Result<()> { - if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { worktree.update(cx, |worktree, cx| { worktree.handle_update_buffer(envelope, cx) })?; @@ -660,13 +641,27 @@ impl Project { Ok(()) } + pub fn handle_save_buffer( + &mut self, + envelope: TypedEnvelope, + rpc: Arc, + cx: &mut ModelContext, + ) -> Result<()> { + if let Some(worktree) = 
self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { + worktree.update(cx, |worktree, cx| { + worktree.handle_save_buffer(envelope, rpc, cx) + })?; + } + Ok(()) + } + pub fn handle_open_buffer( &mut self, envelope: TypedEnvelope, rpc: Arc, cx: &mut ModelContext, ) -> anyhow::Result<()> { - if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { return worktree.update(cx, |worktree, cx| { worktree.handle_open_buffer(envelope, rpc, cx) }); @@ -681,7 +676,7 @@ impl Project { rpc: Arc, cx: &mut ModelContext, ) -> anyhow::Result<()> { - if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { worktree.update(cx, |worktree, cx| { worktree.handle_close_buffer(envelope, rpc, cx) })?; @@ -695,7 +690,7 @@ impl Project { _: Arc, cx: &mut ModelContext, ) -> Result<()> { - if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize) { + if let Some(worktree) = self.worktree_for_id(envelope.payload.worktree_id as usize, cx) { worktree.update(cx, |worktree, cx| { worktree.handle_buffer_saved(envelope, cx) })?; diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 59ea1f612e9dd570b9d28789f4f241e93cc23bd6..45aa357ded14950ce826e280447f8a672a9b7867 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -166,7 +166,7 @@ impl Worktree { let worktree = cx.update(|cx| { cx.add_model(|cx: &mut ModelContext| { let snapshot = Snapshot { - id: cx.model_id(), + id: remote_id as usize, scan_id: 0, abs_path: Path::new("").into(), root_name, @@ -1034,12 +1034,12 @@ impl LocalWorktree { path: &Path, cx: &mut ModelContext, ) -> Option> { - let handle = cx.handle(); + let worktree_id = self.id(); let mut result = None; self.open_buffers.retain(|_buffer_id, buffer| { if let Some(buffer) = buffer.upgrade(cx.as_ref()) { if let Some(file) = buffer.read(cx.as_ref()).file() { - if file.worktree_id() == handle.id() && file.path().as_ref() == path { + if file.worktree_id() == worktree_id && file.path().as_ref() == path { result = Some(buffer); } } @@ -1919,7 +1919,7 @@ impl language::File for File { version: clock::Global, cx: &mut MutableAppContext, ) -> Task> { - let worktree_id = self.worktree.id() as u64; + let worktree_id = self.worktree.read(cx).id() as u64; self.worktree.update(cx, |worktree, cx| match worktree { Worktree::Local(worktree) => { let rpc = worktree.client.clone(); diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index bf8a1c418a4b06d68121c819efabde4e9880e01e..041806034aef1cf4024c2829c988f39c5556b44d 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -118,7 +118,7 @@ impl ProjectPanel { worktree_id, entry_id, } => { - if let Some(worktree) = project.read(cx).worktree_for_id(*worktree_id) { + if let Some(worktree) = project.read(cx).worktree_for_id(*worktree_id, cx) { if let Some(entry) = worktree.read(cx).entry_for_id(*entry_id) { workspace .open_entry( @@ -307,7 +307,7 @@ impl ProjectPanel { fn selected_entry<'a>(&self, cx: &'a AppContext) -> Option<(&'a Worktree, &'a project::Entry)> { let selection = self.selection?; let project = self.project.read(cx); - let worktree = project.worktree_for_id(selection.worktree_id)?.read(cx); + let worktree = 
project.worktree_for_id(selection.worktree_id, cx)?.read(cx); Some((worktree, worktree.entry_for_id(selection.entry_id)?)) } @@ -374,7 +374,7 @@ impl ProjectPanel { fn expand_entry(&mut self, worktree_id: usize, entry_id: usize, cx: &mut ViewContext) { let project = self.project.read(cx); if let Some((worktree, expanded_dir_ids)) = project - .worktree_for_id(worktree_id) + .worktree_for_id(worktree_id, cx) .zip(self.expanded_dir_ids.get_mut(&worktree_id)) { let worktree = worktree.read(cx); diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 5b328c02eced78fadf717f2e8297a3fb4b6ba826..9274049a7b601e1c25bbcdb816c8ed3d8b07ccbe 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -170,6 +170,7 @@ request_messages!( (RegisterWorktree, Ack), (SaveBuffer, BufferSaved), (SendChannelMessage, SendChannelMessageResponse), + (ShareProject, Ack), (ShareWorktree, Ack), (UpdateBuffer, Ack), ); diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 0237d4a1a8fc32a7881c2be25456db8093a845be..d83142d0d93eb13b77f965dec98cc1755604b102 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -244,6 +244,7 @@ impl Server { ) -> tide::Result<()> { self.state_mut() .share_project(request.payload.project_id, request.sender_id); + self.peer.respond(request.receipt(), proto::Ack {}).await?; Ok(()) } diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 497254074eb4b4c7eb4f62751fab6a002676d8e9..759f115a358f86d297cfc00aa5f5dde4f625efd8 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -11,6 +11,7 @@ test-support = ["client/test-support", "project/test-support"] [dependencies] client = { path = "../client" } +clock = { path = "../clock" } gpui = { path = "../gpui" } language = { path = "../language" } project = { path = "../project" } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 8818bbd5d1c3f49f91bc02c9181b0dc7dcd17aee..7c98eda8121adda7a394dc7ef643a03796972cc3 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -6,6 +6,7 @@ mod status_bar; use anyhow::{anyhow, Result}; use client::{Authenticate, ChannelList, Client, User, UserStore}; +use clock::ReplicaId; use gpui::{ action, color::Color, @@ -641,7 +642,7 @@ impl Workspace { let worktree = match self .project .read(cx) - .worktree_for_id(project_path.worktree_id) + .worktree_for_id(project_path.worktree_id, cx) { Some(worktree) => worktree, None => { @@ -1007,17 +1008,12 @@ impl Workspace { Align::new( Flex::row() .with_children(self.render_collaborators(theme, cx)) - .with_child( - self.render_avatar( - self.user_store.read(cx).current_user().as_ref(), - self.project - .read(cx) - .active_worktree() - .map(|worktree| worktree.read(cx).replica_id()), - theme, - cx, - ), - ) + .with_child(self.render_avatar( + self.user_store.read(cx).current_user().as_ref(), + self.project.read(cx).replica_id(), + theme, + cx, + )) .with_children(self.render_connection_status()) .boxed(), ) @@ -1045,12 +1041,7 @@ impl Workspace { collaborators .into_iter() .map(|collaborator| { - self.render_avatar( - Some(&collaborator.user), - Some(collaborator.replica_id), - theme, - cx, - ) + self.render_avatar(Some(&collaborator.user), collaborator.replica_id, theme, cx) }) .collect() } @@ -1058,7 +1049,7 @@ impl Workspace { fn render_avatar( &self, user: Option<&Arc>, - replica_id: Option, + replica_id: ReplicaId, theme: &Theme, cx: &mut RenderContext, ) -> ElementBox { @@ -1076,15 +1067,13 @@ impl 
Workspace { .boxed(), ) .with_child( - AvatarRibbon::new(replica_id.map_or(Default::default(), |id| { - theme.editor.replica_selection_style(id).cursor - })) - .constrained() - .with_width(theme.workspace.titlebar.avatar_ribbon.width) - .with_height(theme.workspace.titlebar.avatar_ribbon.height) - .aligned() - .bottom() - .boxed(), + AvatarRibbon::new(theme.editor.replica_selection_style(replica_id).cursor) + .constrained() + .with_width(theme.workspace.titlebar.avatar_ribbon.width) + .with_height(theme.workspace.titlebar.avatar_ribbon.height) + .aligned() + .bottom() + .boxed(), ) .boxed(), ) From afec4152f41001b5ad1780b016b084f3618f8e35 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 10:17:26 +0100 Subject: [PATCH 151/196] Update contacts as projects/worktrees get registered/unregistered --- crates/server/src/rpc.rs | 8 ++++++-- crates/server/src/rpc/store.rs | 26 ++++++++++++++++---------- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index d83142d0d93eb13b77f965dec98cc1755604b102..e5ca51a6e832e8a516c003e5d15ae849fc626d1a 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -233,8 +233,12 @@ impl Server { mut self: Arc, request: TypedEnvelope, ) -> tide::Result<()> { - self.state_mut() - .unregister_project(request.payload.project_id, request.sender_id); + let project = self + .state_mut() + .unregister_project(request.payload.project_id, request.sender_id) + .ok_or_else(|| anyhow!("no such project"))?; + self.update_contacts_for_users(project.authorized_user_ids().iter()) + .await?; Ok(()) } diff --git a/crates/server/src/rpc/store.rs b/crates/server/src/rpc/store.rs index 4ed6454afd1b50ff50b1f030abe4ef6e38343b9f..e4d740629fbb4816f892c6cfa589596036116795 100644 --- a/crates/server/src/rpc/store.rs +++ b/crates/server/src/rpc/store.rs @@ -115,10 +115,8 @@ impl Store { let mut result = RemovedConnectionState::default(); for project_id in connection.projects.clone() { - if let Some((project, authorized_user_ids)) = - self.unregister_project(project_id, connection_id) - { - result.contact_ids.extend(authorized_user_ids); + if let Some(project) = self.unregister_project(project_id, connection_id) { + result.contact_ids.extend(project.authorized_user_ids()); result.hosted_projects.insert(project_id, project); } else if let Some(project) = self.leave_project(connection_id, project_id) { result @@ -274,17 +272,25 @@ impl Store { &mut self, project_id: u64, connection_id: ConnectionId, - ) -> Option<(Project, Vec)> { + ) -> Option { match self.projects.entry(project_id) { hash_map::Entry::Occupied(e) => { - if e.get().host_connection_id != connection_id { - return None; + if e.get().host_connection_id == connection_id { + for user_id in e.get().authorized_user_ids() { + if let hash_map::Entry::Occupied(mut projects) = + self.visible_projects_by_user_id.entry(user_id) + { + projects.get_mut().remove(&project_id); + } + } + + Some(e.remove()) + } else { + None } } - hash_map::Entry::Vacant(_) => return None, + hash_map::Entry::Vacant(_) => None, } - - todo!() } pub fn unregister_worktree( From c6dd797f4e08545afd88176c4562022a1b1f7aa4 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 10:20:05 +0100 Subject: [PATCH 152/196] Drop project instead of worktree to simulate client leaving --- crates/server/src/rpc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 
e5ca51a6e832e8a516c003e5d15ae849fc626d1a..b33d76bfc6410d51eb254fc82839eae536f7e510 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1632,7 +1632,7 @@ mod tests { let buffer_b = cx_b .background() .spawn(worktree_b.update(&mut cx_b, |worktree, cx| worktree.open_buffer("a.txt", cx))); - cx_b.update(|_| drop(worktree_b)); + cx_b.update(|_| drop(project_b)); drop(buffer_b); // See that the guest has left. From 5d2c4807db45fc1505115b02178dbe2547bf608a Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 10:25:37 +0100 Subject: [PATCH 153/196] Fix invalid theme variables --- crates/zed/assets/themes/_base.toml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/crates/zed/assets/themes/_base.toml b/crates/zed/assets/themes/_base.toml index 7d25ee87fc2ab6c5341763138c244328e5d70ad5..958620521d8e9048feea444275fb79ec101f0cf5 100644 --- a/crates/zed/assets/themes/_base.toml +++ b/crates/zed/assets/themes/_base.toml @@ -156,30 +156,30 @@ host_username = { extends = "$text.0", padding.left = 8 } tree_branch_width = 1 tree_branch_color = "$surface.2" -[contacts_panel.worktree] +[contacts_panel.project] height = 24 padding = { left = 8 } guest_avatar = { corner_radius = 8, width = 14 } guest_avatar_spacing = 4 -[contacts_panel.worktree.name] +[contacts_panel.project.name] extends = "$text.1" margin = { right = 6 } -[contacts_panel.unshared_worktree] -extends = "$contacts_panel.worktree" +[contacts_panel.unshared_project] +extends = "$contacts_panel.project" -[contacts_panel.hovered_unshared_worktree] -extends = "$contacts_panel.unshared_worktree" +[contacts_panel.hovered_unshared_project] +extends = "$contacts_panel.unshared_project" background = "$state.hover" corner_radius = 6 -[contacts_panel.shared_worktree] -extends = "$contacts_panel.worktree" +[contacts_panel.shared_project] +extends = "$contacts_panel.project" name.color = "$text.0.color" -[contacts_panel.hovered_shared_worktree] -extends = "$contacts_panel.shared_worktree" +[contacts_panel.hovered_shared_project] +extends = "$contacts_panel.shared_project" background = "$state.hover" corner_radius = 6 From 17094ec542db09919d53c3fb6005d2a6b9058a53 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 11:58:01 +0100 Subject: [PATCH 154/196] Allow opening of remote projects via the contacts panel --- crates/contacts_panel/src/contacts_panel.rs | 51 +++----- crates/server/src/rpc.rs | 1 - crates/workspace/src/workspace.rs | 131 +++++++++++++++----- crates/zed/src/main.rs | 1 - crates/zed/src/zed.rs | 58 +++++---- 5 files changed, 156 insertions(+), 86 deletions(-) diff --git a/crates/contacts_panel/src/contacts_panel.rs b/crates/contacts_panel/src/contacts_panel.rs index e3db1931afc9b219aef1898cb78f235dc4becd89..2fcdf79bceb5ad4466e219f4d5292592906e4946 100644 --- a/crates/contacts_panel/src/contacts_panel.rs +++ b/crates/contacts_panel/src/contacts_panel.rs @@ -1,21 +1,15 @@ +use std::sync::Arc; + use client::{Contact, UserStore}; use gpui::{ - action, elements::*, geometry::{rect::RectF, vector::vec2f}, platform::CursorStyle, - Element, ElementBox, Entity, LayoutContext, ModelHandle, MutableAppContext, RenderContext, - Subscription, View, ViewContext, + Element, ElementBox, Entity, LayoutContext, ModelHandle, RenderContext, Subscription, View, + ViewContext, }; use postage::watch; -use theme::Theme; -use workspace::{Settings, Workspace}; - -action!(JoinProject, u64); - -pub fn init(cx: &mut MutableAppContext) { - 
cx.add_action(ContactsPanel::join_project); -} +use workspace::{AppState, JoinProject, JoinProjectParams, Settings}; pub struct ContactsPanel { contacts: ListState, @@ -25,42 +19,33 @@ pub struct ContactsPanel { } impl ContactsPanel { - pub fn new( - user_store: ModelHandle, - settings: watch::Receiver, - cx: &mut ViewContext, - ) -> Self { + pub fn new(app_state: Arc, cx: &mut ViewContext) -> Self { Self { contacts: ListState::new( - user_store.read(cx).contacts().len(), + app_state.user_store.read(cx).contacts().len(), Orientation::Top, 1000., { - let user_store = user_store.clone(); - let settings = settings.clone(); + let app_state = app_state.clone(); move |ix, cx| { - let user_store = user_store.read(cx); + let user_store = app_state.user_store.read(cx); let contacts = user_store.contacts().clone(); let current_user_id = user_store.current_user().map(|user| user.id); Self::render_collaborator( &contacts[ix], current_user_id, - &settings.borrow().theme, + app_state.clone(), cx, ) } }, ), - _maintain_contacts: cx.observe(&user_store, Self::update_contacts), - user_store, - settings, + _maintain_contacts: cx.observe(&app_state.user_store, Self::update_contacts), + user_store: app_state.user_store.clone(), + settings: app_state.settings.clone(), } } - fn join_project(_: &mut Workspace, _: &JoinProject, _: &mut ViewContext) { - todo!(); - } - fn update_contacts(&mut self, _: ModelHandle, cx: &mut ViewContext) { self.contacts .reset(self.user_store.read(cx).contacts().len()); @@ -70,10 +55,10 @@ impl ContactsPanel { fn render_collaborator( collaborator: &Contact, current_user_id: Option, - theme: &Theme, + app_state: Arc, cx: &mut LayoutContext, ) -> ElementBox { - let theme = &theme.contacts_panel; + let theme = &app_state.settings.borrow().theme.contacts_panel; let project_count = collaborator.projects.len(); let font_cache = cx.font_cache(); let line_height = theme.unshared_project.name.text.line_height(font_cache); @@ -169,6 +154,7 @@ impl ContactsPanel { .iter() .any(|guest| Some(guest.id) == current_user_id); let is_shared = project.is_shared; + let app_state = app_state.clone(); MouseEventHandler::new::( project_id as usize, @@ -222,7 +208,10 @@ impl ContactsPanel { }) .on_click(move |cx| { if !is_host && !is_guest { - cx.dispatch_action(JoinProject(project_id)) + cx.dispatch_global_action(JoinProject(JoinProjectParams { + project_id, + app_state: app_state.clone(), + })); } }) .expanded(1.0) diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index b33d76bfc6410d51eb254fc82839eae536f7e510..0aa269e28a838bfbde1b0713513ff1175a1b4940 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1165,7 +1165,6 @@ mod tests { #[gpui::test] async fn test_unshare_project(mut cx_a: TestAppContext, mut cx_b: TestAppContext) { - cx_b.update(zed::contacts_panel::init); let lang_registry = Arc::new(LanguageRegistry::new()); let fs = Arc::new(FakeFs::new()); cx_a.foreground().forbid_parking(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7c98eda8121adda7a394dc7ef643a03796972cc3..7dfe81f84ea7746276813a520936d3ac07cb13a8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -40,17 +40,21 @@ use theme::{Theme, ThemeRegistry}; action!(Open, Arc); action!(OpenNew, Arc); action!(OpenPaths, OpenParams); +action!(JoinProject, JoinProjectParams); action!(Save); action!(DebugElements); pub fn init(cx: &mut MutableAppContext) { cx.add_global_action(open); cx.add_global_action(move |action: &OpenPaths, 
cx: &mut MutableAppContext| { - open_paths(&action.0.paths, &action.0.app_state, cx).detach() + open_paths(&action.0.paths, &action.0.app_state, cx).detach(); }); cx.add_global_action(move |action: &OpenNew, cx: &mut MutableAppContext| { open_new(&action.0, cx) }); + cx.add_global_action(move |action: &JoinProject, cx: &mut MutableAppContext| { + join_project(action.0.project_id, &action.0.app_state, cx).detach(); + }); cx.add_action(Workspace::save_active_item); cx.add_action(Workspace::debug_elements); @@ -90,8 +94,11 @@ pub struct AppState { pub channel_list: ModelHandle, pub entry_openers: Arc<[Box]>, pub build_window_options: &'static dyn Fn() -> WindowOptions<'static>, - pub build_workspace: - &'static dyn Fn(&WorkspaceParams, &mut ViewContext) -> Workspace, + pub build_workspace: &'static dyn Fn( + ModelHandle, + &Arc, + &mut ViewContext, + ) -> Workspace, } #[derive(Clone)] @@ -100,6 +107,12 @@ pub struct OpenParams { pub app_state: Arc, } +#[derive(Clone)] +pub struct JoinProjectParams { + pub project_id: u64, + pub app_state: Arc, +} + pub trait EntryOpener { fn open( &self, @@ -338,6 +351,7 @@ impl Clone for Box { #[derive(Clone)] pub struct WorkspaceParams { + pub project: ModelHandle, pub client: Arc, pub fs: Arc, pub languages: Arc, @@ -350,7 +364,8 @@ pub struct WorkspaceParams { impl WorkspaceParams { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut MutableAppContext) -> Self { - let languages = LanguageRegistry::new(); + let fs = Arc::new(project::FakeFs::new()); + let languages = Arc::new(LanguageRegistry::new()); let client = Client::new(); let http_client = client::test::FakeHttpClient::new(|_| async move { Ok(client::http::ServerResponse::new(404)) @@ -359,17 +374,45 @@ impl WorkspaceParams { gpui::fonts::with_font_cache(cx.font_cache().clone(), || theme::Theme::default()); let settings = Settings::new("Courier", cx.font_cache(), Arc::new(theme)).unwrap(); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); + let project = Project::local( + client.clone(), + user_store.clone(), + languages.clone(), + fs.clone(), + cx, + ); Self { + project, channel_list: cx .add_model(|cx| ChannelList::new(user_store.clone(), client.clone(), cx)), client, - fs: Arc::new(project::FakeFs::new()), - languages: Arc::new(languages), + fs, + languages, settings: watch::channel_with(settings).1, user_store, entry_openers: Arc::from([]), } } + + #[cfg(any(test, feature = "test-support"))] + pub fn local(app_state: &Arc, cx: &mut MutableAppContext) -> Self { + Self { + project: Project::local( + app_state.client.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + cx, + ), + client: app_state.client.clone(), + fs: app_state.fs.clone(), + languages: app_state.languages.clone(), + settings: app_state.settings.clone(), + user_store: app_state.user_store.clone(), + channel_list: app_state.channel_list.clone(), + entry_openers: app_state.entry_openers.clone(), + } + } } pub struct Workspace { @@ -392,14 +435,7 @@ pub struct Workspace { impl Workspace { pub fn new(params: &WorkspaceParams, cx: &mut ViewContext) -> Self { - let project = Project::local( - params.client.clone(), - params.user_store.clone(), - params.languages.clone(), - params.fs.clone(), - cx, - ); - cx.observe(&project, |_, _, cx| cx.notify()).detach(); + cx.observe(¶ms.project, |_, _, cx| cx.notify()).detach(); let pane = cx.add_view(|_| Pane::new(params.settings.clone())); let pane_id = pane.id(); @@ -445,7 +481,7 @@ impl Workspace { fs: 
params.fs.clone(), left_sidebar: Sidebar::new(Side::Left), right_sidebar: Sidebar::new(Side::Right), - project, + project: params.project.clone(), entry_openers: params.entry_openers.clone(), items: Default::default(), _observe_current_user, @@ -1258,20 +1294,6 @@ impl std::fmt::Debug for OpenParams { } } -impl<'a> From<&'a AppState> for WorkspaceParams { - fn from(state: &'a AppState) -> Self { - Self { - client: state.client.clone(), - fs: state.fs.clone(), - languages: state.languages.clone(), - settings: state.settings.clone(), - user_store: state.user_store.clone(), - channel_list: state.channel_list.clone(), - entry_openers: state.entry_openers.clone(), - } - } -} - fn open(action: &Open, cx: &mut MutableAppContext) { let app_state = action.0.clone(); cx.prompt_for_paths( @@ -1314,7 +1336,14 @@ pub fn open_paths( let workspace = existing.unwrap_or_else(|| { cx.add_window((app_state.build_window_options)(), |cx| { - (app_state.build_workspace)(&WorkspaceParams::from(app_state.as_ref()), cx) + let project = Project::local( + app_state.client.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + cx, + ); + (app_state.build_workspace)(project, &app_state, cx) }) .1 }); @@ -1326,9 +1355,49 @@ pub fn open_paths( }) } +pub fn join_project( + project_id: u64, + app_state: &Arc, + cx: &mut MutableAppContext, +) -> Task>> { + for window_id in cx.window_ids().collect::>() { + if let Some(workspace) = cx.root_view::(window_id) { + if workspace.read(cx).project().read(cx).remote_id() == Some(project_id) { + return Task::ready(Ok(workspace)); + } + } + } + + let app_state = app_state.clone(); + cx.spawn(|mut cx| async move { + let project = Project::remote( + project_id, + app_state.client.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + &mut cx, + ) + .await?; + let (_, workspace) = cx.update(|cx| { + cx.add_window((app_state.build_window_options)(), |cx| { + (app_state.build_workspace)(project, &app_state, cx) + }) + }); + Ok(workspace) + }) +} + fn open_new(app_state: &Arc, cx: &mut MutableAppContext) { let (window_id, workspace) = cx.add_window((app_state.build_window_options)(), |cx| { - (app_state.build_workspace)(&app_state.as_ref().into(), cx) + let project = Project::local( + app_state.client.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + cx, + ); + (app_state.build_workspace)(project, &app_state, cx) }); cx.dispatch_action(window_id, vec![workspace.id()], &OpenNew(app_state.clone())); } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 1b578598d134a3ddd7ff64caed911a1f6738271e..c676523554feb1b2568592ceba27b1e6410f7d0e 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -58,7 +58,6 @@ fn main() { editor::init(cx, &mut entry_openers); go_to_line::init(cx); file_finder::init(cx); - contacts_panel::init(cx); chat_panel::init(cx); project_panel::init(cx); diagnostics::init(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index ad543ee166e35bf1f27b771e0d963ccd7932bd0f..713a9a3cac2d3e2bee2aa9bfd701ba5b920c99c2 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -14,9 +14,10 @@ use gpui::{ geometry::vector::vec2f, keymap::Binding, platform::{WindowBounds, WindowOptions}, - ViewContext, + ModelHandle, ViewContext, }; pub use lsp; +use project::Project; pub use project::{self, fs}; use project_panel::ProjectPanel; use std::sync::Arc; @@ -48,27 +49,39 @@ pub fn init(app_state: &Arc, cx: &mut 
gpui::MutableAppContext) { ]) } -pub fn build_workspace(params: &WorkspaceParams, cx: &mut ViewContext) -> Workspace { - let mut workspace = Workspace::new(params, cx); +pub fn build_workspace( + project: ModelHandle, + app_state: &Arc, + cx: &mut ViewContext, +) -> Workspace { + let workspace_params = WorkspaceParams { + project, + client: app_state.client.clone(), + fs: app_state.fs.clone(), + languages: app_state.languages.clone(), + settings: app_state.settings.clone(), + user_store: app_state.user_store.clone(), + channel_list: app_state.channel_list.clone(), + entry_openers: app_state.entry_openers.clone(), + }; + let mut workspace = Workspace::new(&workspace_params, cx); let project = workspace.project().clone(); workspace.left_sidebar_mut().add_item( "icons/folder-tree-16.svg", - ProjectPanel::new(project, params.settings.clone(), cx).into(), + ProjectPanel::new(project, app_state.settings.clone(), cx).into(), ); workspace.right_sidebar_mut().add_item( "icons/user-16.svg", - cx.add_view(|cx| { - ContactsPanel::new(params.user_store.clone(), params.settings.clone(), cx) - }) - .into(), + cx.add_view(|cx| ContactsPanel::new(app_state.clone(), cx)) + .into(), ); workspace.right_sidebar_mut().add_item( "icons/comment-16.svg", cx.add_view(|cx| { ChatPanel::new( - params.client.clone(), - params.channel_list.clone(), - params.settings.clone(), + app_state.client.clone(), + app_state.channel_list.clone(), + app_state.settings.clone(), cx, ) }) @@ -76,9 +89,9 @@ pub fn build_workspace(params: &WorkspaceParams, cx: &mut ViewContext ); let diagnostic = - cx.add_view(|_| editor::items::DiagnosticMessage::new(params.settings.clone())); + cx.add_view(|_| editor::items::DiagnosticMessage::new(app_state.settings.clone())); let cursor_position = - cx.add_view(|_| editor::items::CursorPosition::new(params.settings.clone())); + cx.add_view(|_| editor::items::CursorPosition::new(app_state.settings.clone())); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(diagnostic, cx); status_bar.add_right_item(cursor_position, cx); @@ -225,8 +238,8 @@ mod tests { }), ) .await; - - let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx)); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); + let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace .update(&mut cx, |workspace, cx| { workspace.add_worktree(Path::new("/root"), cx) @@ -340,7 +353,8 @@ mod tests { fs.insert_file("/dir1/a.txt", "".into()).await.unwrap(); fs.insert_file("/dir2/b.txt", "".into()).await.unwrap(); - let (_, workspace) = cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx)); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); + let (_, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace .update(&mut cx, |workspace, cx| { workspace.add_worktree("/dir1".as_ref(), cx) @@ -406,8 +420,8 @@ mod tests { let fs = app_state.fs.as_fake(); fs.insert_tree("/root", json!({ "a.txt": "" })).await; - let (window_id, workspace) = - cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx)); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); + let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace .update(&mut cx, |workspace, cx| { workspace.add_worktree(Path::new("/root"), cx) @@ -453,7 +467,7 @@ mod tests { async fn test_open_and_save_new_file(mut cx: gpui::TestAppContext) { let app_state = cx.update(test_app_state); 
app_state.fs.as_fake().insert_dir("/root").await.unwrap(); - let params = app_state.as_ref().into(); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace .update(&mut cx, |workspace, cx| { @@ -570,7 +584,7 @@ mod tests { ) { let app_state = cx.update(test_app_state); app_state.fs.as_fake().insert_dir("/root").await.unwrap(); - let params = app_state.as_ref().into(); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); // Create a new untitled buffer @@ -628,8 +642,8 @@ mod tests { ) .await; - let (window_id, workspace) = - cx.add_window(|cx| Workspace::new(&app_state.as_ref().into(), cx)); + let params = cx.update(|cx| WorkspaceParams::local(&app_state, cx)); + let (window_id, workspace) = cx.add_window(|cx| Workspace::new(¶ms, cx)); workspace .update(&mut cx, |workspace, cx| { workspace.add_worktree(Path::new("/root"), cx) From 89c0b358a75c8bf17a0d5ae5540bd8d3fb5392be Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 12:45:20 +0100 Subject: [PATCH 155/196] Allow sharing/unsharing of projects --- crates/project/src/project.rs | 18 ++++++++-- crates/workspace/src/workspace.rs | 44 ++++++++++++++++++++++++ crates/zed/assets/icons/broadcast-24.svg | 6 ++++ 3 files changed, 65 insertions(+), 3 deletions(-) create mode 100644 crates/zed/assets/icons/broadcast-24.svg diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b805006b936391c4f89675e621aad1935574e009..34706f111886f5aa5313dd7dd8b7244c65bfb54f 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -357,6 +357,7 @@ impl Project { for task in tasks { task.await?; } + this.update(&mut cx, |_, cx| cx.notify()); Ok(()) }) } @@ -371,7 +372,7 @@ impl Project { .. } = &mut this.client_state { - *is_shared = true; + *is_shared = false; remote_id_rx .borrow() .ok_or_else(|| anyhow!("no project id")) @@ -381,7 +382,10 @@ impl Project { })?; rpc.send(proto::UnshareProject { project_id }).await?; - + this.update(&mut cx, |this, cx| { + this.collaborators.clear(); + cx.notify() + }); Ok(()) }) } @@ -396,6 +400,13 @@ impl Project { } } + pub fn is_local(&self) -> bool { + match &self.client_state { + ProjectClientState::Local { .. } => true, + ProjectClientState::Remote { .. } => false, + } + } + pub fn open_buffer( &self, path: ProjectPath, @@ -408,7 +419,7 @@ impl Project { } } - fn is_shared(&self) -> bool { + pub fn is_shared(&self) -> bool { match &self.client_state { ProjectClientState::Local { is_shared, .. } => *is_shared, ProjectClientState::Remote { .. 
} => false, @@ -512,6 +523,7 @@ impl Project { } = &mut self.client_state { *sharing_has_stopped = true; + self.collaborators.clear(); cx.notify(); Ok(()) } else { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7dfe81f84ea7746276813a520936d3ac07cb13a8..fdce6f6ff21cdeafde981f67dfad04a776425491 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -40,6 +40,7 @@ use theme::{Theme, ThemeRegistry}; action!(Open, Arc); action!(OpenNew, Arc); action!(OpenPaths, OpenParams); +action!(ToggleShare); action!(JoinProject, JoinProjectParams); action!(Save); action!(DebugElements); @@ -56,6 +57,7 @@ pub fn init(cx: &mut MutableAppContext) { join_project(action.0.project_id, &action.0.app_state, cx).detach(); }); + cx.add_action(Workspace::toggle_share); cx.add_action(Workspace::save_active_item); cx.add_action(Workspace::debug_elements); cx.add_action(Workspace::toggle_sidebar_item); @@ -992,6 +994,18 @@ impl Workspace { &self.active_pane } + fn toggle_share(&mut self, _: &ToggleShare, cx: &mut ViewContext) { + self.project.update(cx, |project, cx| { + if project.is_local() { + if project.is_shared() { + project.unshare(cx).detach(); + } else { + project.share(cx).detach(); + } + } + }); + } + fn render_connection_status(&self) -> Option { let theme = &self.settings.borrow().theme; match &*self.client.status().borrow() { @@ -1043,6 +1057,7 @@ impl Workspace { .with_child( Align::new( Flex::row() + .with_children(self.render_share_icon(cx)) .with_children(self.render_collaborators(theme, cx)) .with_child(self.render_avatar( self.user_store.read(cx).current_user().as_ref(), @@ -1133,6 +1148,35 @@ impl Workspace { .boxed() } } + + fn render_share_icon(&self, cx: &mut RenderContext) -> Option { + if self.project().read(cx).is_local() && self.client.user_id().is_some() { + enum Share {} + + let color = if self.project().read(cx).is_shared() { + Color::green() + } else { + Color::red() + }; + Some( + MouseEventHandler::new::(0, cx, |_, _| { + Align::new( + ConstrainedBox::new( + Svg::new("icons/broadcast-24.svg").with_color(color).boxed(), + ) + .with_width(24.) + .boxed(), + ) + .boxed() + }) + .with_cursor_style(CursorStyle::PointingHand) + .on_click(|cx| cx.dispatch_action(ToggleShare)) + .boxed(), + ) + } else { + None + } + } } impl Entity for Workspace { diff --git a/crates/zed/assets/icons/broadcast-24.svg b/crates/zed/assets/icons/broadcast-24.svg new file mode 100644 index 0000000000000000000000000000000000000000..391528cdc7fa6ee815e9e94e65c24617a932c2af --- /dev/null +++ b/crates/zed/assets/icons/broadcast-24.svg @@ -0,0 +1,6 @@ + + + + + + From 99317bbd6204417e256334180f2478fafa86e04e Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 14:51:09 +0100 Subject: [PATCH 156/196] Delete unit test Sharing/unsharing is already exercised via the integration tests. 
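For reference, the behavior those integration tests cover reduces to the small decision `Workspace::toggle_share` makes in the previous patch: only local projects can be toggled, and the action flips between sharing and unsharing. The sketch below is illustrative only — `ShareAction` and `toggle_share_action` are stand-in names, not types from this codebase — but it captures that logic in a self-contained, runnable form:

    #[derive(Debug, PartialEq)]
    enum ShareAction {
        Share,
        Unshare,
        None,
    }

    // Mirrors the checks in Workspace::toggle_share: remote projects are
    // never toggled; local projects flip between shared and unshared.
    fn toggle_share_action(is_local: bool, is_shared: bool) -> ShareAction {
        if !is_local {
            ShareAction::None
        } else if is_shared {
            ShareAction::Unshare
        } else {
            ShareAction::Share
        }
    }

    fn main() {
        assert_eq!(toggle_share_action(true, false), ShareAction::Share);
        assert_eq!(toggle_share_action(true, true), ShareAction::Unshare);
        assert_eq!(toggle_share_action(false, true), ShareAction::None);
        assert_eq!(toggle_share_action(false, false), ShareAction::None);
    }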
--- crates/project/src/worktree.rs | 56 ---------------------------------- 1 file changed, 56 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 45aa357ded14950ce826e280447f8a672a9b7867..31f4fe26937ff5faf268e2aa150e08a409caa569 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3274,62 +3274,6 @@ mod tests { }); } - // #[gpui::test] - // async fn test_open_and_share_worktree(mut cx: gpui::TestAppContext) { - // let user_id = 100; - // let mut client = Client::new(); - // let server = FakeServer::for_client(user_id, &mut client, &cx).await; - // let user_store = server.build_user_store(client.clone(), &mut cx).await; - - // let fs = Arc::new(FakeFs::new()); - // fs.insert_tree( - // "/path", - // json!({ - // "to": { - // "the-dir": { - // ".zed.toml": r#"collaborators = ["friend-1", "friend-2"]"#, - // "a.txt": "a-contents", - // }, - // }, - // }), - // ) - // .await; - - // let worktree = Worktree::open_local( - // client.clone(), - // user_store, - // "/path/to/the-dir".as_ref(), - // fs, - // Default::default(), - // &mut cx.to_async(), - // ) - // .await - // .unwrap(); - - // let open_worktree = server.receive::().await.unwrap(); - // assert_eq!( - // open_worktree.payload, - // proto::OpenWorktree { - // root_name: "the-dir".to_string(), - // authorized_logins: vec!["friend-1".to_string(), "friend-2".to_string()], - // } - // ); - - // server - // .respond( - // open_worktree.receipt(), - // proto::OpenWorktreeResponse { worktree_id: 5 }, - // ) - // .await; - // let remote_id = worktree - // .update(&mut cx, |tree, _| tree.as_local().unwrap().next_remote_id()) - // .await; - // assert_eq!(remote_id, Some(5)); - - // cx.update(move |_| drop(worktree)); - // server.receive::().await.unwrap(); - // } - #[gpui::test] async fn test_buffer_deduping(mut cx: gpui::TestAppContext) { let user_id = 100; From 8534a9cc412abdae85a2fab505c41a1aa66aeb5d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 16:38:18 +0100 Subject: [PATCH 157/196] Don't insert headers in `MultiBuffer` This lays the groundwork to insert headers in the block map instead. 
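With the header rows gone, a position in the multibuffer's concatenated text maps onto an excerpt with plain arithmetic — there is no `header_height` left to subtract, which is what the removed bookkeeping below was doing. A minimal sketch of that mapping, using illustrative names (`locate` is not an API in this codebase) and assuming each excerpt contributes its text plus one trailing newline separator:

    // Maps an offset in the concatenated text to (excerpt index, offset
    // within that excerpt), clipping offsets that land on the separator
    // newline back to the end of the excerpt.
    fn locate(excerpt_lens: &[usize], mut offset: usize) -> Option<(usize, usize)> {
        for (ix, &len) in excerpt_lens.iter().enumerate() {
            let piece = len + 1; // excerpt text + trailing newline
            if offset < piece {
                return Some((ix, offset.min(len)));
            }
            offset -= piece;
        }
        None
    }

    fn main() {
        // Two excerpts of 10 and 2 bytes, e.g. "bbbb\nccccc" and "jj".
        let lens = [10, 2];
        assert_eq!(locate(&lens, 0), Some((0, 0)));
        assert_eq!(locate(&lens, 10), Some((0, 10))); // separator clips to excerpt end
        assert_eq!(locate(&lens, 11), Some((1, 0)));
        assert_eq!(locate(&lens, 12), Some((1, 1)));
        assert_eq!(locate(&lens, 14), None);
    }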
--- crates/editor/src/display_map.rs | 3 +- crates/editor/src/multi_buffer.rs | 537 +++++++++--------------------- crates/text/src/point.rs | 2 +- crates/text/src/point_utf16.rs | 8 + 4 files changed, 165 insertions(+), 385 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index e1f7f473a864545ee9c53a1f35e270d336f620ce..494f662759c791bddc8667ad337cb75a0baf00c0 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -348,11 +348,12 @@ impl DisplaySnapshot { &'a self, rows: Range, ) -> impl 'a + Iterator, RenderHeaderFn)> { + todo!(); let start_row = DisplayPoint::new(rows.start, 0).to_point(self).row; let end_row = DisplayPoint::new(rows.end, 0).to_point(self).row; self.buffer_snapshot .excerpt_headers_in_range(start_row..end_row) - .map(move |(rows, render)| { + .map(move |(row, header_height, render)| { let start_row = Point::new(rows.start, 0).to_display_point(self).row(); let end_row = Point::new(rows.end, 0).to_display_point(self).row(); (start_row..end_row, render) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 7f0b2ea736cf8bb70a3e693dccf566c479b628ad..a642778f42bde5b512442c05ff211dc72dace908 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -114,7 +114,6 @@ struct ExcerptSummary { } pub struct MultiBufferRows<'a> { - header_height: u32, buffer_row_range: Range, excerpts: Cursor<'a, Excerpt, Point>, } @@ -134,13 +133,11 @@ pub struct MultiBufferBytes<'a> { } struct ExcerptChunks<'a> { - header_height: usize, content_chunks: BufferChunks<'a>, footer_height: usize, } struct ExcerptBytes<'a> { - header_height: usize, content_bytes: language::rope::Bytes<'a>, footer_height: usize, } @@ -326,8 +323,7 @@ impl MultiBuffer { cursor.prev(&()); } let start_excerpt = cursor.item().expect("start offset out of bounds"); - let start_overshoot = - (start - cursor.start()).saturating_sub(start_excerpt.header_height as usize); + let start_overshoot = start - cursor.start(); let buffer_start = start_excerpt.range.start.to_offset(&start_excerpt.buffer) + start_overshoot; @@ -336,8 +332,7 @@ impl MultiBuffer { cursor.prev(&()); } let end_excerpt = cursor.item().expect("end offset out of bounds"); - let end_overshoot = - (end - cursor.start()).saturating_sub(end_excerpt.header_height as usize); + let end_overshoot = end - cursor.start(); let buffer_end = end_excerpt.range.start.to_offset(&end_excerpt.buffer) + end_overshoot; if start_excerpt.id == end_excerpt.id { @@ -749,10 +744,8 @@ impl MultiBuffer { old_excerpt.range.clone(), ) .map(|mut edit| { - let excerpt_old_start = - cursor.start().1 + old_excerpt.header_height as usize; - let excerpt_new_start = new_excerpts.summary().text.bytes - + old_excerpt.header_height as usize; + let excerpt_old_start = cursor.start().1; + let excerpt_new_start = new_excerpts.summary().text.bytes; edit.old.start += excerpt_old_start; edit.old.end += excerpt_old_start; edit.new.start += excerpt_new_start; @@ -831,14 +824,12 @@ impl MultiBufferSnapshot { pub fn excerpt_headers_in_range<'a>( &'a self, range: Range, - ) -> impl 'a + Iterator, RenderHeaderFn)> { + ) -> impl 'a + Iterator { let mut cursor = self.excerpts.cursor::(); cursor.seek(&Point::new(range.start, 0), Bias::Right, &()); - if let Some(excerpt) = cursor.item() { - if range.start >= cursor.start().row + excerpt.header_height as u32 { - cursor.next(&()); - } + if cursor.item().is_some() && range.start > cursor.start().row { + cursor.next(&()); } 
iter::from_fn(move || { @@ -849,9 +840,8 @@ impl MultiBufferSnapshot { if let Some(render) = excerpt.render_header.clone() { let start = cursor.start().row; - let end = start + excerpt.header_height as u32; cursor.next(&()); - return Some((start..end, render)); + return Some((start, excerpt.header_height, render)); } else { cursor.next(&()); } @@ -868,12 +858,9 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&offset, Bias::Left, &()); let mut excerpt_chunks = cursor.item().map(|excerpt| { - let start_after_header = cursor.start() + excerpt.header_height as usize; let end_before_footer = cursor.start() + excerpt.text_summary.bytes; - let start = excerpt.range.start.to_offset(&excerpt.buffer); - let end = - start + (cmp::min(offset, end_before_footer).saturating_sub(start_after_header)); + let end = start + (cmp::min(offset, end_before_footer) - cursor.start()); excerpt.buffer.reversed_chunks_in_range(start..end) }); iter::from_fn(move || { @@ -888,11 +875,7 @@ impl MultiBufferSnapshot { } let excerpt = cursor.item().unwrap(); - if offset <= cursor.start() + excerpt.header_height as usize { - let header_height = offset - cursor.start(); - offset -= header_height; - Some(unsafe { str::from_utf8_unchecked(&NEWLINES[..header_height]) }) - } else if offset == cursor.end(&()) && excerpt.has_trailing_newline { + if offset == cursor.end(&()) && excerpt.has_trailing_newline { offset -= 1; Some("\n") } else { @@ -953,93 +936,48 @@ impl MultiBufferSnapshot { pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { let mut cursor = self.excerpts.cursor::(); cursor.seek(&offset, Bias::Right, &()); - if let Some(excerpt) = cursor.item() { - let header_end = *cursor.start() + excerpt.header_height as usize; - if offset < header_end { - if bias == Bias::Left { - cursor.prev(&()); - if let Some(excerpt) = cursor.item() { - return *cursor.start() + excerpt.text_summary.bytes; - } - } - header_end - } else { - let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); - let buffer_offset = excerpt - .buffer - .clip_offset(excerpt_start + (offset - header_end), bias); - let offset_in_excerpt = if buffer_offset > excerpt_start { - buffer_offset - excerpt_start - } else { - 0 - }; - header_end + offset_in_excerpt - } + let overshoot = if let Some(excerpt) = cursor.item() { + let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); + let buffer_offset = excerpt + .buffer + .clip_offset(excerpt_start + (offset - cursor.start()), bias); + buffer_offset.saturating_sub(excerpt_start) } else { - self.excerpts.summary().text.bytes - } + 0 + }; + cursor.start() + overshoot } pub fn clip_point(&self, point: Point, bias: Bias) -> Point { let mut cursor = self.excerpts.cursor::(); cursor.seek(&point, Bias::Right, &()); - if let Some(excerpt) = cursor.item() { - let header_end = *cursor.start() + Point::new(excerpt.header_height as u32, 0); - if point < header_end { - if bias == Bias::Left { - cursor.prev(&()); - if let Some(excerpt) = cursor.item() { - return *cursor.start() + excerpt.text_summary.lines; - } - } - header_end - } else { - let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); - let buffer_point = excerpt - .buffer - .clip_point(excerpt_start + (point - header_end), bias); - let point_in_excerpt = if buffer_point > excerpt_start { - buffer_point - excerpt_start - } else { - Point::zero() - }; - header_end + point_in_excerpt - } + let overshoot = if let Some(excerpt) = cursor.item() { + let excerpt_start = 
excerpt.range.start.to_point(&excerpt.buffer); + let buffer_point = excerpt + .buffer + .clip_point(excerpt_start + (point - cursor.start()), bias); + buffer_point.saturating_sub(excerpt_start) } else { - self.excerpts.summary().text.lines - } + Point::zero() + }; + *cursor.start() + overshoot } pub fn clip_point_utf16(&self, point: PointUtf16, bias: Bias) -> PointUtf16 { let mut cursor = self.excerpts.cursor::(); cursor.seek(&point, Bias::Right, &()); - if let Some(excerpt) = cursor.item() { - let header_end = *cursor.start() + PointUtf16::new(excerpt.header_height as u32, 0); - if point < header_end { - if bias == Bias::Left { - cursor.prev(&()); - if let Some(excerpt) = cursor.item() { - return *cursor.start() + excerpt.text_summary.lines_utf16; - } - } - header_end - } else { - let excerpt_start = excerpt - .buffer - .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); - let buffer_point = excerpt - .buffer - .clip_point_utf16(excerpt_start + (point - header_end), bias); - let point_in_excerpt = if buffer_point > excerpt_start { - buffer_point - excerpt_start - } else { - PointUtf16::new(0, 0) - }; - header_end + point_in_excerpt - } + let overshoot = if let Some(excerpt) = cursor.item() { + let excerpt_start = excerpt + .buffer + .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); + let buffer_point = excerpt + .buffer + .clip_point_utf16(excerpt_start + (point - cursor.start()), bias); + buffer_point.saturating_sub(excerpt_start) } else { - self.excerpts.summary().text.lines_utf16 - } + PointUtf16::zero() + }; + *cursor.start() + overshoot } pub fn bytes_in_range<'a, T: ToOffset>(&'a self, range: Range) -> MultiBufferBytes<'a> { @@ -1067,7 +1005,6 @@ impl MultiBufferSnapshot { pub fn buffer_rows<'a>(&'a self, start_row: u32) -> MultiBufferRows<'a> { let mut result = MultiBufferRows { - header_height: 0, buffer_row_range: 0..0, excerpts: self.excerpts.cursor(), }; @@ -1097,19 +1034,12 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let (start_offset, start_point) = cursor.start(); let overshoot = offset - start_offset; - let header_height = excerpt.header_height as usize; - if overshoot < header_height { - *start_point + Point::new(overshoot as u32, 0) - } else { - let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); - let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); - let buffer_point = excerpt - .buffer - .offset_to_point(excerpt_start_offset + (overshoot - header_height)); - *start_point - + Point::new(header_height as u32, 0) - + (buffer_point - excerpt_start_point) - } + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); + let buffer_point = excerpt + .buffer + .offset_to_point(excerpt_start_offset + overshoot); + *start_point + (buffer_point - excerpt_start_point) } else { self.excerpts.summary().text.lines } @@ -1121,18 +1051,12 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); let overshoot = point - start_point; - let header_height = Point::new(excerpt.header_height as u32, 0); - if overshoot < header_height { - start_offset + overshoot.row as usize - } else { - let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); - let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); - let buffer_offset = excerpt - .buffer - .point_to_offset(excerpt_start_point + (overshoot - 
header_height)); - *start_offset + excerpt.header_height as usize + buffer_offset - - excerpt_start_offset - } + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt.range.start.to_point(&excerpt.buffer); + let buffer_offset = excerpt + .buffer + .point_to_offset(excerpt_start_point + overshoot); + *start_offset + buffer_offset - excerpt_start_offset } else { self.excerpts.summary().text.bytes } @@ -1144,21 +1068,14 @@ impl MultiBufferSnapshot { if let Some(excerpt) = cursor.item() { let (start_point, start_offset) = cursor.start(); let overshoot = point - start_point; - let header_height = PointUtf16::new(excerpt.header_height as u32, 0); - if overshoot < header_height { - start_offset + overshoot.row as usize - } else { - let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); - let excerpt_start_point = excerpt - .buffer - .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); - let buffer_offset = excerpt - .buffer - .point_utf16_to_offset(excerpt_start_point + (overshoot - header_height)); - *start_offset - + excerpt.header_height as usize - + (buffer_offset - excerpt_start_offset) - } + let excerpt_start_offset = excerpt.range.start.to_offset(&excerpt.buffer); + let excerpt_start_point = excerpt + .buffer + .offset_to_point_utf16(excerpt.range.start.to_offset(&excerpt.buffer)); + let buffer_offset = excerpt + .buffer + .point_utf16_to_offset(excerpt_start_point + overshoot); + *start_offset + (buffer_offset - excerpt_start_offset) } else { self.excerpts.summary().text.bytes } @@ -1188,18 +1105,15 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(row, 0), Bias::Right, &()); if let Some(excerpt) = cursor.item() { let overshoot = row - cursor.start().row; - let header_height = excerpt.header_height as u32; - if overshoot >= header_height { - let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); - let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer); - let buffer_row = excerpt_start.row + overshoot - header_height; - let line_start = Point::new(buffer_row, 0); - let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row)); - return Some(( - &excerpt.buffer, - line_start.max(excerpt_start)..line_end.min(excerpt_end), - )); - } + let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer); + let excerpt_end = excerpt.range.end.to_point(&excerpt.buffer); + let buffer_row = excerpt_start.row + overshoot; + let line_start = Point::new(buffer_row, 0); + let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row)); + return Some(( + &excerpt.buffer, + line_start.max(excerpt_start)..line_end.min(excerpt_end), + )); } None } @@ -1222,31 +1136,15 @@ impl MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(); cursor.seek(&range.start, Bias::Right, &()); if let Some(excerpt) = cursor.item() { - let start_after_header = cursor.start() + excerpt.header_height as usize; - if range.start < start_after_header { - let header_len = cmp::min(range.end, start_after_header) - range.start; - summary.add_assign(&D::from_text_summary(&TextSummary { - bytes: header_len, - lines: Point::new(header_len as u32, 0), - lines_utf16: PointUtf16::new(header_len as u32, 0), - first_line_chars: 0, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 0, - })); - range.start = start_after_header; - range.end = cmp::max(range.start, range.end); - } - let mut end_before_newline = cursor.end(&()); if excerpt.has_trailing_newline { end_before_newline -= 1; } 
let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); - let start_in_excerpt = excerpt_start + (range.start - start_after_header); + let start_in_excerpt = excerpt_start + (range.start - cursor.start()); let end_in_excerpt = - excerpt_start + (cmp::min(end_before_newline, range.end) - start_after_header); + excerpt_start + (cmp::min(end_before_newline, range.end) - cursor.start()); summary.add_assign( &excerpt .buffer @@ -1275,28 +1173,15 @@ impl MultiBufferSnapshot { &(), ))); if let Some(excerpt) = cursor.item() { - let start_after_header = cursor.start() + excerpt.header_height as usize; - let header_len = - cmp::min(range.end - cursor.start(), excerpt.header_height as usize); - summary.add_assign(&D::from_text_summary(&TextSummary { - bytes: header_len, - lines: Point::new(header_len as u32, 0), - lines_utf16: PointUtf16::new(header_len as u32, 0), - first_line_chars: 0, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 0, - })); - range.end = cmp::max(start_after_header, range.end); + range.end = cmp::max(*cursor.start(), range.end); let excerpt_start = excerpt.range.start.to_offset(&excerpt.buffer); - let end_in_excerpt = excerpt_start + (range.end - start_after_header); + let end_in_excerpt = excerpt_start + (range.end - cursor.start()); summary.add_assign( &excerpt .buffer .text_summary_for_range(excerpt_start..end_in_excerpt), ); - cursor.next(&()); } } @@ -1315,7 +1200,6 @@ impl MultiBufferSnapshot { let mut position = D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { - position.add_summary(&excerpt.header_summary(), &()); if excerpt.id == anchor.excerpt_id { let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); let buffer_position = anchor.text_anchor.summary::(&excerpt.buffer); @@ -1351,9 +1235,8 @@ impl MultiBufferSnapshot { cursor.next(&()); } - let mut position = D::from_text_summary(&cursor.start().text); + let position = D::from_text_summary(&cursor.start().text); if let Some(excerpt) = cursor.item() { - position.add_summary(&excerpt.header_summary(), &()); if excerpt.id == *excerpt_id { let excerpt_buffer_start = excerpt.range.start.summary::(&excerpt.buffer); summaries.extend( @@ -1395,8 +1278,7 @@ impl MultiBufferSnapshot { cursor.prev(&()); } if let Some(excerpt) = cursor.item() { - let start_after_header = cursor.start().0 + excerpt.header_height as usize; - let mut overshoot = offset.saturating_sub(start_after_header); + let mut overshoot = offset.saturating_sub(cursor.start().0); if excerpt.has_trailing_newline && offset == cursor.end(&()).0 { overshoot -= 1; bias = Bias::Right; @@ -1458,14 +1340,12 @@ impl MultiBufferSnapshot { let excerpt_buffer_start = start_excerpt.range.start.to_offset(&start_excerpt.buffer); - let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes - - start_excerpt.header_height as usize; + let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes; - let start_after_header = cursor.start() + start_excerpt.header_height as usize; let start_in_buffer = - excerpt_buffer_start + range.start.saturating_sub(start_after_header); + excerpt_buffer_start + range.start.saturating_sub(*cursor.start()); let end_in_buffer = - excerpt_buffer_start + range.end.saturating_sub(start_after_header); + excerpt_buffer_start + range.end.saturating_sub(*cursor.start()); let (mut start_bracket_range, mut end_bracket_range) = start_excerpt .buffer .enclosing_bracket_ranges(start_in_buffer..end_in_buffer)?; @@ -1474,13 +1354,13 @@ impl 
MultiBufferSnapshot { && end_bracket_range.end < excerpt_buffer_end { start_bracket_range.start = - start_after_header + (start_bracket_range.start - excerpt_buffer_start); + cursor.start() + (start_bracket_range.start - excerpt_buffer_start); start_bracket_range.end = - start_after_header + (start_bracket_range.end - excerpt_buffer_start); + cursor.start() + (start_bracket_range.end - excerpt_buffer_start); end_bracket_range.start = - start_after_header + (end_bracket_range.start - excerpt_buffer_start); + cursor.start() + (end_bracket_range.start - excerpt_buffer_start); end_bracket_range.end = - start_after_header + (end_bracket_range.end - excerpt_buffer_start); + cursor.start() + (end_bracket_range.end - excerpt_buffer_start); Some((start_bracket_range, end_bracket_range)) } else { None @@ -1551,14 +1431,12 @@ impl MultiBufferSnapshot { let excerpt_buffer_start = start_excerpt.range.start.to_offset(&start_excerpt.buffer); - let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes - - start_excerpt.header_height as usize; + let excerpt_buffer_end = excerpt_buffer_start + start_excerpt.text_summary.bytes; - let start_after_header = cursor.start() + start_excerpt.header_height as usize; let start_in_buffer = - excerpt_buffer_start + range.start.saturating_sub(start_after_header); + excerpt_buffer_start + range.start.saturating_sub(*cursor.start()); let end_in_buffer = - excerpt_buffer_start + range.end.saturating_sub(start_after_header); + excerpt_buffer_start + range.end.saturating_sub(*cursor.start()); let mut ancestor_buffer_range = start_excerpt .buffer .range_for_syntax_ancestor(start_in_buffer..end_in_buffer)?; @@ -1566,9 +1444,8 @@ impl MultiBufferSnapshot { cmp::max(ancestor_buffer_range.start, excerpt_buffer_start); ancestor_buffer_range.end = cmp::min(ancestor_buffer_range.end, excerpt_buffer_end); - let start = - start_after_header + (ancestor_buffer_range.start - excerpt_buffer_start); - let end = start_after_header + (ancestor_buffer_range.end - excerpt_buffer_start); + let start = cursor.start() + (ancestor_buffer_range.start - excerpt_buffer_start); + let end = cursor.start() + (ancestor_buffer_range.end - excerpt_buffer_start); Some(start..end) }) } @@ -1737,54 +1614,26 @@ impl Excerpt { render_header: Option, has_trailing_newline: bool, ) -> Self { - let mut text_summary = - buffer.text_summary_for_range::(range.to_offset(&buffer)); - if header_height > 0 { - text_summary.first_line_chars = 0; - text_summary.lines.row += header_height as u32; - text_summary.lines_utf16.row += header_height as u32; - text_summary.bytes += header_height as usize; - text_summary.longest_row += header_height as u32; - } Excerpt { id, + text_summary: buffer.text_summary_for_range::(range.to_offset(&buffer)), buffer_id, buffer, range, - text_summary, header_height, render_header, has_trailing_newline, } } - fn header_summary(&self) -> TextSummary { - TextSummary { - bytes: self.header_height as usize, - lines: Point::new(self.header_height as u32, 0), - lines_utf16: PointUtf16::new(self.header_height as u32, 0), - first_line_chars: 0, - last_line_chars: 0, - longest_row: 0, - longest_row_chars: 0, - } - } - fn chunks_in_range<'a>( &'a self, range: Range, theme: Option<&'a SyntaxTheme>, ) -> ExcerptChunks<'a> { let content_start = self.range.start.to_offset(&self.buffer); - let chunks_start = content_start + range.start.saturating_sub(self.header_height as usize); - let chunks_end = content_start - + cmp::min(range.end, self.text_summary.bytes) - 
.saturating_sub(self.header_height as usize); - - let header_height = cmp::min( - (self.header_height as usize).saturating_sub(range.start), - range.len(), - ); + let chunks_start = content_start + range.start; + let chunks_end = content_start + cmp::min(range.end, self.text_summary.bytes); let footer_height = if self.has_trailing_newline && range.start <= self.text_summary.bytes @@ -1798,7 +1647,6 @@ impl Excerpt { let content_chunks = self.buffer.chunks(chunks_start..chunks_end, theme); ExcerptChunks { - header_height, content_chunks, footer_height, } @@ -1806,16 +1654,8 @@ impl Excerpt { fn bytes_in_range(&self, range: Range) -> ExcerptBytes { let content_start = self.range.start.to_offset(&self.buffer); - let bytes_start = content_start + range.start.saturating_sub(self.header_height as usize); - let bytes_end = content_start - + cmp::min(range.end, self.text_summary.bytes) - .saturating_sub(self.header_height as usize); - - let header_height = cmp::min( - (self.header_height as usize).saturating_sub(range.start), - range.len(), - ); - + let bytes_start = content_start + range.start; + let bytes_end = content_start + cmp::min(range.end, self.text_summary.bytes); let footer_height = if self.has_trailing_newline && range.start <= self.text_summary.bytes && range.end > self.text_summary.bytes @@ -1824,11 +1664,9 @@ impl Excerpt { } else { 0 }; - let content_bytes = self.buffer.bytes_in_range(bytes_start..bytes_end); ExcerptBytes { - header_height, content_bytes, footer_height, } @@ -1860,7 +1698,6 @@ impl fmt::Debug for Excerpt { .field("buffer_id", &self.buffer_id) .field("range", &self.range) .field("text_summary", &self.text_summary) - .field("header_height", &self.header_height) .field("has_trailing_newline", &self.has_trailing_newline) .finish() } @@ -1935,7 +1772,6 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { impl<'a> MultiBufferRows<'a> { pub fn seek(&mut self, row: u32) { - self.header_height = 0; self.buffer_row_range = 0..0; self.excerpts @@ -1952,13 +1788,8 @@ impl<'a> MultiBufferRows<'a> { if let Some(excerpt) = self.excerpts.item() { let overshoot = row - self.excerpts.start().row; let excerpt_start = excerpt.range.start.to_point(&excerpt.buffer).row; - let excerpt_header_height = excerpt.header_height as u32; - - self.header_height = excerpt_header_height.saturating_sub(overshoot); - self.buffer_row_range.start = - excerpt_start + overshoot.saturating_sub(excerpt_header_height); - self.buffer_row_range.end = - excerpt_start + excerpt.text_summary.lines.row + 1 - excerpt_header_height; + self.buffer_row_range.start = excerpt_start + overshoot; + self.buffer_row_range.end = excerpt_start + excerpt.text_summary.lines.row + 1; } } } @@ -1968,10 +1799,6 @@ impl<'a> Iterator for MultiBufferRows<'a> { fn next(&mut self) -> Option { loop { - if self.header_height > 0 { - self.header_height -= 1; - return Some(None); - } if !self.buffer_row_range.is_empty() { let row = Some(self.buffer_row_range.start); self.buffer_row_range.start += 1; @@ -1980,11 +1807,9 @@ impl<'a> Iterator for MultiBufferRows<'a> { self.excerpts.item()?; self.excerpts.next(&()); let excerpt = self.excerpts.item()?; - self.header_height = excerpt.header_height as u32; self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; self.buffer_row_range.end = - self.buffer_row_range.start + excerpt.text_summary.lines.row + 1 - - self.header_height; + self.buffer_row_range.start + excerpt.text_summary.lines.row + 1; } } } @@ -2078,12 +1903,6 @@ impl<'a> Iterator 
for ExcerptBytes<'a> { type Item = &'a [u8]; fn next(&mut self) -> Option { - if self.header_height > 0 { - let result = &NEWLINES[..self.header_height]; - self.header_height = 0; - return Some(result); - } - if let Some(chunk) = self.content_bytes.next() { if !chunk.is_empty() { return Some(chunk); @@ -2104,15 +1923,6 @@ impl<'a> Iterator for ExcerptChunks<'a> { type Item = Chunk<'a>; fn next(&mut self) -> Option { - if self.header_height > 0 { - let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.header_height]) }; - self.header_height = 0; - return Some(Chunk { - text, - ..Default::default() - }); - } - if let Some(chunk) = self.content_chunks.next() { return Some(chunk); } @@ -2219,7 +2029,7 @@ mod tests { subscription.consume().into_inner(), [Edit { old: 0..0, - new: 0..12 + new: 0..10 }] ); @@ -2244,8 +2054,8 @@ mod tests { assert_eq!( subscription.consume().into_inner(), [Edit { - old: 12..12, - new: 12..28 + old: 10..10, + new: 10..22 }] ); @@ -2256,81 +2066,64 @@ mod tests { assert_eq!( snapshot.text(), concat!( - "\n", // Preserve newlines - "\n", // - "bbbb\n", // + "bbbb\n", // Preserve newlines "ccccc\n", // - "\n", // "ddd\n", // "eeee\n", // - "\n", // - "\n", // - "\n", // "jj" // ) ); assert_eq!( snapshot.buffer_rows(0).collect::>(), - &[ - None, - None, - Some(1), - Some(2), - None, - Some(3), - Some(4), - None, - None, - None, - Some(3) - ] + [Some(1), Some(2), Some(3), Some(4), Some(3)] ); assert_eq!( snapshot.buffer_rows(2).collect::>(), - &[ - Some(1), - Some(2), - None, - Some(3), - Some(4), - None, - None, - None, - Some(3) - ] + [Some(3), Some(4), Some(3)] ); - assert_eq!(snapshot.buffer_rows(10).collect::>(), &[Some(3)]); - assert_eq!(snapshot.buffer_rows(11).collect::>(), &[]); - assert_eq!(snapshot.buffer_rows(12).collect::>(), &[]); + assert_eq!(snapshot.buffer_rows(4).collect::>(), [Some(3)]); + assert_eq!(snapshot.buffer_rows(5).collect::>(), []); { let snapshot = multibuffer.read(cx).read(cx); assert_eq!( snapshot .excerpt_headers_in_range(0..snapshot.max_point().row + 1) - .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .map(|(start_row, header_height, render)| ( + start_row, + header_height, + render(cx).name().unwrap().to_string() + )) .collect::>(), &[ - (0..2, "header 1".into()), - (4..5, "header 2".into()), - (7..10, "header 3".into()) + (0, 2, "header 1".into()), + (2, 1, "header 2".into()), + (4, 3, "header 3".into()) ] ); assert_eq!( snapshot - .excerpt_headers_in_range(1..5) - .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .excerpt_headers_in_range(1..4) + .map(|(start_row, header_height, render)| ( + start_row, + header_height, + render(cx).name().unwrap().to_string() + )) .collect::>(), - &[(0..2, "header 1".into()), (4..5, "header 2".into())] + &[(2, 1, "header 2".into())] ); assert_eq!( snapshot - .excerpt_headers_in_range(2..8) - .map(|(rows, render)| (rows, render(cx).name().unwrap().to_string())) + .excerpt_headers_in_range(2..5) + .map(|(start_row, header_height, render)| ( + start_row, + header_height, + render(cx).name().unwrap().to_string() + )) .collect::>(), - &[(4..5, "header 2".into()), (7..10, "header 3".into())] + &[(2, 1, "header 2".into()), (4, 3, "header 3".into())] ); } @@ -2348,17 +2141,11 @@ mod tests { assert_eq!( multibuffer.read(cx).snapshot(cx).text(), concat!( - "\n", // Preserve newlines - "\n", // - "bbbb\n", // + "bbbb\n", // Preserve newlines "c\n", // "cc\n", // - "\n", // "ddd\n", // "eeee\n", // - "\n", // - "\n", // - "\n", // "jj" // ) ); @@ -2366,43 
+2153,32 @@ mod tests { assert_eq!( subscription.consume().into_inner(), [Edit { - old: 8..10, - new: 8..9 + old: 6..8, + new: 6..7 }] ); + // bbbb\nc\ncc\nddd\neeee\njj let multibuffer = multibuffer.read(cx).snapshot(cx); assert_eq!( - multibuffer.clip_point(Point::new(0, 0), Bias::Left), - Point::new(2, 0) - ); - assert_eq!( - multibuffer.clip_point(Point::new(0, 0), Bias::Right), - Point::new(2, 0) - ); - assert_eq!( - multibuffer.clip_point(Point::new(1, 0), Bias::Left), - Point::new(2, 0) + multibuffer.clip_point(Point::new(0, 5), Bias::Left), + Point::new(0, 4) ); assert_eq!( - multibuffer.clip_point(Point::new(1, 0), Bias::Right), - Point::new(2, 0) + multibuffer.clip_point(Point::new(0, 5), Bias::Right), + Point::new(0, 4) ); assert_eq!( - multibuffer.clip_point(Point::new(8, 0), Bias::Left), - Point::new(7, 4) + multibuffer.clip_point(Point::new(5, 1), Bias::Right), + Point::new(5, 1) ); assert_eq!( - multibuffer.clip_point(Point::new(8, 0), Bias::Right), - Point::new(11, 0) + multibuffer.clip_point(Point::new(5, 2), Bias::Right), + Point::new(5, 2) ); assert_eq!( - multibuffer.clip_point(Point::new(9, 0), Bias::Left), - Point::new(7, 4) - ); - assert_eq!( - multibuffer.clip_point(Point::new(9, 0), Bias::Right), - Point::new(11, 0) + multibuffer.clip_point(Point::new(5, 3), Bias::Right), + Point::new(5, 2) ); } @@ -2464,12 +2240,12 @@ mod tests { }); let old_snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 1); - assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 1); - assert_eq!(Anchor::min().to_offset(&old_snapshot), 1); - assert_eq!(Anchor::min().to_offset(&old_snapshot), 1); - assert_eq!(Anchor::max().to_offset(&old_snapshot), 12); - assert_eq!(Anchor::max().to_offset(&old_snapshot), 12); + assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 0); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); + assert_eq!(Anchor::min().to_offset(&old_snapshot), 0); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); + assert_eq!(Anchor::max().to_offset(&old_snapshot), 10); buffer_1.update(cx, |buffer, cx| { buffer.edit([0..0], "W", cx); @@ -2481,19 +2257,19 @@ mod tests { }); let new_snapshot = multibuffer.read(cx).snapshot(cx); - assert_eq!(old_snapshot.text(), "\nabcd\n\nefghi"); - assert_eq!(new_snapshot.text(), "\nWabcdX\n\nYefghiZ"); + assert_eq!(old_snapshot.text(), "abcd\nefghi"); + assert_eq!(new_snapshot.text(), "WabcdX\nYefghiZ"); - assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 1); - assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 2); - assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0); + assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1); + assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 2); assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2); assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3); assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3); - assert_eq!(old_snapshot.anchor_before(7).to_offset(&new_snapshot), 9); - assert_eq!(old_snapshot.anchor_after(7).to_offset(&new_snapshot), 10); - assert_eq!(old_snapshot.anchor_before(12).to_offset(&new_snapshot), 15); - assert_eq!(old_snapshot.anchor_after(12).to_offset(&new_snapshot), 16); + 
assert_eq!(old_snapshot.anchor_before(5).to_offset(&new_snapshot), 7); + assert_eq!(old_snapshot.anchor_after(5).to_offset(&new_snapshot), 8); + assert_eq!(old_snapshot.anchor_before(10).to_offset(&new_snapshot), 13); + assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14); } #[gpui::test(iterations = 100)] @@ -2564,15 +2340,10 @@ mod tests { let mut excerpt_starts = Vec::new(); let mut expected_text = String::new(); let mut expected_buffer_rows = Vec::new(); - for (buffer, range, header_height) in &expected_excerpts { + for (buffer, range, _) in &expected_excerpts { let buffer = buffer.read(cx); let buffer_range = range.to_offset(buffer); - for _ in 0..*header_height { - expected_text.push('\n'); - expected_buffer_rows.push(None); - } - excerpt_starts.push(TextSummary::from(expected_text.as_str())); expected_text.extend(buffer.text_for_range(buffer_range.clone())); expected_text.push('\n'); diff --git a/crates/text/src/point.rs b/crates/text/src/point.rs index f36357650f3c3a0e8012d7fadf94eeeee2533a7b..9eded9f44354a851a88e1e0ae073bf7a0e11c8cf 100644 --- a/crates/text/src/point.rs +++ b/crates/text/src/point.rs @@ -38,7 +38,7 @@ impl Point { pub fn saturating_sub(self, other: Self) -> Self { if self < other { - Point::zero() + Self::zero() } else { self - other } diff --git a/crates/text/src/point_utf16.rs b/crates/text/src/point_utf16.rs index 22b895a2c009b0d38ee8b82c9d1e5f1401578b8d..7b76ee377686f8cb9877de4cf44811799abcb320 100644 --- a/crates/text/src/point_utf16.rs +++ b/crates/text/src/point_utf16.rs @@ -26,6 +26,14 @@ impl PointUtf16 { pub fn is_zero(&self) -> bool { self.row == 0 && self.column == 0 } + + pub fn saturating_sub(self, other: Self) -> Self { + if self < other { + Self::zero() + } else { + self - other + } + } } impl<'a> Add<&'a Self> for PointUtf16 { From 91c786a8db1fa54212111eaf2d9025b06fbafe37 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 17:38:03 +0100 Subject: [PATCH 158/196] WIP: Insert blocks in `BlockMap` for `MultiBuffer` headers --- crates/diagnostics/src/diagnostics.rs | 2 +- crates/editor/src/display_map.rs | 20 +----- crates/editor/src/display_map/block_map.rs | 42 +++++++++--- crates/editor/src/element.rs | 77 ++++++++-------------- 4 files changed, 62 insertions(+), 79 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index def194b72474fd06a33a5edf1d5f914e6a5a56f1..826376766af5d17bc8eadecf2a7358452f98ba4a 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -345,7 +345,7 @@ mod tests { view.update(cx, |view, cx| { view.populate_excerpts(buffer, cx); assert_eq!( - view.excerpts.read(cx).read(cx).text(), + view.editor.update(cx, |editor, cx| editor.display_text(cx)), concat!( "\n", // primary diagnostic message "\n", // filename diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 494f662759c791bddc8667ad337cb75a0baf00c0..469dac5802cf59f616b6873d5fa926ad0e3d6de4 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -3,9 +3,7 @@ mod fold_map; mod tab_map; mod wrap_map; -use crate::{ - multi_buffer::RenderHeaderFn, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, -}; +use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; use block_map::{BlockMap, BlockPoint}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle}; @@ -344,22 +342,6 @@ impl DisplaySnapshot { 
self.blocks_snapshot.blocks_in_range(rows) } - pub fn excerpt_headers_in_range<'a>( - &'a self, - rows: Range, - ) -> impl 'a + Iterator, RenderHeaderFn)> { - todo!(); - let start_row = DisplayPoint::new(rows.start, 0).to_point(self).row; - let end_row = DisplayPoint::new(rows.end, 0).to_point(self).row; - self.buffer_snapshot - .excerpt_headers_in_range(start_row..end_row) - .map(move |(row, header_height, render)| { - let start_row = Point::new(rows.start, 0).to_display_point(self).row(); - let end_row = Point::new(rows.end, 0).to_display_point(self).row(); - (start_row..end_row, render) - }) - } - pub fn intersects_fold(&self, offset: T) -> bool { self.folds_snapshot.intersects_fold(offset) } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 620ac97e06bc73bdf51ed91acf36584e70e92184..589ed36c3bc811891ae58b9916ff8094ce7f1038 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -227,8 +227,9 @@ impl BlockMap { } // Find the blocks within this edited region. - let new_start = wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left); - let start_anchor = buffer.anchor_before(new_start); + let new_buffer_start = + wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left); + let start_anchor = buffer.anchor_before(new_buffer_start); let start_block_ix = match self.blocks[last_block_ix..].binary_search_by(|probe| { probe .position @@ -238,11 +239,14 @@ impl BlockMap { }) { Ok(ix) | Err(ix) => last_block_ix + ix, }; + + let new_buffer_end; let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() { + new_buffer_end = wrap_snapshot.buffer_snapshot().max_point() + Point::new(1, 0); self.blocks.len() } else { - let new_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); - let end_anchor = buffer.anchor_before(new_end); + new_buffer_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); + let end_anchor = buffer.anchor_before(new_buffer_end); match self.blocks[start_block_ix..].binary_search_by(|probe| { probe .position @@ -254,7 +258,26 @@ impl BlockMap { } }; last_block_ix = end_block_ix; - blocks_in_edit.clear(); + + debug_assert!(blocks_in_edit.is_empty()); + blocks_in_edit.extend( + wrap_snapshot + .buffer_snapshot() + .excerpt_headers_in_range(new_buffer_start.row..new_buffer_end.row) + .map(|(start_row, header_height, render_header)| { + ( + start_row, + 0, + Arc::new(Block { + id: Default::default(), + position: Anchor::min(), + height: header_height, + render: Mutex::new(Arc::new(move |cx| render_header(cx))), + disposition: BlockDisposition::Above, + }), + ) + }), + ); blocks_in_edit.extend( self.blocks[start_block_ix..end_block_ix] .iter() @@ -268,22 +291,21 @@ impl BlockMap { } } let position = wrap_snapshot.from_point(position, Bias::Left); - (position.row(), column, block) + (position.row(), column, block.clone()) }), ); - blocks_in_edit - .sort_unstable_by_key(|(row, _, block)| (*row, block.disposition, block.id)); + blocks_in_edit.sort_by_key(|(row, _, block)| (*row, block.disposition, block.id)); // For each of these blocks, insert a new isomorphic transform preceding the block, // and then insert the block itself. - for (block_row, column, block) in blocks_in_edit.iter().copied() { + for (block_row, column, block) in blocks_in_edit.drain(..) 
{ let insertion_row = match block.disposition { BlockDisposition::Above => block_row, BlockDisposition::Below => block_row + 1, }; let extent_before_block = insertion_row - new_transforms.summary().input_rows; push_isomorphic(&mut new_transforms, extent_before_block); - new_transforms.push(Transform::block(block.clone(), column), &()); + new_transforms.push(Transform::block(block, column), &()); } old_end = WrapRow(old_end.0.min(old_row_count)); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 0878071cd30dd2eb6641321577bb69db2c3c2226..e790973c8c9e15cefcd8b191fc58b1bf6c9f65be 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -631,55 +631,34 @@ impl EditorElement { line_layouts: &[text_layout::Line], cx: &mut LayoutContext, ) -> Vec<(u32, ElementBox)> { - let mut blocks = Vec::new(); - - blocks.extend( - snapshot - .blocks_in_range(rows.clone()) - .map(|(start_row, block)| { - let anchor_row = block - .position() - .to_point(&snapshot.buffer_snapshot) - .to_display_point(snapshot) - .row(); - - let anchor_x = if rows.contains(&anchor_row) { - line_layouts[(anchor_row - rows.start) as usize] - .x_for_index(block.column() as usize) - } else { - layout_line(anchor_row, snapshot, style, cx.text_layout_cache) - .x_for_index(block.column() as usize) - }; - - let mut element = block.render(&BlockContext { cx, anchor_x }); - element.layout( - SizeConstraint { - min: Vector2F::zero(), - max: vec2f(text_width, block.height() as f32 * line_height), - }, - cx, - ); - (start_row, element) - }), - ); - - blocks.extend( - snapshot - .excerpt_headers_in_range(rows.clone()) - .map(|(rows, render)| { - let mut element = render(cx); - element.layout( - SizeConstraint { - min: Vector2F::zero(), - max: vec2f(text_width, rows.len() as f32 * line_height), - }, - cx, - ); - (rows.start, element) - }), - ); - - blocks + snapshot + .blocks_in_range(rows.clone()) + .map(|(start_row, block)| { + let anchor_row = block + .position() + .to_point(&snapshot.buffer_snapshot) + .to_display_point(snapshot) + .row(); + + let anchor_x = if rows.contains(&anchor_row) { + line_layouts[(anchor_row - rows.start) as usize] + .x_for_index(block.column() as usize) + } else { + layout_line(anchor_row, snapshot, style, cx.text_layout_cache) + .x_for_index(block.column() as usize) + }; + + let mut element = block.render(&BlockContext { cx, anchor_x }); + element.layout( + SizeConstraint { + min: Vector2F::zero(), + max: vec2f(text_width, block.height() as f32 * line_height), + }, + cx, + ); + (start_row, element) + }) + .collect() } } From eec1748dc7b5f3b560c7b8c2ec69be881c79b4b0 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 21 Dec 2021 18:31:13 +0100 Subject: [PATCH 159/196] Render excerpt headers using `DisplayMap::insert_blocks` Co-Authored-By: Max Brunsfeld --- crates/diagnostics/src/diagnostics.rs | 41 ++++--- crates/editor/src/display_map/block_map.rs | 23 +--- crates/editor/src/editor.rs | 18 +--- crates/editor/src/movement.rs | 47 ++++++-- crates/editor/src/multi_buffer.rs | 119 +-------------------- 5 files changed, 73 insertions(+), 175 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 826376766af5d17bc8eadecf2a7358452f98ba4a..e9efa4046a39ea44f6d0818b4b7cf9b879aa63b2 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -105,33 +105,40 @@ impl ProjectDiagnosticsEditor { let excerpt_start = Point::new(range.start.row.saturating_sub(1), 0); let 
excerpt_end = snapshot .clip_point(Point::new(range.end.row + 1, u32::MAX), Bias::Left); + let excerpt_id = excerpts.push_excerpt( + ExcerptProperties { + buffer: &buffer, + range: excerpt_start..excerpt_end, + }, + excerpts_cx, + ); - let mut excerpt = ExcerptProperties { - buffer: &buffer, - range: excerpt_start..excerpt_end, - header_height: 0, - render_header: None, - }; - + let header_position = (excerpt_id.clone(), language::Anchor::min()); if is_first_excerpt { let primary = &group.entries[group.primary_ix].diagnostic; let mut header = primary.clone(); header.message = primary.message.split('\n').next().unwrap().to_string(); - excerpt.header_height = 2; - excerpt.render_header = Some(diagnostic_header_renderer( - buffer.clone(), - header, - self.build_settings.clone(), - )); + blocks.push(BlockProperties { + position: header_position, + height: 2, + render: diagnostic_header_renderer( + buffer.clone(), + header, + self.build_settings.clone(), + ), + disposition: BlockDisposition::Above, + }); } else { - excerpt.header_height = 1; - excerpt.render_header = - Some(context_header_renderer(self.build_settings.clone())); + blocks.push(BlockProperties { + position: header_position, + height: 1, + render: context_header_renderer(self.build_settings.clone()), + disposition: BlockDisposition::Above, + }); } is_first_excerpt = false; - let excerpt_id = excerpts.push_excerpt(excerpt, excerpts_cx); for entry in &group.entries[*start_ix..ix] { let mut diagnostic = entry.diagnostic.clone(); if diagnostic.is_primary { diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 589ed36c3bc811891ae58b9916ff8094ce7f1038..5f879ef1638e08a5a14abee8eb6d36fcd3e40fac 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -240,12 +240,11 @@ impl BlockMap { Ok(ix) | Err(ix) => last_block_ix + ix, }; - let new_buffer_end; let end_block_ix = if new_end.0 > wrap_snapshot.max_point().row() { - new_buffer_end = wrap_snapshot.buffer_snapshot().max_point() + Point::new(1, 0); self.blocks.len() } else { - new_buffer_end = wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); + let new_buffer_end = + wrap_snapshot.to_point(WrapPoint::new(new_end.0, 0), Bias::Left); let end_anchor = buffer.anchor_before(new_buffer_end); match self.blocks[start_block_ix..].binary_search_by(|probe| { probe @@ -260,24 +259,6 @@ impl BlockMap { last_block_ix = end_block_ix; debug_assert!(blocks_in_edit.is_empty()); - blocks_in_edit.extend( - wrap_snapshot - .buffer_snapshot() - .excerpt_headers_in_range(new_buffer_start.row..new_buffer_end.row) - .map(|(start_row, header_height, render_header)| { - ( - start_row, - 0, - Arc::new(Block { - id: Default::default(), - position: Anchor::min(), - height: header_height, - render: Mutex::new(Arc::new(move |cx| render_header(cx))), - disposition: BlockDisposition::Above, - }), - ) - }), - ); blocks_in_edit.extend( self.blocks[start_block_ix..end_block_ix] .iter() diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8992772577fa76af7a0ae2ae7908286726cb5715..b2243a41516ac2ede06aa48ebbf64414d1377319 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -28,9 +28,7 @@ use language::{ TransactionId, }; pub use multi_buffer::{Anchor, ExcerptProperties, MultiBuffer}; -use multi_buffer::{ - AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, RenderHeaderFn, ToOffset, ToPoint, -}; +use multi_buffer::{AnchorRangeExt, MultiBufferChunks, 
MultiBufferSnapshot, ToOffset, ToPoint}; use postage::watch; use serde::{Deserialize, Serialize}; use smallvec::SmallVec; @@ -3787,12 +3785,12 @@ pub fn diagnostic_header_renderer( buffer: ModelHandle, diagnostic: Diagnostic, build_settings: BuildSettings, -) -> RenderHeaderFn { +) -> RenderBlock { Arc::new(move |cx| { let settings = build_settings(cx); let mut text_style = settings.style.text.clone(); text_style.color = diagnostic_style(diagnostic.severity, true, &settings.style).text; - let file_path = if let Some(file) = buffer.read(cx).file() { + let file_path = if let Some(file) = buffer.read(&**cx).file() { file.path().to_string_lossy().to_string() } else { "untitled".to_string() @@ -3805,7 +3803,7 @@ pub fn diagnostic_header_renderer( }) } -pub fn context_header_renderer(build_settings: BuildSettings) -> RenderHeaderFn { +pub fn context_header_renderer(build_settings: BuildSettings) -> RenderBlock { Arc::new(move |cx| { let settings = build_settings(cx); let text_style = settings.style.text.clone(); @@ -5910,8 +5908,6 @@ mod tests { ExcerptProperties { buffer: &buffer, range: Point::new(0, 0)..Point::new(0, 4), - header_height: 0, - render_header: None, }, cx, ); @@ -5919,8 +5915,6 @@ mod tests { ExcerptProperties { buffer: &buffer, range: Point::new(1, 0)..Point::new(1, 4), - header_height: 0, - render_header: None, }, cx, ); @@ -5964,8 +5958,6 @@ mod tests { ExcerptProperties { buffer: &buffer, range: Point::new(0, 0)..Point::new(1, 4), - header_height: 0, - render_header: None, }, cx, ); @@ -5973,8 +5965,6 @@ mod tests { ExcerptProperties { buffer: &buffer, range: Point::new(1, 0)..Point::new(2, 4), - header_height: 0, - render_header: None, }, cx, ); diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 00b5140a6cd6520d50d1dd4ac7fcd8b0f576c369..217b1e63e4f53dc1ac0766950bf9d31cf03bfa1c 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -238,8 +238,13 @@ fn char_kind(c: char) -> CharKind { #[cfg(test)] mod tests { use super::*; - use crate::{Buffer, DisplayMap, ExcerptProperties, MultiBuffer}; + use crate::{ + display_map::{BlockDisposition, BlockProperties}, + Buffer, DisplayMap, ExcerptProperties, MultiBuffer, + }; + use gpui::{elements::Empty, Element}; use language::Point; + use std::sync::Arc; #[gpui::test] fn test_move_up_and_down_with_excerpts(cx: &mut gpui::MutableAppContext) { @@ -250,31 +255,59 @@ mod tests { .unwrap(); let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx)); + let mut excerpt1_header_position = None; + let mut excerpt2_header_position = None; let multibuffer = cx.add_model(|cx| { let mut multibuffer = MultiBuffer::new(0); - multibuffer.push_excerpt( + let excerpt1_id = multibuffer.push_excerpt( ExcerptProperties { buffer: &buffer, range: Point::new(0, 0)..Point::new(1, 4), - header_height: 2, - render_header: None, }, cx, ); - multibuffer.push_excerpt( + let excerpt2_id = multibuffer.push_excerpt( ExcerptProperties { buffer: &buffer, range: Point::new(2, 0)..Point::new(3, 2), - header_height: 3, - render_header: None, }, cx, ); + + excerpt1_header_position = Some( + multibuffer + .read(cx) + .anchor_in_excerpt(excerpt1_id, language::Anchor::min()), + ); + excerpt2_header_position = Some( + multibuffer + .read(cx) + .anchor_in_excerpt(excerpt2_id, language::Anchor::min()), + ); multibuffer }); let display_map = cx.add_model(|cx| DisplayMap::new(multibuffer, 2, font_id, 14.0, None, cx)); + display_map.update(cx, |display_map, cx| { + display_map.insert_blocks( + [ + 
BlockProperties { + position: excerpt1_header_position.unwrap(), + height: 2, + render: Arc::new(|_| Empty::new().boxed()), + disposition: BlockDisposition::Above, + }, + BlockProperties { + position: excerpt2_header_position.unwrap(), + height: 3, + render: Arc::new(|_| Empty::new().boxed()), + disposition: BlockDisposition::Above, + }, + ], + cx, + ) + }); let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx)); assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\n\nhijkl\nmn"); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index a642778f42bde5b512442c05ff211dc72dace908..301499f144c2168195c58e6eea663f4a159e77d7 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -4,7 +4,7 @@ pub use anchor::{Anchor, AnchorRangeExt}; use anyhow::Result; use clock::ReplicaId; use collections::{HashMap, HashSet}; -use gpui::{AppContext, ElementBox, Entity, ModelContext, ModelHandle, Task}; +use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use language::{ Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, ToOffset as _, ToPoint as _, TransactionId, @@ -86,13 +86,9 @@ pub struct MultiBufferSnapshot { has_conflict: bool, } -pub type RenderHeaderFn = Arc ElementBox>; - pub struct ExcerptProperties<'a, T> { pub buffer: &'a ModelHandle, pub range: Range, - pub header_height: u8, - pub render_header: Option, } #[derive(Clone)] @@ -101,9 +97,7 @@ struct Excerpt { buffer_id: usize, buffer: BufferSnapshot, range: Range, - render_header: Option, text_summary: TextSummary, - header_height: u8, has_trailing_newline: bool, } @@ -167,8 +161,6 @@ impl MultiBuffer { ExcerptProperties { buffer: &buffer, range: text::Anchor::min()..text::Anchor::max(), - header_height: 0, - render_header: None, }, cx, ); @@ -229,8 +221,6 @@ impl MultiBuffer { ExcerptProperties { buffer: buffer_handle, range: start_ix..end_ix, - header_height, - render_header: None, }, cx, ); @@ -618,15 +608,7 @@ impl MultiBuffer { ); let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); - let excerpt = Excerpt::new( - id.clone(), - buffer.id(), - buffer_snapshot, - range, - props.header_height, - props.render_header, - false, - ); + let excerpt = Excerpt::new(id.clone(), buffer.id(), buffer_snapshot, range, false); snapshot.excerpts.push(excerpt, &()); self.buffers .entry(props.buffer.id()) @@ -759,8 +741,6 @@ impl MultiBuffer { buffer_state.buffer.id(), buffer.snapshot(), old_excerpt.range.clone(), - old_excerpt.header_height, - old_excerpt.render_header.clone(), old_excerpt.has_trailing_newline, ); } else { @@ -821,35 +801,6 @@ impl MultiBufferSnapshot { .collect() } - pub fn excerpt_headers_in_range<'a>( - &'a self, - range: Range, - ) -> impl 'a + Iterator { - let mut cursor = self.excerpts.cursor::(); - cursor.seek(&Point::new(range.start, 0), Bias::Right, &()); - - if cursor.item().is_some() && range.start > cursor.start().row { - cursor.next(&()); - } - - iter::from_fn(move || { - while let Some(excerpt) = cursor.item() { - if cursor.start().row >= range.end { - break; - } - - if let Some(render) = excerpt.render_header.clone() { - let start = cursor.start().row; - cursor.next(&()); - return Some((start, excerpt.header_height, render)); - } else { - cursor.next(&()); - } - } - None - }) - } - pub fn reversed_chars_at<'a, T: ToOffset>( &'a self, position: T, @@ -1610,8 +1561,6 @@ impl Excerpt { buffer_id: usize, buffer: BufferSnapshot, range: Range, - header_height: u8, - 
render_header: Option, has_trailing_newline: bool, ) -> Self { Excerpt { @@ -1620,8 +1569,6 @@ impl Excerpt { buffer_id, buffer, range, - header_height, - render_header, has_trailing_newline, } } @@ -1974,7 +1921,7 @@ impl ToPoint for Point { #[cfg(test)] mod tests { use super::*; - use gpui::{elements::Empty, Element, MutableAppContext}; + use gpui::MutableAppContext; use language::{Buffer, Rope}; use rand::prelude::*; use std::env; @@ -2020,8 +1967,6 @@ mod tests { ExcerptProperties { buffer: &buffer_1, range: Point::new(1, 2)..Point::new(2, 5), - header_height: 2, - render_header: Some(Arc::new(|_| Empty::new().named("header 1"))), }, cx, ); @@ -2037,8 +1982,6 @@ mod tests { ExcerptProperties { buffer: &buffer_1, range: Point::new(3, 3)..Point::new(4, 4), - header_height: 1, - render_header: Some(Arc::new(|_| Empty::new().named("header 2"))), }, cx, ); @@ -2046,8 +1989,6 @@ mod tests { ExcerptProperties { buffer: &buffer_2, range: Point::new(3, 1)..Point::new(3, 3), - header_height: 3, - render_header: Some(Arc::new(|_| Empty::new().named("header 3"))), }, cx, ); @@ -2084,49 +2025,6 @@ mod tests { assert_eq!(snapshot.buffer_rows(4).collect::>(), [Some(3)]); assert_eq!(snapshot.buffer_rows(5).collect::>(), []); - { - let snapshot = multibuffer.read(cx).read(cx); - assert_eq!( - snapshot - .excerpt_headers_in_range(0..snapshot.max_point().row + 1) - .map(|(start_row, header_height, render)| ( - start_row, - header_height, - render(cx).name().unwrap().to_string() - )) - .collect::>(), - &[ - (0, 2, "header 1".into()), - (2, 1, "header 2".into()), - (4, 3, "header 3".into()) - ] - ); - - assert_eq!( - snapshot - .excerpt_headers_in_range(1..4) - .map(|(start_row, header_height, render)| ( - start_row, - header_height, - render(cx).name().unwrap().to_string() - )) - .collect::>(), - &[(2, 1, "header 2".into())] - ); - - assert_eq!( - snapshot - .excerpt_headers_in_range(2..5) - .map(|(start_row, header_height, render)| ( - start_row, - header_height, - render(cx).name().unwrap().to_string() - )) - .collect::>(), - &[(2, 1, "header 2".into()), (4, 3, "header 3".into())] - ); - } - buffer_1.update(cx, |buffer, cx| { buffer.edit( [ @@ -2158,7 +2056,6 @@ mod tests { }] ); - // bbbb\nc\ncc\nddd\neeee\njj let multibuffer = multibuffer.read(cx).snapshot(cx); assert_eq!( multibuffer.clip_point(Point::new(0, 5), Bias::Left), @@ -2222,8 +2119,6 @@ mod tests { ExcerptProperties { buffer: &buffer_1, range: 0..4, - header_height: 1, - render_header: None, }, cx, ); @@ -2231,8 +2126,6 @@ mod tests { ExcerptProperties { buffer: &buffer_2, range: 0..5, - header_height: 1, - render_header: None, }, cx, ); @@ -2318,8 +2211,6 @@ mod tests { ExcerptProperties { buffer: &buffer_handle, range: start_ix..end_ix, - header_height, - render_header: None, }, cx, ) @@ -2588,8 +2479,6 @@ mod tests { ExcerptProperties { buffer: &buffer_1, range: 0..buffer_1.read(cx).len(), - header_height: 0, - render_header: None, }, cx, ); @@ -2597,8 +2486,6 @@ mod tests { ExcerptProperties { buffer: &buffer_2, range: 0..buffer_2.read(cx).len(), - header_height: 0, - render_header: None, }, cx, ); From 60f716900835b3fd7b04f94a704a5cb5a2188bca Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 10:24:01 -0800 Subject: [PATCH 160/196] Remove header heights from multibuffer randomized test --- crates/editor/src/multi_buffer.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 
301499f144c2168195c58e6eea663f4a159e77d7..f66ed061ad47fb7537a0962372295fab201c076b 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2195,11 +2195,9 @@ mod tests { let buffer = buffer_handle.read(cx); let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); - let header_height = rng.gen_range(0..=5); let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); log::info!( - "Pushing excerpt wih header {}, buffer {}: {:?}[{:?}] = {:?}", - header_height, + "Pushing excerpt for buffer {}: {:?}[{:?}] = {:?}", buffer_handle.id(), buffer.text(), start_ix..end_ix, @@ -2216,7 +2214,7 @@ mod tests { ) }); excerpt_ids.push(excerpt_id); - expected_excerpts.push((buffer_handle.clone(), anchor_range, header_height)); + expected_excerpts.push((buffer_handle.clone(), anchor_range)); } } @@ -2231,7 +2229,7 @@ mod tests { let mut excerpt_starts = Vec::new(); let mut expected_text = String::new(); let mut expected_buffer_rows = Vec::new(); - for (buffer, range, _) in &expected_excerpts { + for (buffer, range) in &expected_excerpts { let buffer = buffer.read(cx); let buffer_range = range.to_offset(buffer); @@ -2269,7 +2267,7 @@ mod tests { } let mut excerpt_starts = excerpt_starts.into_iter(); - for (buffer, range, _) in &expected_excerpts { + for (buffer, range) in &expected_excerpts { let buffer_id = buffer.id(); let buffer = buffer.read(cx); let buffer_range = range.to_offset(buffer); From 04d577e3260a2a6a96738e50c3d0a298b37b828e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 11:46:47 -0800 Subject: [PATCH 161/196] Fix context line handline in project diagnostic view --- crates/diagnostics/src/diagnostics.rs | 85 +++++++++++++++++---- crates/language/build.rs | 1 - crates/server/src/auth.rs | 2 +- crates/server/src/releases.rs | 7 +- crates/text/src/tests.rs | 2 +- crates/theme_selector/src/theme_selector.rs | 2 +- 6 files changed, 75 insertions(+), 24 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index e9efa4046a39ea44f6d0818b4b7cf9b879aa63b2..dd87ec9bd65adae1ea82aa9f49971af5fceb556d 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -16,6 +16,8 @@ use workspace::Workspace; action!(Toggle); +const CONTEXT_LINE_COUNT: u32 = 1; + pub fn init(cx: &mut MutableAppContext) { cx.add_bindings([Binding::new("alt-shift-D", Toggle, None)]); cx.add_action(ProjectDiagnosticsEditor::toggle); @@ -96,15 +98,18 @@ impl ProjectDiagnosticsEditor { for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { if let Some((range, start_ix)) = &mut pending_range { if let Some(entry) = entry { - if entry.range.start.row <= range.end.row + 1 { + if entry.range.start.row <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2 { range.end = range.end.max(entry.range.end); continue; } } - let excerpt_start = Point::new(range.start.row.saturating_sub(1), 0); - let excerpt_end = snapshot - .clip_point(Point::new(range.end.row + 1, u32::MAX), Bias::Left); + let excerpt_start = + Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0); + let excerpt_end = snapshot.clip_point( + Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), + Bias::Left, + ); let excerpt_id = excerpts.push_excerpt( ExcerptProperties { buffer: &buffer, @@ -296,10 +301,8 @@ mod tests { b(y); // comment 1 // comment 2 - // comment 3 - // comment 4 - d(y); - e(x); + 
c(y); + d(x); } " .unindent(); @@ -310,6 +313,18 @@ mod tests { .update_diagnostics( None, vec![ + DiagnosticEntry { + range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + diagnostic: Diagnostic { + message: + "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" + .to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + group_id: 1, + ..Default::default() + }, + }, DiagnosticEntry { range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9), diagnostic: Diagnostic { @@ -322,6 +337,16 @@ mod tests { ..Default::default() }, }, + DiagnosticEntry { + range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7), + diagnostic: Diagnostic { + message: "value moved here".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + group_id: 1, + ..Default::default() + }, + }, DiagnosticEntry { range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7), diagnostic: Diagnostic { @@ -333,7 +358,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7), + range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7), diagnostic: Diagnostic { message: "use of moved value\nvalue used here after move".to_string(), severity: DiagnosticSeverity::ERROR, @@ -342,6 +367,16 @@ mod tests { ..Default::default() }, }, + DiagnosticEntry { + range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7), + diagnostic: Diagnostic { + message: "use of moved value\nvalue used here after move".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + group_id: 1, + ..Default::default() + }, + }, ], cx, ) @@ -351,22 +386,40 @@ mod tests { view.update(cx, |view, cx| { view.populate_excerpts(buffer, cx); + let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); + assert_eq!( - view.editor.update(cx, |editor, cx| editor.display_text(cx)), + editor.text(), concat!( - "\n", // primary diagnostic message + // Diagnostic group 1 (error for `y`) + "\n", // primary message "\n", // filename " let x = vec![];\n", " let y = vec![];\n", - " a(x);\n", - "\n", // context ellipsis + "\n", // supporting diagnostic " a(x);\n", " b(y);\n", + "\n", // supporting diagnostic " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + // Diagnostic group 2 (error for `x`) + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", "\n", // context ellipsis - " // comment 3\n", - " // comment 4\n", - " d(y);" + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" ) ); }); diff --git a/crates/language/build.rs b/crates/language/build.rs index d69cce4d1d847ee3041611307c87fc96762236d8..90d5a4cb2d72cd854c4f09214bb59479c0d22875 100644 --- a/crates/language/build.rs +++ b/crates/language/build.rs @@ -3,4 +3,3 @@ fn main() { println!("cargo:rustc-env=ZED_BUNDLE={}", bundled); } } - diff --git a/crates/server/src/auth.rs b/crates/server/src/auth.rs index 2e2f40fca731ff993464e6b5554acc1afcdd418b..9c6142618fc787b59c407a3c35cf402789a40ec4 100644 --- a/crates/server/src/auth.rs +++ b/crates/server/src/auth.rs @@ -11,6 +11,7 @@ use oauth2::{ TokenResponse as _, TokenUrl, }; use rand::thread_rng; +use rpc::auth as zed_auth; use scrypt::{ password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString}, Scrypt, @@ -19,7 +20,6 @@ use serde::{Deserialize, Serialize}; use std::{borrow::Cow, convert::TryFrom, sync::Arc}; use surf::{StatusCode, Url}; 
use tide::{log, Error, Server}; -use rpc::auth as zed_auth; static CURRENT_GITHUB_USER: &'static str = "current_github_user"; static GITHUB_AUTH_URL: &'static str = "https://github.com/login/oauth/authorize"; diff --git a/crates/server/src/releases.rs b/crates/server/src/releases.rs index f1f341057780c4e8f58e0072296323d77a9dd65d..f0a051fa42273daa186d18595a7b1e33f00e65d9 100644 --- a/crates/server/src/releases.rs +++ b/crates/server/src/releases.rs @@ -2,16 +2,15 @@ use crate::{ auth::RequestExt as _, github::Release, AppState, LayoutData, Request, RequestExt as _, }; use comrak::ComrakOptions; -use serde::{Serialize}; +use serde::Serialize; use std::sync::Arc; -use tide::{http::mime}; +use tide::http::mime; pub fn add_routes(releases: &mut tide::Server>) { releases.at("/releases").get(get_releases); } async fn get_releases(mut request: Request) -> tide::Result { - #[derive(Serialize)] struct ReleasesData { #[serde(flatten)] @@ -52,4 +51,4 @@ async fn get_releases(mut request: Request) -> tide::Result { .body(request.state().render_template("releases.hbs", &data)?) .content_type(mime::HTML) .build()) -} \ No newline at end of file +} diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 94523de5a6298272fbd693aa67d2ef718825e8ae..55163436c5949c6d3770e4b29cf8972c731cf3f8 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -561,7 +561,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { match rng.gen_range(0..=100) { 0..=50 if mutation_count != 0 => { let op = buffer.randomly_edit(&mut rng, 5).2; - network.broadcast(buffer.replica_id, vec!(op)); + network.broadcast(buffer.replica_id, vec![op]); log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); mutation_count -= 1; } diff --git a/crates/theme_selector/src/theme_selector.rs b/crates/theme_selector/src/theme_selector.rs index 67ad7899ea3b2ece34da28bea872e9035b93953f..4d226c3880b152f7b20f7ef39297120de1a1d665 100644 --- a/crates/theme_selector/src/theme_selector.rs +++ b/crates/theme_selector/src/theme_selector.rs @@ -11,7 +11,7 @@ use parking_lot::Mutex; use postage::watch; use std::{cmp, sync::Arc}; use theme::ThemeRegistry; -use workspace::{Settings, Workspace, AppState}; +use workspace::{AppState, Settings, Workspace}; #[derive(Clone)] pub struct ThemeSelectorParams { From 13ecd16685dccb701a3b76a4b59013f4c32467d5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 12:36:46 -0800 Subject: [PATCH 162/196] Index max buffer row on MultiBuffer --- crates/editor/src/display_map.rs | 4 ++-- crates/editor/src/element.rs | 2 +- crates/editor/src/multi_buffer.rs | 18 ++++++++++++++++++ 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 469dac5802cf59f616b6873d5fa926ad0e3d6de4..a3c95ad4bfa9ef89b9025495cd382cb72aca8aed 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -198,8 +198,8 @@ impl DisplaySnapshot { self.blocks_snapshot.buffer_rows(start_row) } - pub fn buffer_row_count(&self) -> u32 { - self.buffer_snapshot.max_point().row + 1 + pub fn max_buffer_row(&self) -> u32 { + self.buffer_snapshot.max_buffer_row() } pub fn prev_row_boundary(&self, input_display_point: DisplayPoint) -> (DisplayPoint, Point) { diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index e790973c8c9e15cefcd8b191fc58b1bf6c9f65be..9c45207bb01ac5cf02c6f2d0deeaf37ed954ec43 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -436,7 +436,7 
@@ impl EditorElement { } fn max_line_number_width(&self, snapshot: &EditorSnapshot, cx: &LayoutContext) -> f32 { - let digit_count = (snapshot.buffer_row_count() as f32).log10().floor() as usize + 1; + let digit_count = (snapshot.max_buffer_row() as f32).log10().floor() as usize + 1; let style = &self.settings.style; cx.text_layout_cache diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index f66ed061ad47fb7537a0962372295fab201c076b..f4c55f2b8601e5748eb5f3251047b09ede80a7e3 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -97,6 +97,7 @@ struct Excerpt { buffer_id: usize, buffer: BufferSnapshot, range: Range, + max_buffer_row: u32, text_summary: TextSummary, has_trailing_newline: bool, } @@ -104,6 +105,7 @@ struct Excerpt { #[derive(Clone, Debug, Default)] struct ExcerptSummary { excerpt_id: ExcerptId, + max_buffer_row: u32, text: TextSummary, } @@ -884,6 +886,10 @@ impl MultiBufferSnapshot { self.excerpts.summary().text.bytes } + pub fn max_buffer_row(&self) -> u32 { + self.excerpts.summary().max_buffer_row + } + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { let mut cursor = self.excerpts.cursor::(); cursor.seek(&offset, Bias::Right, &()); @@ -1565,6 +1571,7 @@ impl Excerpt { ) -> Self { Excerpt { id, + max_buffer_row: range.end.to_point(&buffer).row, text_summary: buffer.text_summary_for_range::(range.to_offset(&buffer)), buffer_id, buffer, @@ -1660,6 +1667,7 @@ impl sum_tree::Item for Excerpt { } ExcerptSummary { excerpt_id: self.id.clone(), + max_buffer_row: self.max_buffer_row, text, } } @@ -1672,6 +1680,7 @@ impl sum_tree::Summary for ExcerptSummary { debug_assert!(summary.excerpt_id > self.excerpt_id); self.excerpt_id = summary.excerpt_id.clone(); self.text.add_summary(&summary.text, &()); + self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row); } } @@ -2266,6 +2275,15 @@ mod tests { ); } + assert_eq!( + snapshot.max_buffer_row(), + expected_buffer_rows + .into_iter() + .filter_map(|r| r) + .max() + .unwrap() + ); + let mut excerpt_starts = excerpt_starts.into_iter(); for (buffer, range) in &expected_excerpts { let buffer_id = buffer.id(); From 8492c6e7ac7ae5f80c021eacda2bb46861898d5f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 13:07:43 -0800 Subject: [PATCH 163/196] Fix maintenance of MultiBuffer's buffer states --- crates/editor/src/multi_buffer.rs | 163 +++++++++++++++++------------- 1 file changed, 93 insertions(+), 70 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index f4c55f2b8601e5748eb5f3251047b09ede80a7e3..b76f97a3a04dba6ec569a12d2adfd572fa256e84 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -34,7 +34,7 @@ pub type ExcerptId = Locator; pub struct MultiBuffer { snapshot: RefCell, - buffers: HashMap, + buffers: RefCell>, subscriptions: Topic, singleton: bool, replica_id: ReplicaId, @@ -245,9 +245,17 @@ impl MultiBuffer { self.snapshot.borrow() } - pub fn as_singleton(&self) -> Option<&ModelHandle> { + pub fn as_singleton(&self) -> Option> { if self.singleton { - return Some(&self.buffers.values().next().unwrap().buffer); + return Some( + self.buffers + .borrow() + .values() + .next() + .unwrap() + .buffer + .clone(), + ); } else { None } @@ -364,40 +372,42 @@ impl MultiBuffer { let new_text = new_text.into(); for (buffer_id, mut edits) in buffer_edits { edits.sort_unstable_by_key(|(range, _)| range.start); - 
self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { - let mut edits = edits.into_iter().peekable(); - let mut insertions = Vec::new(); - let mut deletions = Vec::new(); - while let Some((mut range, mut is_insertion)) = edits.next() { - while let Some((next_range, next_is_insertion)) = edits.peek() { - if range.end >= next_range.start { - range.end = cmp::max(next_range.end, range.end); - is_insertion |= *next_is_insertion; - edits.next(); - } else { - break; + self.buffers.borrow()[&buffer_id] + .buffer + .update(cx, |buffer, cx| { + let mut edits = edits.into_iter().peekable(); + let mut insertions = Vec::new(); + let mut deletions = Vec::new(); + while let Some((mut range, mut is_insertion)) = edits.next() { + while let Some((next_range, next_is_insertion)) = edits.peek() { + if range.end >= next_range.start { + range.end = cmp::max(next_range.end, range.end); + is_insertion |= *next_is_insertion; + edits.next(); + } else { + break; + } } - } - if is_insertion { - insertions.push( - buffer.anchor_before(range.start)..buffer.anchor_before(range.end), - ); - } else if !range.is_empty() { - deletions.push( - buffer.anchor_before(range.start)..buffer.anchor_before(range.end), - ); + if is_insertion { + insertions.push( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + ); + } else if !range.is_empty() { + deletions.push( + buffer.anchor_before(range.start)..buffer.anchor_before(range.end), + ); + } } - } - if autoindent { - buffer.edit_with_autoindent(deletions, "", cx); - buffer.edit_with_autoindent(insertions, new_text.clone(), cx); - } else { - buffer.edit(deletions, "", cx); - buffer.edit(insertions, new_text.clone(), cx); - } - }) + if autoindent { + buffer.edit_with_autoindent(deletions, "", cx); + buffer.edit_with_autoindent(insertions, new_text.clone(), cx); + } else { + buffer.edit(deletions, "", cx); + buffer.edit(insertions, new_text.clone(), cx); + } + }) } } @@ -414,7 +424,7 @@ impl MultiBuffer { return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); } - for BufferState { buffer, .. } in self.buffers.values() { + for BufferState { buffer, .. } in self.buffers.borrow().values() { buffer.update(cx, |buffer, _| buffer.start_transaction_at(now)); } self.history.start_transaction(now) @@ -434,7 +444,7 @@ impl MultiBuffer { } let mut buffer_transactions = HashSet::default(); - for BufferState { buffer, .. } in self.buffers.values() { + for BufferState { buffer, .. 
} in self.buffers.borrow().values() { if let Some(transaction_id) = buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx)) { @@ -490,40 +500,42 @@ impl MultiBuffer { } for (buffer_id, mut selections) in selections_by_buffer { - self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { - selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer).unwrap()); - let mut selections = selections.into_iter().peekable(); - let merged_selections = Arc::from_iter(iter::from_fn(|| { - let mut selection = selections.next()?; - while let Some(next_selection) = selections.peek() { - if selection - .end - .cmp(&next_selection.start, buffer) - .unwrap() - .is_ge() - { - let next_selection = selections.next().unwrap(); - if next_selection + self.buffers.borrow()[&buffer_id] + .buffer + .update(cx, |buffer, cx| { + selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer).unwrap()); + let mut selections = selections.into_iter().peekable(); + let merged_selections = Arc::from_iter(iter::from_fn(|| { + let mut selection = selections.next()?; + while let Some(next_selection) = selections.peek() { + if selection .end - .cmp(&selection.end, buffer) + .cmp(&next_selection.start, buffer) .unwrap() .is_ge() { - selection.end = next_selection.end; + let next_selection = selections.next().unwrap(); + if next_selection + .end + .cmp(&selection.end, buffer) + .unwrap() + .is_ge() + { + selection.end = next_selection.end; + } + } else { + break; } - } else { - break; } - } - Some(selection) - })); - buffer.set_active_selections(merged_selections, cx); - }); + Some(selection) + })); + buffer.set_active_selections(merged_selections, cx); + }); } } pub fn remove_active_selections(&mut self, cx: &mut ModelContext) { - for buffer in self.buffers.values() { + for buffer in self.buffers.borrow().values() { buffer .buffer .update(cx, |buffer, cx| buffer.remove_active_selections(cx)); @@ -538,7 +550,7 @@ impl MultiBuffer { while let Some(transaction) = self.history.pop_undo() { let mut undone = false; for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.get(&buffer_id) { + if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) { undone |= buffer.update(cx, |buf, cx| { buf.undo_transaction(*buffer_transaction_id, cx) }); @@ -561,7 +573,7 @@ impl MultiBuffer { while let Some(transaction) = self.history.pop_redo() { let mut redone = false; for (buffer_id, buffer_transaction_id) in &transaction.buffer_transactions { - if let Some(BufferState { buffer, .. }) = self.buffers.get(&buffer_id) { + if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(&buffer_id) { redone |= buffer.update(cx, |buf, cx| { buf.redo_transaction(*buffer_transaction_id, cx) }); @@ -613,6 +625,7 @@ impl MultiBuffer { let excerpt = Excerpt::new(id.clone(), buffer.id(), buffer_snapshot, range, false); snapshot.excerpts.push(excerpt, &()); self.buffers + .borrow_mut() .entry(props.buffer.id()) .or_insert_with(|| BufferState { buffer, @@ -644,7 +657,7 @@ impl MultiBuffer { pub fn save(&mut self, cx: &mut ModelContext) -> Result>> { let mut save_tasks = Vec::new(); - for BufferState { buffer, .. } in self.buffers.values() { + for BufferState { buffer, .. 
} in self.buffers.borrow().values() { save_tasks.push(buffer.update(cx, |buffer, cx| buffer.save(cx))?); } @@ -658,6 +671,7 @@ impl MultiBuffer { pub fn language<'a>(&self, cx: &'a AppContext) -> Option<&'a Arc> { self.buffers + .borrow() .values() .next() .and_then(|state| state.buffer.read(cx).language()) @@ -679,18 +693,26 @@ impl MultiBuffer { let mut diagnostics_updated = false; let mut is_dirty = false; let mut has_conflict = false; - for buffer_state in self.buffers.values() { + let mut buffers = self.buffers.borrow_mut(); + for buffer_state in buffers.values_mut() { let buffer = buffer_state.buffer.read(cx); - let buffer_edited = buffer.version().gt(&buffer_state.last_version); - let buffer_reparsed = buffer.parse_count() > buffer_state.last_parse_count; + let version = buffer.version(); + let parse_count = buffer.parse_count(); + let diagnostics_update_count = buffer.diagnostics_update_count(); + + let buffer_edited = version.gt(&buffer_state.last_version); + let buffer_reparsed = parse_count > buffer_state.last_parse_count; let buffer_diagnostics_updated = - buffer.diagnostics_update_count() > buffer_state.last_diagnostics_update_count; + diagnostics_update_count > buffer_state.last_diagnostics_update_count; if buffer_edited || buffer_reparsed || buffer_diagnostics_updated { + buffer_state.last_version = version; + buffer_state.last_parse_count = parse_count; + buffer_state.last_diagnostics_update_count = diagnostics_update_count; excerpts_to_edit.extend( buffer_state .excerpts .iter() - .map(|excerpt_id| (excerpt_id, buffer_state, buffer_edited)), + .map(|excerpt_id| (excerpt_id, buffer_state.buffer.clone(), buffer_edited)), ); } @@ -714,10 +736,11 @@ impl MultiBuffer { let mut new_excerpts = SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); - for (id, buffer_state, buffer_edited) in excerpts_to_edit { + for (id, buffer, buffer_edited) in excerpts_to_edit { new_excerpts.push_tree(cursor.slice(&Some(id), Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); - let buffer = buffer_state.buffer.read(cx); + let buffer_id = buffer.id(); + let buffer = buffer.read(cx); let mut new_excerpt; if buffer_edited { @@ -740,7 +763,7 @@ impl MultiBuffer { new_excerpt = Excerpt::new( id.clone(), - buffer_state.buffer.id(), + buffer_id, buffer.snapshot(), old_excerpt.range.clone(), old_excerpt.has_trailing_newline, From 699dafbbd463e2d63e39c04b8f7428acbf3b4344 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 14:06:17 -0800 Subject: [PATCH 164/196] Avoid cloning diagnostic messages from language server --- crates/project/src/worktree.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 31f4fe26937ff5faf268e2aa150e08a409caa569..f66af7b55f1f0c0c3240bece6fc4ad84ee0f78ca 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -34,6 +34,7 @@ use std::{ ffi::{OsStr, OsString}, fmt, future::Future, + mem, ops::{Deref, Range}, path::{Path, PathBuf}, sync::{ @@ -671,7 +672,7 @@ impl Worktree { fn update_diagnostics( &mut self, - params: lsp::PublishDiagnosticsParams, + mut params: lsp::PublishDiagnosticsParams, cx: &mut ModelContext, ) -> Result<()> { let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; @@ -688,7 +689,7 @@ impl Worktree { let mut group_ids_by_diagnostic_range = HashMap::default(); let mut diagnostics_by_group_id = HashMap::default(); let mut next_group_id = 0; - for diagnostic in 
¶ms.diagnostics { + for diagnostic in &mut params.diagnostics { let source = diagnostic.source.as_ref(); let code = diagnostic.code.as_ref(); let group_id = diagnostic_ranges(&diagnostic, &abs_path) @@ -715,7 +716,7 @@ impl Worktree { lsp::NumberOrString::String(code) => code, }), severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: diagnostic.message.clone(), + message: mem::take(&mut diagnostic.message), group_id, is_primary: false, }, From bc906fef9c7d6cab6beaa067366437215d6b3743 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 14:07:09 -0800 Subject: [PATCH 165/196] Store worktree's diagnostics summaries ordered by path --- crates/project/src/worktree.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index f66af7b55f1f0c0c3240bece6fc4ad84ee0f78ca..4e4c8581983ec747963b01491d93d45d16f7331e 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -7,6 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; use client::{proto, Client, PeerId, TypedEnvelope, UserStore}; use clock::ReplicaId; +use collections::BTreeMap; use collections::{hash_map, HashMap}; use futures::{Stream, StreamExt}; use fuzzy::CharBag; @@ -218,7 +219,7 @@ impl Worktree { client: client.clone(), loading_buffers: Default::default(), open_buffers: Default::default(), - diagnostic_summaries: HashMap::default(), + diagnostic_summaries: Default::default(), queued_operations: Default::default(), languages, user_store, @@ -832,7 +833,7 @@ pub struct LocalWorktree { open_buffers: HashMap>, shared_buffers: HashMap>>, diagnostics: HashMap, Vec>>, - diagnostic_summaries: HashMap, DiagnosticSummary>, + diagnostic_summaries: BTreeMap, DiagnosticSummary>, queued_operations: Vec<(u64, Operation)>, languages: Arc, client: Arc, @@ -856,7 +857,7 @@ pub struct RemoteWorktree { replica_id: ReplicaId, loading_buffers: LoadingBuffers, open_buffers: HashMap, - diagnostic_summaries: HashMap, DiagnosticSummary>, + diagnostic_summaries: BTreeMap, DiagnosticSummary>, languages: Arc, user_store: ModelHandle, queued_operations: Vec<(u64, Operation)>, From 3c26f67ea393df44421414bb3e78f061b27e8765 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 14:07:50 -0800 Subject: [PATCH 166/196] Minor cleanup in Buffer::update_diagnostics --- crates/language/src/buffer.rs | 22 +++++++++------------- crates/language/src/language.rs | 9 +++++---- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 2d9ddd76d8a989539dd2796310bb4b618ae1167c..c09f27bfe3937a5ec6c03aceb2d003e5fad3689b 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -730,6 +730,9 @@ impl Buffer { let version = version.map(|version| version as usize); let content = if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); + language_server + .pending_snapshots + .retain(|&v, _| v >= version); let snapshot = language_server .pending_snapshots .get(&version) @@ -756,6 +759,10 @@ impl Buffer { let entry = &mut diagnostics[ix]; let mut start = entry.range.start; let mut end = entry.range.end; + + // Some diagnostics are based on files on disk instead of buffers' + // current contents. Adjust these diagnostics' ranges to reflect + // any unsaved edits. 
if entry .diagnostic .source @@ -781,6 +788,8 @@ impl Buffer { entry.range = content.clip_point_utf16(start, Bias::Left) ..content.clip_point_utf16(end, Bias::Right); + + // Expand empty ranges by one character if entry.range.start == entry.range.end { entry.range.end.column += 1; entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right); @@ -794,19 +803,6 @@ impl Buffer { drop(edits_since_save); self.diagnostics = DiagnosticSet::new(diagnostics, content); - - if let Some(version) = version { - let language_server = self.language_server.as_mut().unwrap(); - let versions_to_delete = language_server - .pending_snapshots - .range(..version) - .map(|(v, _)| *v) - .collect::>(); - for version in versions_to_delete { - language_server.pending_snapshots.remove(&version); - } - } - self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 99161d1f5c778464e8c5d6f98367801a8381ccf1..317a6ed84a770aba875d33621ce5ab60df6ad396 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -9,10 +9,9 @@ use anyhow::{anyhow, Result}; pub use buffer::Operation; pub use buffer::*; pub use diagnostic_set::DiagnosticEntry; -use gpui::{executor::Background, AppContext}; +use gpui::AppContext; use highlight_map::HighlightMap; use lazy_static::lazy_static; -use lsp::LanguageServer; use parking_lot::Mutex; use serde::Deserialize; use std::{collections::HashSet, path::Path, str, sync::Arc}; @@ -48,7 +47,7 @@ pub struct LanguageServerConfig { pub disk_based_diagnostic_sources: HashSet, #[cfg(any(test, feature = "test-support"))] #[serde(skip)] - pub fake_server: Option<(Arc, Arc)>, + pub fake_server: Option<(Arc, Arc)>, } #[derive(Clone, Debug, Deserialize)] @@ -219,7 +218,9 @@ impl Grammar { #[cfg(any(test, feature = "test-support"))] impl LanguageServerConfig { - pub async fn fake(executor: Arc) -> (Self, lsp::FakeLanguageServer) { + pub async fn fake( + executor: Arc, + ) -> (Self, lsp::FakeLanguageServer) { let (server, fake) = lsp::LanguageServer::fake(executor).await; fake.started .store(false, std::sync::atomic::Ordering::SeqCst); From a93f5e5fb40eff798061036228e249f4dd905403 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 14:28:23 -0800 Subject: [PATCH 167/196] Avoid repeated subscriptions + clones when adding another excerpt for same buffer --- crates/editor/src/multi_buffer.rs | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index b76f97a3a04dba6ec569a12d2adfd572fa256e84..6008f3623c614aec016215e20a27387731ee89a9 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -68,13 +68,13 @@ pub trait FromAnchor: 'static { fn from_anchor(anchor: &Anchor, snapshot: &MultiBufferSnapshot) -> Self; } -#[derive(Debug)] struct BufferState { buffer: ModelHandle, last_version: clock::Global, last_parse_count: usize, last_diagnostics_update_count: usize, excerpts: Vec, + _subscriptions: [gpui::Subscription; 2], } #[derive(Clone, Default)] @@ -599,17 +599,9 @@ impl MultiBuffer { assert_eq!(self.history.transaction_depth, 0); self.sync(cx); - let buffer = props.buffer.clone(); - cx.observe(&buffer, |_, _, cx| cx.notify()).detach(); - cx.subscribe(&buffer, Self::on_buffer_event).detach(); - - let buffer_snapshot = buffer.read(cx).snapshot(); + let buffer_snapshot = props.buffer.read(cx).snapshot(); let range 
= buffer_snapshot.anchor_before(&props.range.start) ..buffer_snapshot.anchor_after(&props.range.end); - let last_version = buffer_snapshot.version().clone(); - let last_parse_count = buffer_snapshot.parse_count(); - let last_diagnostics_update_count = buffer_snapshot.diagnostics_update_count(); - let mut snapshot = self.snapshot.borrow_mut(); let mut prev_id = None; let edit_start = snapshot.excerpts.summary().text.bytes; @@ -622,27 +614,30 @@ impl MultiBuffer { ); let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); - let excerpt = Excerpt::new(id.clone(), buffer.id(), buffer_snapshot, range, false); - snapshot.excerpts.push(excerpt, &()); self.buffers .borrow_mut() .entry(props.buffer.id()) .or_insert_with(|| BufferState { - buffer, - last_version, - last_parse_count, - last_diagnostics_update_count, + last_version: buffer_snapshot.version().clone(), + last_parse_count: buffer_snapshot.parse_count(), + last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(), excerpts: Default::default(), + _subscriptions: [ + cx.observe(&props.buffer, |_, _, cx| cx.notify()), + cx.subscribe(&props.buffer, Self::on_buffer_event), + ], + buffer: props.buffer.clone(), }) .excerpts .push(id.clone()); + let excerpt = Excerpt::new(id.clone(), props.buffer.id(), buffer_snapshot, range, false); + snapshot.excerpts.push(excerpt, &()); self.subscriptions.publish_mut([Edit { old: edit_start..edit_start, new: edit_start..snapshot.excerpts.summary().text.bytes, }]); cx.notify(); - id } From a888620e5fc1175a83d00186c1f293658fa3f67f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 15:25:57 -0800 Subject: [PATCH 168/196] Implement MultiBuffer::remove_excerpts We'll need this for updating project diagnostics --- crates/editor/src/multi_buffer.rs | 104 +++++++++++++++++++++++++++--- 1 file changed, 96 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 6008f3623c614aec016215e20a27387731ee89a9..63c87d6606563ae79f315e4348ac76166a7a9c5c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -641,6 +641,57 @@ impl MultiBuffer { id } + pub fn excerpt_ids_for_buffer(&self, buffer: &ModelHandle) -> Vec { + self.buffers + .borrow() + .get(&buffer.id()) + .map_or(Vec::new(), |state| state.excerpts.clone()) + } + + pub fn remove_excerpts<'a>( + &mut self, + excerpt_ids: impl IntoIterator, + cx: &mut ModelContext, + ) { + let mut buffers = self.buffers.borrow_mut(); + let mut snapshot = self.snapshot.borrow_mut(); + let mut new_excerpts = SumTree::new(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); + let mut edits = Vec::new(); + for excerpt_id in excerpt_ids { + new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + let mut old_start = cursor.start().1; + let old_end = cursor.end(&()).1; + cursor.next(&()); + + if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { + buffer_state.excerpts.retain(|id| id != excerpt_id); + } + + // When removing the last excerpt, remove the trailing newline from + // the previous excerpt. 
+ if cursor.item().is_none() && old_start > 0 { + old_start -= 1; + new_excerpts.update_last(|e| e.has_trailing_newline = false, &()); + } + + let new_start = new_excerpts.summary().text.bytes; + edits.push(Edit { + old: old_start..old_end, + new: new_start..new_start, + }); + } + } + } + new_excerpts.push_tree(cursor.suffix(&()), &()); + drop(cursor); + snapshot.excerpts = new_excerpts; + self.subscriptions.publish_mut(edits); + cx.notify(); + } + fn on_buffer_event( &mut self, _: ModelHandle, @@ -2063,8 +2114,9 @@ mod tests { ); }); + let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!( - multibuffer.read(cx).snapshot(cx).text(), + snapshot.text(), concat!( "bbbb\n", // Preserve newlines "c\n", // @@ -2083,27 +2135,44 @@ mod tests { }] ); - let multibuffer = multibuffer.read(cx).snapshot(cx); + let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!( - multibuffer.clip_point(Point::new(0, 5), Bias::Left), + snapshot.clip_point(Point::new(0, 5), Bias::Left), Point::new(0, 4) ); assert_eq!( - multibuffer.clip_point(Point::new(0, 5), Bias::Right), + snapshot.clip_point(Point::new(0, 5), Bias::Right), Point::new(0, 4) ); assert_eq!( - multibuffer.clip_point(Point::new(5, 1), Bias::Right), + snapshot.clip_point(Point::new(5, 1), Bias::Right), Point::new(5, 1) ); assert_eq!( - multibuffer.clip_point(Point::new(5, 2), Bias::Right), + snapshot.clip_point(Point::new(5, 2), Bias::Right), Point::new(5, 2) ); assert_eq!( - multibuffer.clip_point(Point::new(5, 3), Bias::Right), + snapshot.clip_point(Point::new(5, 3), Bias::Right), Point::new(5, 2) ); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| { + let buffer_2_excerpt_id = multibuffer.excerpt_ids_for_buffer(&buffer_2)[0].clone(); + multibuffer.remove_excerpts(&[buffer_2_excerpt_id], cx); + multibuffer.snapshot(cx) + }); + + assert_eq!( + snapshot.text(), + concat!( + "bbbb\n", // Preserve newlines + "c\n", // + "cc\n", // + "ddd\n", // + "eeee", // + ) + ); } #[gpui::test] @@ -2201,7 +2270,7 @@ mod tests { let mut buffers: Vec> = Vec::new(); let list = cx.add_model(|_| MultiBuffer::new(0)); let mut excerpt_ids = Vec::new(); - let mut expected_excerpts = Vec::new(); + let mut expected_excerpts = Vec::<(ModelHandle, Range)>::new(); let mut old_versions = Vec::new(); for _ in 0..operations { @@ -2210,6 +2279,20 @@ mod tests { let buffer = buffers.choose(&mut rng).unwrap(); buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); } + 20..=29 if !expected_excerpts.is_empty() => { + let ix = rng.gen_range(0..expected_excerpts.len()); + let id = excerpt_ids.remove(ix); + let (buffer, range) = expected_excerpts.remove(ix); + let buffer = buffer.read(cx); + log::info!( + "Removing excerpt {}: {:?}", + ix, + buffer + .text_for_range(range.to_offset(&buffer)) + .collect::(), + ); + list.update(cx, |list, cx| list.remove_excerpts(&[id], cx)); + } _ => { let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { let base_text = RandomCharIter::new(&mut rng).take(10).collect::(); @@ -2275,6 +2358,11 @@ mod tests { expected_text.pop(); } + // Always report one buffer row + if expected_buffer_rows.is_empty() { + expected_buffer_rows.push(Some(0)); + } + assert_eq!(snapshot.text(), expected_text); log::info!("MultiBuffer text: {:?}", expected_text); From 2c3efdea8cfe2d587f02712e5b428f052d927c6d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 21 Dec 2021 16:39:23 -0800 Subject: [PATCH 169/196] WIP - Start work on updating project diagnostics view --- Cargo.lock | 3 + crates/diagnostics/Cargo.toml | 3 + 
crates/diagnostics/src/diagnostics.rs | 227 ++++++++++++++++++++------ crates/project/src/project.rs | 11 ++ crates/project/src/worktree.rs | 25 ++- 5 files changed, 214 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1544ef8302da558c1153e81434b1deae1217feae..299526596c0909b8244e252146357682eaf0e612 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1404,13 +1404,16 @@ name = "diagnostics" version = "0.1.0" dependencies = [ "anyhow", + "client", "collections", "editor", "gpui", "language", "postage", "project", + "serde_json", "unindent", + "util", "workspace", ] diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index a3d75b21f9c2d7002d19b793844a8911754f134c..5da4c9c8faa804b4a95fd9f3ebf35ec5849b525b 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -13,12 +13,15 @@ editor = { path = "../editor" } language = { path = "../language" } gpui = { path = "../gpui" } project = { path = "../project" } +util = { path = "../util" } workspace = { path = "../workspace" } postage = { version = "0.4", features = ["futures-traits"] } [dev-dependencies] unindent = "0.1" +client = { path = "../client", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } workspace = { path = "../workspace", features = ["test-support"] } +serde_json = { version = "1", features = ["preserve_order"] } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index dd87ec9bd65adae1ea82aa9f49971af5fceb556d..9e028c53218d05894fd9bed61894eb3ffe980dcf 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -12,6 +12,7 @@ use language::{Bias, Buffer, Point}; use postage::watch; use project::Project; use std::ops::Range; +use util::TryFutureExt; use workspace::Workspace; action!(Toggle); @@ -65,11 +66,49 @@ impl View for ProjectDiagnosticsEditor { impl ProjectDiagnosticsEditor { fn new( - replica_id: u16, + project: ModelHandle, settings: watch::Receiver, cx: &mut ViewContext, ) -> Self { - let excerpts = cx.add_model(|_| MultiBuffer::new(replica_id)); + let project_paths = project + .read(cx) + .diagnostic_summaries(cx) + .map(|e| e.0) + .collect::>(); + + cx.spawn(|this, mut cx| { + let project = project.clone(); + async move { + for project_path in project_paths { + let buffer = project + .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) + .await?; + this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx)) + } + Result::<_, anyhow::Error>::Ok(()) + } + }) + .detach(); + + cx.subscribe(&project, |_, project, event, cx| { + if let project::Event::DiagnosticsUpdated(project_path) = event { + let project_path = project_path.clone(); + cx.spawn(|this, mut cx| { + async move { + let buffer = project + .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) + .await?; + this.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx)); + Ok(()) + } + .log_err() + }) + .detach(); + } + }) + .detach(); + + let excerpts = cx.add_model(|cx| MultiBuffer::new(project.read(cx).replica_id())); let build_settings = editor::settings_builder(excerpts.downgrade(), settings.clone()); let editor = cx.add_view(|cx| Editor::for_buffer(excerpts.clone(), build_settings.clone(), cx)); @@ -82,6 +121,11 @@ impl ProjectDiagnosticsEditor { } } + #[cfg(test)] + fn text(&self, cx: &AppContext) -> String { + 
self.editor.read(cx).text(cx) + } + fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { let diagnostics = cx.add_model(|_| ProjectDiagnostics::new(workspace.project().clone())); workspace.add_item(diagnostics, cx); @@ -193,6 +237,7 @@ impl ProjectDiagnosticsEditor { cx, ); }); + cx.notify(); } } @@ -205,27 +250,7 @@ impl workspace::Item for ProjectDiagnostics { cx: &mut ViewContext, ) -> Self::View { let project = handle.read(cx).project.clone(); - let project_paths = project - .read(cx) - .diagnostic_summaries(cx) - .map(|e| e.0) - .collect::>(); - - cx.spawn(|view, mut cx| { - let project = project.clone(); - async move { - for project_path in project_paths { - let buffer = project - .update(&mut cx, |project, cx| project.open_buffer(project_path, cx)) - .await?; - view.update(&mut cx, |view, cx| view.populate_excerpts(buffer, cx)) - } - Result::<_, anyhow::Error>::Ok(()) - } - }) - .detach(); - - ProjectDiagnosticsEditor::new(project.read(cx).replica_id(), settings, cx) + ProjectDiagnosticsEditor::new(project, settings, cx) } fn project_path(&self) -> Option { @@ -282,35 +307,68 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { #[cfg(test)] mod tests { use super::*; - use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16}; + use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore}; + use gpui::TestAppContext; + use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry, PointUtf16}; + use project::FakeFs; + use serde_json::json; + use std::sync::Arc; use unindent::Unindent as _; use workspace::WorkspaceParams; #[gpui::test] - fn test_diagnostics(cx: &mut MutableAppContext) { - let settings = WorkspaceParams::test(cx).settings; - let view = cx.add_view(Default::default(), |cx| { - ProjectDiagnosticsEditor::new(0, settings, cx) + async fn test_diagnostics(mut cx: TestAppContext) { + let settings = cx.update(WorkspaceParams::test).settings; + let http_client = FakeHttpClient::new(|_| async move { Ok(ServerResponse::new(404)) }); + let client = Client::new(); + let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx)); + let fs = Arc::new(FakeFs::new()); + + let project = cx.update(|cx| { + Project::local( + client.clone(), + user_store, + Arc::new(LanguageRegistry::new()), + fs.clone(), + cx, + ) }); - let text = " - fn main() { - let x = vec![]; - let y = vec![]; - a(x); - b(y); - // comment 1 - // comment 2 - c(y); - d(x); - } - " - .unindent(); - - let buffer = cx.add_model(|cx| { - let mut buffer = Buffer::new(0, text, cx); - buffer - .update_diagnostics( + fs.insert_tree( + "/test", + json!({ + "a.rs": " + const a: i32 = 'a'; + ".unindent(), + + "main.rs": " + fn main() { + let x = vec![]; + let y = vec![]; + a(x); + b(y); + // comment 1 + // comment 2 + c(y); + d(x); + } + " + .unindent(), + }), + ) + .await; + + let worktree = project + .update(&mut cx, |project, cx| { + project.add_local_worktree("/test", cx) + }) + .await + .unwrap(); + + worktree.update(&mut cx, |worktree, cx| { + worktree + .update_diagnostic_entries( + Arc::from("/test/main.rs".as_ref()), None, vec![ DiagnosticEntry { @@ -381,11 +439,16 @@ mod tests { cx, ) .unwrap(); - buffer }); - view.update(cx, |view, cx| { - view.populate_excerpts(buffer, cx); + let view = cx.add_view(Default::default(), |cx| { + ProjectDiagnosticsEditor::new(project.clone(), settings, cx) + }); + + view.condition(&mut cx, |view, cx| view.text(cx).contains("fn main()")) + .await; + + view.update(&mut cx, |view, cx| { let 
editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); assert_eq!( @@ -423,5 +486,71 @@ mod tests { ) ); }); + + worktree.update(&mut cx, |worktree, cx| { + worktree + .update_diagnostic_entries( + Arc::from("/test/a.rs".as_ref()), + None, + vec![DiagnosticEntry { + range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15), + diagnostic: Diagnostic { + message: "mismatched types\nexpected `usize`, found `char`".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + group_id: 0, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + }); + + view.condition(&mut cx, |view, cx| view.text(cx).contains("const a")) + .await; + + view.update(&mut cx, |view, cx| { + let editor = view.editor.update(cx, |editor, cx| editor.snapshot(cx)); + + assert_eq!( + editor.text(), + concat!( + // a.rs + "\n", // primary message + "\n", // filename + "const a: i32 = 'a';\n", + // main.rs, diagnostic group 1 + "\n", // primary message + "\n", // filename + " let x = vec![];\n", + " let y = vec![];\n", + "\n", // supporting diagnostic + " a(x);\n", + " b(y);\n", + "\n", // supporting diagnostic + " // comment 1\n", + " // comment 2\n", + " c(y);\n", + "\n", // supporting diagnostic + " d(x);\n", + // main.rs, diagnostic group 2 + "\n", // primary message + "\n", // filename + "fn main() {\n", + " let x = vec![];\n", + "\n", // supporting diagnostic + " let y = vec![];\n", + " a(x);\n", + "\n", // supporting diagnostic + " b(y);\n", + "\n", // context ellipsis + " c(y);\n", + " d(x);\n", + "\n", // supporting diagnostic + "}" + ) + ); + }); } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 34706f111886f5aa5313dd7dd8b7244c65bfb54f..ae2730e9cbcdf0c043c90f0f601c78268efccc95 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -56,9 +56,11 @@ pub struct Collaborator { pub replica_id: ReplicaId, } +#[derive(Debug)] pub enum Event { ActiveEntryChanged(Option), WorktreeRemoved(usize), + DiagnosticsUpdated(ProjectPath), } #[derive(Clone, Debug, Eq, PartialEq, Hash)] @@ -473,6 +475,15 @@ impl Project { fn add_worktree(&mut self, worktree: ModelHandle, cx: &mut ModelContext) { cx.observe(&worktree, |_, _, cx| cx.notify()).detach(); + cx.subscribe(&worktree, |_, worktree, event, cx| match event { + worktree::Event::DiagnosticsUpdated(path) => { + cx.emit(Event::DiagnosticsUpdated(ProjectPath { + worktree_id: worktree.id(), + path: path.clone(), + })); + } + }) + .detach(); self.worktrees.push(worktree); cx.notify(); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4e4c8581983ec747963b01491d93d45d16f7331e..62c05adef55d42bfbc5067c196e10a6ef8c43cbd 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -64,8 +64,9 @@ pub enum Worktree { Remote(RemoteWorktree), } +#[derive(Debug)] pub enum Event { - Closed, + DiagnosticsUpdated(Arc), } impl Entity for Worktree { @@ -671,7 +672,7 @@ impl Worktree { } } - fn update_diagnostics( + pub fn update_diagnostics( &mut self, mut params: lsp::PublishDiagnosticsParams, cx: &mut ModelContext, @@ -736,17 +737,28 @@ impl Worktree { }) .collect::>(); + self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx) + } + + pub fn update_diagnostic_entries( + &mut self, + path: Arc, + version: Option, + diagnostics: Vec>, + cx: &mut ModelContext, + ) -> Result<()> { + let this = self.as_local_mut().unwrap(); for buffer in this.open_buffers.values() { if let Some(buffer) = buffer.upgrade(cx) { if buffer .read(cx) .file() - 
.map_or(false, |file| *file.path() == worktree_path) + .map_or(false, |file| *file.path() == path) { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( buffer.remote_id(), - buffer.update_diagnostics(params.version, diagnostics.clone(), cx), + buffer.update_diagnostics(version, diagnostics.clone(), cx), ) }); self.send_buffer_update(remote_id, operation?, cx); @@ -757,8 +769,9 @@ impl Worktree { let this = self.as_local_mut().unwrap(); this.diagnostic_summaries - .insert(worktree_path.clone(), DiagnosticSummary::new(&diagnostics)); - this.diagnostics.insert(worktree_path.clone(), diagnostics); + .insert(path.clone(), DiagnosticSummary::new(&diagnostics)); + this.diagnostics.insert(path.clone(), diagnostics); + cx.emit(Event::DiagnosticsUpdated(path.clone())); Ok(()) } From 275b7e8d4f80258e8c490c0fd13ffb77e0bfd0e8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 22 Dec 2021 17:57:36 +0100 Subject: [PATCH 170/196] Implement `MultiBuffer::remove_excerpts` by inserting tombstones This will make it easier to use anchors in the presence of deletes. --- crates/editor/src/multi_buffer.rs | 164 ++++++++++++++++++++++-------- 1 file changed, 123 insertions(+), 41 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 63c87d6606563ae79f315e4348ac76166a7a9c5c..d3d21a14970fe239ba9d0cad7b3d6da11fb6b6b1 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -6,8 +6,8 @@ use clock::ReplicaId; use collections::{HashMap, HashSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use language::{ - Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, - ToOffset as _, ToPoint as _, TransactionId, + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Patch, + Selection, ToOffset as _, ToPoint as _, TransactionId, }; use std::{ cell::{Ref, RefCell}, @@ -100,6 +100,7 @@ struct Excerpt { max_buffer_row: u32, text_summary: TextSummary, has_trailing_newline: bool, + is_tombstone: bool, } #[derive(Clone, Debug, Default)] @@ -107,8 +108,12 @@ struct ExcerptSummary { excerpt_id: ExcerptId, max_buffer_row: u32, text: TextSummary, + visible_excerpts: usize, } +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] +struct VisibleExcerptCount(usize); + pub struct MultiBufferRows<'a> { buffer_row_range: Range, excerpts: Cursor<'a, Excerpt, Point>, @@ -603,16 +608,25 @@ impl MultiBuffer { let range = buffer_snapshot.anchor_before(&props.range.start) ..buffer_snapshot.anchor_after(&props.range.end); let mut snapshot = self.snapshot.borrow_mut(); - let mut prev_id = None; let edit_start = snapshot.excerpts.summary().text.bytes; - snapshot.excerpts.update_last( - |excerpt| { - excerpt.has_trailing_newline = true; - prev_id = Some(excerpt.id.clone()); - }, - &(), - ); + snapshot.excerpts = { + let mut cursor = snapshot.excerpts.cursor::(); + let mut new_excerpts = cursor.slice( + &VisibleExcerptCount(snapshot.excerpts.summary().visible_excerpts), + Bias::Left, + &(), + ); + if let Some(mut excerpt) = cursor.item().cloned() { + excerpt.has_trailing_newline = !excerpt.is_tombstone; + new_excerpts.push(excerpt, &()); + cursor.next(&()); + } + new_excerpts.push_tree(cursor.suffix(&()), &()); + new_excerpts + }; + + let prev_id = snapshot.excerpts.last().map(|e| e.id.clone()); let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); self.buffers .borrow_mut() @@ -655,39 +669,60 @@ impl 
MultiBuffer { ) { let mut buffers = self.buffers.borrow_mut(); let mut snapshot = self.snapshot.borrow_mut(); - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); - let mut edits = Vec::new(); - for excerpt_id in excerpt_ids { - new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - let mut old_start = cursor.start().1; - let old_end = cursor.end(&()).1; - cursor.next(&()); - - if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { - buffer_state.excerpts.retain(|id| id != excerpt_id); - } + let mut edits = Patch::default(); + snapshot.excerpts = { + let mut new_excerpts = SumTree::new(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); + for excerpt_id in excerpt_ids { + new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + let old_start = cursor.start().1; + let old_end = cursor.end(&()).1; + cursor.next(&()); + + if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { + buffer_state.excerpts.retain(|id| id != excerpt_id); + } - // When removing the last excerpt, remove the trailing newline from - // the previous excerpt. - if cursor.item().is_none() && old_start > 0 { - old_start -= 1; - new_excerpts.update_last(|e| e.has_trailing_newline = false, &()); + new_excerpts.push(excerpt.tombstone(), &()); + let new_start = new_excerpts.summary().text.bytes; + edits.push(Edit { + old: old_start..old_end, + new: new_start..new_start, + }); } + } + } + new_excerpts.push_tree(cursor.suffix(&()), &()); + new_excerpts + }; - let new_start = new_excerpts.summary().text.bytes; - edits.push(Edit { + // Ensure there's no trailing newline on the last visible excerpt. 
+ snapshot.excerpts = { + let mut cursor = snapshot.excerpts.cursor::<(VisibleExcerptCount, usize)>(); + let mut new_excerpts = cursor.slice( + &VisibleExcerptCount(snapshot.excerpts.summary().visible_excerpts), + Bias::Left, + &(), + ); + if let Some(mut excerpt) = cursor.item().cloned() { + if excerpt.has_trailing_newline { + let old_start = cursor.start().1; + let old_end = cursor.end(&()).1; + edits = edits.compose([Edit { old: old_start..old_end, - new: new_start..new_start, - }); + new: old_start..old_end - 1, + }]); } + excerpt.has_trailing_newline = false; + new_excerpts.push(excerpt, &()); + cursor.next(&()); } - } - new_excerpts.push_tree(cursor.suffix(&()), &()); - drop(cursor); - snapshot.excerpts = new_excerpts; + new_excerpts.push_tree(cursor.suffix(&()), &()); + new_excerpts + }; + self.subscriptions.publish_mut(edits); cx.notify(); } @@ -888,6 +923,10 @@ impl MultiBufferSnapshot { iter::from_fn(move || { if offset == *cursor.start() { cursor.prev(&()); + while cursor.item()?.is_tombstone { + cursor.prev(&()); + } + let excerpt = cursor.item()?; excerpt_chunks = Some( excerpt @@ -1638,7 +1677,7 @@ impl Excerpt { range: Range, has_trailing_newline: bool, ) -> Self { - Excerpt { + Self { id, max_buffer_row: range.end.to_point(&buffer).row, text_summary: buffer.text_summary_for_range::(range.to_offset(&buffer)), @@ -1646,6 +1685,20 @@ impl Excerpt { buffer, range, has_trailing_newline, + is_tombstone: false, + } + } + + fn tombstone(&self) -> Self { + Self { + id: self.id.clone(), + buffer_id: self.buffer_id, + buffer: self.buffer.clone(), + range: self.range.start.clone()..self.range.start.clone(), + max_buffer_row: 0, + text_summary: Default::default(), + has_trailing_newline: false, + is_tombstone: true, } } @@ -1722,6 +1775,7 @@ impl fmt::Debug for Excerpt { .field("range", &self.range) .field("text_summary", &self.text_summary) .field("has_trailing_newline", &self.has_trailing_newline) + .field("is_tombstone", &self.is_tombstone) .finish() } } @@ -1738,6 +1792,7 @@ impl sum_tree::Item for Excerpt { excerpt_id: self.id.clone(), max_buffer_row: self.max_buffer_row, text, + visible_excerpts: if self.is_tombstone { 0 } else { 1 }, } } } @@ -1750,6 +1805,7 @@ impl sum_tree::Summary for ExcerptSummary { self.excerpt_id = summary.excerpt_id.clone(); self.text.add_summary(&summary.text, &()); self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row); + self.visible_excerpts += summary.visible_excerpts; } } @@ -1795,6 +1851,12 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { } } +impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for VisibleExcerptCount { + fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { + self.0 += summary.visible_excerpts; + } +} + impl<'a> MultiBufferRows<'a> { pub fn seek(&mut self, row: u32) { self.buffer_row_range = 0..0; @@ -1803,6 +1865,13 @@ impl<'a> MultiBufferRows<'a> { .seek_forward(&Point::new(row, 0), Bias::Right, &()); if self.excerpts.item().is_none() { self.excerpts.prev(&()); + while let Some(excerpt) = self.excerpts.item() { + if excerpt.is_tombstone { + self.excerpts.prev(&()); + } else { + break; + } + } if self.excerpts.item().is_none() && row == 0 { self.buffer_row_range = 0..1; @@ -1832,9 +1901,11 @@ impl<'a> Iterator for MultiBufferRows<'a> { self.excerpts.item()?; self.excerpts.next(&()); let excerpt = self.excerpts.item()?; - self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; - self.buffer_row_range.end = - self.buffer_row_range.start + 
excerpt.text_summary.lines.row + 1; + if !excerpt.is_tombstone { + self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; + self.buffer_row_range.end = + self.buffer_row_range.start + excerpt.text_summary.lines.row + 1; + } } } } @@ -1869,6 +1940,9 @@ impl<'a> Iterator for MultiBufferChunks<'a> { Some(chunk) } else { self.excerpts.next(&()); + while self.excerpts.item()?.is_tombstone { + self.excerpts.next(&()); + } let excerpt = self.excerpts.item()?; self.excerpt_chunks = Some( excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme), @@ -1888,6 +1962,14 @@ impl<'a> MultiBufferBytes<'a> { self.chunk = chunk; } else { self.excerpts.next(&()); + while let Some(excerpt) = self.excerpts.item() { + if excerpt.is_tombstone { + self.excerpts.next(&()); + } else { + break; + } + } + if let Some(excerpt) = self.excerpts.item() { let mut excerpt_bytes = excerpt.bytes_in_range(0..self.range.end - self.excerpts.start()); From e31205c95e7a2e7cdfe8c732ff0b534e9dacf63d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 10:18:33 -0800 Subject: [PATCH 171/196] Revert "Implement `MultiBuffer::remove_excerpts` by inserting tombstones" This reverts commit 275b7e8d4f80258e8c490c0fd13ffb77e0bfd0e8. --- crates/editor/src/multi_buffer.rs | 164 ++++++++---------------------- 1 file changed, 41 insertions(+), 123 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index d3d21a14970fe239ba9d0cad7b3d6da11fb6b6b1..63c87d6606563ae79f315e4348ac76166a7a9c5c 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -6,8 +6,8 @@ use clock::ReplicaId; use collections::{HashMap, HashSet}; use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use language::{ - Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Patch, - Selection, ToOffset as _, ToPoint as _, TransactionId, + Buffer, BufferChunks, BufferSnapshot, Chunk, DiagnosticEntry, Event, File, Language, Selection, + ToOffset as _, ToPoint as _, TransactionId, }; use std::{ cell::{Ref, RefCell}, @@ -100,7 +100,6 @@ struct Excerpt { max_buffer_row: u32, text_summary: TextSummary, has_trailing_newline: bool, - is_tombstone: bool, } #[derive(Clone, Debug, Default)] @@ -108,12 +107,8 @@ struct ExcerptSummary { excerpt_id: ExcerptId, max_buffer_row: u32, text: TextSummary, - visible_excerpts: usize, } -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] -struct VisibleExcerptCount(usize); - pub struct MultiBufferRows<'a> { buffer_row_range: Range, excerpts: Cursor<'a, Excerpt, Point>, @@ -608,25 +603,16 @@ impl MultiBuffer { let range = buffer_snapshot.anchor_before(&props.range.start) ..buffer_snapshot.anchor_after(&props.range.end); let mut snapshot = self.snapshot.borrow_mut(); + let mut prev_id = None; let edit_start = snapshot.excerpts.summary().text.bytes; + snapshot.excerpts.update_last( + |excerpt| { + excerpt.has_trailing_newline = true; + prev_id = Some(excerpt.id.clone()); + }, + &(), + ); - snapshot.excerpts = { - let mut cursor = snapshot.excerpts.cursor::(); - let mut new_excerpts = cursor.slice( - &VisibleExcerptCount(snapshot.excerpts.summary().visible_excerpts), - Bias::Left, - &(), - ); - if let Some(mut excerpt) = cursor.item().cloned() { - excerpt.has_trailing_newline = !excerpt.is_tombstone; - new_excerpts.push(excerpt, &()); - cursor.next(&()); - } - new_excerpts.push_tree(cursor.suffix(&()), &()); - new_excerpts - }; - - let prev_id = 
snapshot.excerpts.last().map(|e| e.id.clone()); let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); self.buffers .borrow_mut() @@ -669,60 +655,39 @@ impl MultiBuffer { ) { let mut buffers = self.buffers.borrow_mut(); let mut snapshot = self.snapshot.borrow_mut(); - let mut edits = Patch::default(); - snapshot.excerpts = { - let mut new_excerpts = SumTree::new(); - let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); - for excerpt_id in excerpt_ids { - new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - let old_start = cursor.start().1; - let old_end = cursor.end(&()).1; - cursor.next(&()); - - if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { - buffer_state.excerpts.retain(|id| id != excerpt_id); - } + let mut new_excerpts = SumTree::new(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); + let mut edits = Vec::new(); + for excerpt_id in excerpt_ids { + new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); + if let Some(excerpt) = cursor.item() { + if excerpt.id == *excerpt_id { + let mut old_start = cursor.start().1; + let old_end = cursor.end(&()).1; + cursor.next(&()); - new_excerpts.push(excerpt.tombstone(), &()); - let new_start = new_excerpts.summary().text.bytes; - edits.push(Edit { - old: old_start..old_end, - new: new_start..new_start, - }); + if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { + buffer_state.excerpts.retain(|id| id != excerpt_id); } - } - } - new_excerpts.push_tree(cursor.suffix(&()), &()); - new_excerpts - }; - // Ensure there's no trailing newline on the last visible excerpt. - snapshot.excerpts = { - let mut cursor = snapshot.excerpts.cursor::<(VisibleExcerptCount, usize)>(); - let mut new_excerpts = cursor.slice( - &VisibleExcerptCount(snapshot.excerpts.summary().visible_excerpts), - Bias::Left, - &(), - ); - if let Some(mut excerpt) = cursor.item().cloned() { - if excerpt.has_trailing_newline { - let old_start = cursor.start().1; - let old_end = cursor.end(&()).1; - edits = edits.compose([Edit { + // When removing the last excerpt, remove the trailing newline from + // the previous excerpt. 
+ if cursor.item().is_none() && old_start > 0 { + old_start -= 1; + new_excerpts.update_last(|e| e.has_trailing_newline = false, &()); + } + + let new_start = new_excerpts.summary().text.bytes; + edits.push(Edit { old: old_start..old_end, - new: old_start..old_end - 1, - }]); + new: new_start..new_start, + }); } - excerpt.has_trailing_newline = false; - new_excerpts.push(excerpt, &()); - cursor.next(&()); } - new_excerpts.push_tree(cursor.suffix(&()), &()); - new_excerpts - }; - + } + new_excerpts.push_tree(cursor.suffix(&()), &()); + drop(cursor); + snapshot.excerpts = new_excerpts; self.subscriptions.publish_mut(edits); cx.notify(); } @@ -923,10 +888,6 @@ impl MultiBufferSnapshot { iter::from_fn(move || { if offset == *cursor.start() { cursor.prev(&()); - while cursor.item()?.is_tombstone { - cursor.prev(&()); - } - let excerpt = cursor.item()?; excerpt_chunks = Some( excerpt @@ -1677,7 +1638,7 @@ impl Excerpt { range: Range, has_trailing_newline: bool, ) -> Self { - Self { + Excerpt { id, max_buffer_row: range.end.to_point(&buffer).row, text_summary: buffer.text_summary_for_range::(range.to_offset(&buffer)), @@ -1685,20 +1646,6 @@ impl Excerpt { buffer, range, has_trailing_newline, - is_tombstone: false, - } - } - - fn tombstone(&self) -> Self { - Self { - id: self.id.clone(), - buffer_id: self.buffer_id, - buffer: self.buffer.clone(), - range: self.range.start.clone()..self.range.start.clone(), - max_buffer_row: 0, - text_summary: Default::default(), - has_trailing_newline: false, - is_tombstone: true, } } @@ -1775,7 +1722,6 @@ impl fmt::Debug for Excerpt { .field("range", &self.range) .field("text_summary", &self.text_summary) .field("has_trailing_newline", &self.has_trailing_newline) - .field("is_tombstone", &self.is_tombstone) .finish() } } @@ -1792,7 +1738,6 @@ impl sum_tree::Item for Excerpt { excerpt_id: self.id.clone(), max_buffer_row: self.max_buffer_row, text, - visible_excerpts: if self.is_tombstone { 0 } else { 1 }, } } } @@ -1805,7 +1750,6 @@ impl sum_tree::Summary for ExcerptSummary { self.excerpt_id = summary.excerpt_id.clone(); self.text.add_summary(&summary.text, &()); self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row); - self.visible_excerpts += summary.visible_excerpts; } } @@ -1851,12 +1795,6 @@ impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a ExcerptId> { } } -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for VisibleExcerptCount { - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) { - self.0 += summary.visible_excerpts; - } -} - impl<'a> MultiBufferRows<'a> { pub fn seek(&mut self, row: u32) { self.buffer_row_range = 0..0; @@ -1865,13 +1803,6 @@ impl<'a> MultiBufferRows<'a> { .seek_forward(&Point::new(row, 0), Bias::Right, &()); if self.excerpts.item().is_none() { self.excerpts.prev(&()); - while let Some(excerpt) = self.excerpts.item() { - if excerpt.is_tombstone { - self.excerpts.prev(&()); - } else { - break; - } - } if self.excerpts.item().is_none() && row == 0 { self.buffer_row_range = 0..1; @@ -1901,11 +1832,9 @@ impl<'a> Iterator for MultiBufferRows<'a> { self.excerpts.item()?; self.excerpts.next(&()); let excerpt = self.excerpts.item()?; - if !excerpt.is_tombstone { - self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; - self.buffer_row_range.end = - self.buffer_row_range.start + excerpt.text_summary.lines.row + 1; - } + self.buffer_row_range.start = excerpt.range.start.to_point(&excerpt.buffer).row; + self.buffer_row_range.end = + self.buffer_row_range.start + 
excerpt.text_summary.lines.row + 1; } } } @@ -1940,9 +1869,6 @@ impl<'a> Iterator for MultiBufferChunks<'a> { Some(chunk) } else { self.excerpts.next(&()); - while self.excerpts.item()?.is_tombstone { - self.excerpts.next(&()); - } let excerpt = self.excerpts.item()?; self.excerpt_chunks = Some( excerpt.chunks_in_range(0..self.range.end - self.excerpts.start(), self.theme), @@ -1962,14 +1888,6 @@ impl<'a> MultiBufferBytes<'a> { self.chunk = chunk; } else { self.excerpts.next(&()); - while let Some(excerpt) = self.excerpts.item() { - if excerpt.is_tombstone { - self.excerpts.next(&()); - } else { - break; - } - } - if let Some(excerpt) = self.excerpts.item() { let mut excerpt_bytes = excerpt.bytes_in_range(0..self.range.end - self.excerpts.start()); From 1544da887e3c43ef5aa365e0750a3051f96b38b3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 12:52:41 -0800 Subject: [PATCH 172/196] Start work on preserving continuity of disk-based diagnostics --- crates/language/src/buffer.rs | 67 ++++++++++++++++++++++++++++++++-- crates/language/src/proto.rs | 2 + crates/project/src/worktree.rs | 1 + crates/rpc/proto/zed.proto | 10 +++-- 4 files changed, 72 insertions(+), 8 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c09f27bfe3937a5ec6c03aceb2d003e5fad3689b..3d28ce19a8742be83b7ed232566d2d9538794e66 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -18,11 +18,11 @@ use smol::future::yield_now; use std::{ any::Any, cell::RefCell, - cmp, + cmp::{self, Reverse}, collections::{BTreeMap, HashMap, HashSet}, ffi::OsString, future::Future, - iter::{Iterator, Peekable}, + iter::{self, Iterator, Peekable}, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -92,6 +92,7 @@ pub struct Diagnostic { pub severity: DiagnosticSeverity, pub message: String, pub group_id: usize, + pub is_valid: bool, pub is_primary: bool, } @@ -725,7 +726,7 @@ impl Buffer { mut diagnostics: Vec>, cx: &mut ModelContext, ) -> Result { - diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end)); + diagnostics.sort_unstable_by_key(|d| (d.range.start, Reverse(d.range.end))); let version = version.map(|version| version as usize); let content = if let Some(version) = version { @@ -754,6 +755,7 @@ impl Buffer { .peekable(); let mut last_edit_old_end = PointUtf16::zero(); let mut last_edit_new_end = PointUtf16::zero(); + let mut has_disk_based_diagnostics = false; let mut ix = 0; 'outer: while ix < diagnostics.len() { let entry = &mut diagnostics[ix]; @@ -769,6 +771,7 @@ impl Buffer { .as_ref() .map_or(false, |source| disk_based_sources.contains(source)) { + has_disk_based_diagnostics = true; while let Some(edit) = edits_since_save.peek() { if edit.old.end <= start { last_edit_old_end = edit.old.end; @@ -802,7 +805,62 @@ impl Buffer { } drop(edits_since_save); - self.diagnostics = DiagnosticSet::new(diagnostics, content); + + let diagnostics = diagnostics.into_iter().map(|entry| DiagnosticEntry { + range: content.anchor_before(entry.range.start)..content.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }); + + // Some diagnostic sources are reported on a less frequent basis than others. + // If those sources are absent from this message, then preserve the previous + // diagnostics for those sources, but mark them as stale, and set a time to + // clear them out. 
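// --- Illustrative aside (not part of this patch): a minimal, std-only sketch of
// the preservation rule described in the comment above. `Entry`, `source`, and
// `is_valid` stand in for the real diagnostic types, and the ordered interleaving
// the real code performs is omitted for brevity.
#[derive(Clone)]
struct Entry {
    source: Option<String>,
    message: String,
    is_valid: bool,
}

// Keep previous disk-based entries, marked invalid, whenever the incoming update
// contains no disk-based source at all.
fn merge(old: &[Entry], new: Vec<Entry>, disk_based_sources: &[&str]) -> Vec<Entry> {
    let is_disk_based = |entry: &Entry| {
        entry
            .source
            .as_deref()
            .map_or(false, |source| disk_based_sources.contains(&source))
    };
    let new_has_disk_based = new.iter().any(|entry| is_disk_based(entry));
    let mut merged = new;
    if !new_has_disk_based {
        merged.extend(old.iter().filter(|&entry| is_disk_based(entry)).map(|entry| Entry {
            is_valid: false,
            ..entry.clone()
        }));
    }
    merged
}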
+ let mut merged_old_disk_based_diagnostics = false; + self.diagnostics = if has_disk_based_diagnostics { + DiagnosticSet::from_sorted_entries(diagnostics, content) + } else { + let mut new_diagnostics = diagnostics.peekable(); + let mut old_diagnostics = self + .diagnostics + .iter() + .filter_map(|entry| { + let is_disk_based = entry + .diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)); + if is_disk_based { + merged_old_disk_based_diagnostics = true; + let mut entry = entry.clone(); + entry.diagnostic.is_valid = false; + Some(entry) + } else { + None + } + }) + .peekable(); + let merged_diagnostics = + iter::from_fn(|| match (old_diagnostics.peek(), new_diagnostics.peek()) { + (None, None) => None, + (Some(_), None) => old_diagnostics.next(), + (None, Some(_)) => new_diagnostics.next(), + (Some(old), Some(new)) => { + let ordering = old + .range + .start + .cmp(&new.range.start, content) + .unwrap() + .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap()); + if ordering.is_lt() { + old_diagnostics.next() + } else { + new_diagnostics.next() + } + } + }); + DiagnosticSet::from_sorted_entries(merged_diagnostics, content) + }; + self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); @@ -2009,6 +2067,7 @@ impl Default for Diagnostic { message: Default::default(), group_id: Default::default(), is_primary: Default::default(), + is_valid: Default::default(), } } } diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 304a296088d14725b3c09e2cd851a84924cb3c4a..06d3609d59f9d787df2377d20c0e4a9ed84d3cb0 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -117,6 +117,7 @@ pub fn serialize_diagnostics<'a>( } as i32, group_id: entry.diagnostic.group_id as u64, is_primary: entry.diagnostic.is_primary, + is_valid: entry.diagnostic.is_valid, code: entry.diagnostic.code.clone(), source: entry.diagnostic.source.clone(), }) @@ -273,6 +274,7 @@ pub fn deserialize_diagnostics( is_primary: diagnostic.is_primary, code: diagnostic.code, source: diagnostic.source, + is_valid: diagnostic.is_valid, }, }) }) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 62c05adef55d42bfbc5067c196e10a6ef8c43cbd..53ea5f1bf84cc660b3c001e54bdc56d066fde00b 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -721,6 +721,7 @@ impl Worktree { message: mem::take(&mut diagnostic.message), group_id, is_primary: false, + is_valid: true, }, }); } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index d8fa9bc8e5c6c02df499ab81c26aacfd5450942c..3a36868b8d5b20a8caf306b64311f1cb876f97cd 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -303,10 +303,12 @@ message Diagnostic { Anchor end = 2; Severity severity = 3; string message = 4; - uint64 group_id = 5; - bool is_primary = 6; - optional string code = 7; - optional string source = 8; + optional string code = 5; + optional string source = 6; + uint64 group_id = 7; + bool is_primary = 8; + bool is_valid = 9; + enum Severity { None = 0; Error = 1; From 0faf5308acb4de4e3e047cc37f8f3b5d71608b19 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 13:22:12 -0800 Subject: [PATCH 173/196] Add a unit test for preserving disk-based diagnostics --- crates/language/src/buffer.rs | 2 +- crates/language/src/tests.rs | 101 ++++++++++++++++++++++++++++++++++ crates/text/src/text.rs | 10 +++- 3 files changed, 110 insertions(+), 3 deletions(-) diff 
--git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 3d28ce19a8742be83b7ed232566d2d9538794e66..10deb5c9caf8580534857e59edaaf38c84d12c11 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2067,7 +2067,7 @@ impl Default for Diagnostic { message: Default::default(), group_id: Default::default(), is_primary: Default::default(), - is_valid: Default::default(), + is_valid: true, } } } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index e18589cbc949c0b2e266b854b0fceee998e8bd2c..04e33d9acc0c1c897e94edf726b732520647a841 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -750,6 +750,107 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_preserving_disk_based_diagnostics(mut cx: gpui::TestAppContext) { + let buffer = cx.add_model(|cx| { + let text = " + use a::*; + const b: i32 = c::; + const c: i32 = d; + const e: i32 = f +; + " + .unindent(); + + let mut rust_lang = rust_lang(); + rust_lang.config.language_server = Some(LanguageServerConfig { + disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]), + ..Default::default() + }); + + let mut buffer = Buffer::new(0, text, cx); + buffer.set_language(Some(Arc::new(rust_lang)), None, cx); + buffer + }); + + // Initially, there are three errors. The second one is disk-based. + let diagnostics = vec![ + DiagnosticEntry { + range: PointUtf16::new(1, 16)..PointUtf16::new(1, 18), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "syntax error 1".to_string(), + group_id: 0, + is_primary: true, + is_valid: true, + ..Default::default() + }, + }, + DiagnosticEntry { + range: PointUtf16::new(2, 15)..PointUtf16::new(2, 16), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "cannot find value `d` in this scope".to_string(), + source: Some("disk".to_string()), + group_id: 1, + is_primary: true, + is_valid: true, + ..Default::default() + }, + }, + DiagnosticEntry { + range: PointUtf16::new(3, 17)..PointUtf16::new(3, 18), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "syntax error 2".to_string(), + group_id: 2, + is_primary: true, + is_valid: true, + ..Default::default() + }, + }, + ]; + buffer.update(&mut cx, |buffer, cx| { + buffer + .update_diagnostics(None, diagnostics.clone(), cx) + .unwrap(); + assert_eq!( + buffer + .snapshot() + .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) + .collect::>(), + diagnostics.as_slice(), + ); + }); + + // The diagnostics are updated, and the disk-based diagnostic is omitted from this message. 
+ let mut new_diagnostics = vec![diagnostics[0].clone(), diagnostics[2].clone()]; + new_diagnostics[0].diagnostic.message = "another syntax error".to_string(); + new_diagnostics[1].diagnostic.message = "yet another syntax error".to_string(); + + buffer.update(&mut cx, |buffer, cx| { + buffer + .update_diagnostics(None, new_diagnostics.clone(), cx) + .unwrap(); + assert_eq!( + buffer + .snapshot() + .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) + .collect::>(), + &[ + new_diagnostics[0].clone(), + DiagnosticEntry { + range: diagnostics[1].range.clone(), + diagnostic: Diagnostic { + is_valid: false, + ..diagnostics[1].diagnostic.clone() + }, + }, + new_diagnostics[1].clone(), + ], + ); + }); +} + #[gpui::test] async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { cx.add_model(|cx| { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 055e80b29a78fafa4eb67c726dc985d1cb97bcc2..eec52d2fa58d179b02e2cd5e630ee0326b0457f8 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2057,12 +2057,18 @@ pub trait FromAnchor { impl FromAnchor for Point { fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { - anchor.to_point(snapshot) + snapshot.summary_for_anchor(anchor) + } +} + +impl FromAnchor for PointUtf16 { + fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { + snapshot.summary_for_anchor(anchor) } } impl FromAnchor for usize { fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self { - anchor.to_offset(snapshot) + snapshot.summary_for_anchor(anchor) } } From 06d2cdc20d1429e4bc5ff8e211f2aac746984148 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 13:22:24 -0800 Subject: [PATCH 174/196] Remove unused multi_buffer::FromAnchor trait --- crates/editor/src/multi_buffer.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 63c87d6606563ae79f315e4348ac76166a7a9c5c..309712432a40e6c73cf9e576abff31a2378a4025 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -64,10 +64,6 @@ pub trait ToPoint: 'static + fmt::Debug { fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point; } -pub trait FromAnchor: 'static { - fn from_anchor(anchor: &Anchor, snapshot: &MultiBufferSnapshot) -> Self; -} - struct BufferState { buffer: ModelHandle, last_version: clock::Global, From b9551ae8b101ea068631a1a92a715d6275509b9e Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 14:50:51 -0800 Subject: [PATCH 175/196] Preserve group ids when updating diagnostics --- crates/language/src/buffer.rs | 170 ++++++++++++++++++++++++---------- crates/language/src/tests.rs | 30 ++++-- 2 files changed, 143 insertions(+), 57 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 10deb5c9caf8580534857e59edaaf38c84d12c11..1a3b08be9bf1e86e0f9bb3b463cc2885d4fa0419 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -7,6 +7,7 @@ pub use crate::{ }; use anyhow::{anyhow, Result}; use clock::ReplicaId; +use collections::hash_map; use futures::FutureExt as _; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; @@ -18,11 +19,11 @@ use smol::future::yield_now; use std::{ any::Any, cell::RefCell, - cmp::{self, Reverse}, + cmp::{self, Ordering}, collections::{BTreeMap, HashMap, HashSet}, ffi::OsString, future::Future, - iter::{self, 
Iterator, Peekable}, + iter::{Iterator, Peekable}, ops::{Deref, DerefMut, Range}, path::{Path, PathBuf}, str, @@ -68,6 +69,7 @@ pub struct Buffer { remote_selections: TreeMap]>>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, + next_diagnostic_group_id: usize, language_server: Option, deferred_ops: OperationQueue, #[cfg(test)] @@ -360,6 +362,7 @@ impl Buffer { remote_selections: Default::default(), diagnostics: Default::default(), diagnostics_update_count: 0, + next_diagnostic_group_id: 0, language_server: None, deferred_ops: OperationQueue::new(), #[cfg(test)] @@ -726,7 +729,20 @@ impl Buffer { mut diagnostics: Vec>, cx: &mut ModelContext, ) -> Result { - diagnostics.sort_unstable_by_key(|d| (d.range.start, Reverse(d.range.end))); + fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { + Ordering::Equal + .then_with(|| b.is_primary.cmp(&a.is_primary)) + .then_with(|| a.source.cmp(&b.source)) + .then_with(|| a.severity.cmp(&b.severity)) + .then_with(|| a.message.cmp(&b.message)) + } + + diagnostics.sort_unstable_by(|a, b| { + Ordering::Equal + .then_with(|| a.range.start.cmp(&b.range.start)) + .then_with(|| b.range.end.cmp(&a.range.end)) + .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic)) + }); let version = version.map(|version| version as usize); let content = if let Some(version) = version { @@ -803,65 +819,117 @@ impl Buffer { } ix += 1; } - drop(edits_since_save); - let diagnostics = diagnostics.into_iter().map(|entry| DiagnosticEntry { - range: content.anchor_before(entry.range.start)..content.anchor_after(entry.range.end), - diagnostic: entry.diagnostic, - }); - - // Some diagnostic sources are reported on a less frequent basis than others. - // If those sources are absent from this message, then preserve the previous - // diagnostics for those sources, but mark them as stale, and set a time to - // clear them out. - let mut merged_old_disk_based_diagnostics = false; - self.diagnostics = if has_disk_based_diagnostics { - DiagnosticSet::from_sorted_entries(diagnostics, content) - } else { - let mut new_diagnostics = diagnostics.peekable(); - let mut old_diagnostics = self - .diagnostics - .iter() - .filter_map(|entry| { - let is_disk_based = entry + let mut merged_diagnostics = Vec::with_capacity(diagnostics.len()); + let mut old_diagnostics = self + .diagnostics + .iter() + .map(|entry| { + ( + entry, + entry .diagnostic .source .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)); - if is_disk_based { + .map_or(false, |source| disk_based_sources.contains(source)), + ) + }) + .peekable(); + let mut new_diagnostics = diagnostics + .into_iter() + .map(|entry| DiagnosticEntry { + range: content.anchor_before(entry.range.start) + ..content.anchor_after(entry.range.end), + diagnostic: entry.diagnostic, + }) + .peekable(); + + // Compare the old and new diagnostics for two reasons. + // 1. Recycling group ids - diagnostic groups whose primary diagnostic has not + // changed should use the same group id as before, so that downstream code + // can determine which diagnostics are new. + // 2. Preserving disk-based diagnostics - These diagnostic sources are reported + // on a less frequent basis than others. If these sources are absent from this + // message, then preserve the previous diagnostics for those sources, but mark + // them as invalid, and set a time to clear them out. 
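// --- Illustrative aside (not part of this patch): the lock-step merge pattern used
// in the loop below, shown with plain integers instead of diagnostic entries. Two
// sorted, peekable streams are walked together, and whichever side compares lower
// is advanced and emitted first.
fn merge_sorted(old: Vec<i32>, new: Vec<i32>) -> Vec<i32> {
    let mut old = old.into_iter().peekable();
    let mut new = new.into_iter().peekable();
    let mut merged = Vec::new();
    loop {
        match (old.peek(), new.peek()) {
            (None, None) => break,
            (Some(_), None) => merged.push(old.next().unwrap()),
            (None, Some(_)) => merged.push(new.next().unwrap()),
            (Some(a), Some(b)) => {
                if a <= b {
                    merged.push(old.next().unwrap());
                } else {
                    merged.push(new.next().unwrap());
                }
            }
        }
    }
    merged
}
// For example, merge_sorted(vec![1, 4], vec![2, 3]) returns [1, 2, 3, 4].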
+ let mut group_id_replacements = HashMap::new(); + let mut merged_old_disk_based_diagnostics = false; + loop { + match (old_diagnostics.peek(), new_diagnostics.peek()) { + (None, None) => break, + (None, Some(_)) => { + merged_diagnostics.push(new_diagnostics.next().unwrap()); + } + (Some(_), None) => { + let (old_entry, is_disk_based) = old_diagnostics.next().unwrap(); + if is_disk_based && !has_disk_based_diagnostics { + let mut old_entry = old_entry.clone(); + old_entry.diagnostic.is_valid = false; merged_old_disk_based_diagnostics = true; - let mut entry = entry.clone(); - entry.diagnostic.is_valid = false; - Some(entry) - } else { - None + merged_diagnostics.push(old_entry); } - }) - .peekable(); - let merged_diagnostics = - iter::from_fn(|| match (old_diagnostics.peek(), new_diagnostics.peek()) { - (None, None) => None, - (Some(_), None) => old_diagnostics.next(), - (None, Some(_)) => new_diagnostics.next(), - (Some(old), Some(new)) => { - let ordering = old - .range - .start - .cmp(&new.range.start, content) - .unwrap() - .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap()); - if ordering.is_lt() { - old_diagnostics.next() - } else { - new_diagnostics.next() + } + (Some((old, _)), Some(new)) => { + let ordering = Ordering::Equal + .then_with(|| old.range.start.cmp(&new.range.start, content).unwrap()) + .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap()) + .then_with(|| compare_diagnostics(&old.diagnostic, &new.diagnostic)); + match ordering { + Ordering::Less => { + let (old_entry, is_disk_based) = old_diagnostics.next().unwrap(); + if is_disk_based && !has_disk_based_diagnostics { + let mut old_entry = old_entry.clone(); + old_entry.diagnostic.is_valid = false; + merged_old_disk_based_diagnostics = true; + merged_diagnostics.push(old_entry); + } + } + Ordering::Equal => { + let (old_entry, _) = old_diagnostics.next().unwrap(); + let new_entry = new_diagnostics.next().unwrap(); + if new_entry.diagnostic.is_primary { + group_id_replacements.insert( + new_entry.diagnostic.group_id, + old_entry.diagnostic.group_id, + ); + } + merged_diagnostics.push(new_entry); + } + Ordering::Greater => { + let new_entry = new_diagnostics.next().unwrap(); + merged_diagnostics.push(new_entry); } } - }); - DiagnosticSet::from_sorted_entries(merged_diagnostics, content) - }; + } + } + } + drop(old_diagnostics); + + // Having determined which group ids should be recycled, renumber all of + // groups. Any new group that does not correspond to an old group receives + // a brand new group id. 
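// --- Illustrative aside (not part of this patch): the renumbering step described
// above, reduced to its essentials. `replacements` maps group ids from the new
// message onto previously assigned ids where the primary diagnostic matched; any
// id without a replacement receives a fresh one.
use std::collections::HashMap;

fn renumber_groups(
    group_ids: &mut [usize],
    replacements: &mut HashMap<usize, usize>,
    next_group_id: &mut usize,
) {
    for id in group_ids.iter_mut() {
        *id = *replacements.entry(*id).or_insert_with(|| {
            let fresh = *next_group_id;
            *next_group_id += 1;
            fresh
        });
    }
}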
+ let mut next_diagnostic_group_id = self.next_diagnostic_group_id; + for entry in &mut merged_diagnostics { + if entry.diagnostic.is_valid { + match group_id_replacements.entry(entry.diagnostic.group_id) { + hash_map::Entry::Occupied(e) => entry.diagnostic.group_id = *e.get(), + hash_map::Entry::Vacant(e) => { + entry.diagnostic.group_id = post_inc(&mut next_diagnostic_group_id); + e.insert(entry.diagnostic.group_id); + } + } + } + } + self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content); self.diagnostics_update_count += 1; + self.next_diagnostic_group_id = next_diagnostic_group_id; + + if merged_old_disk_based_diagnostics { + // TODO - spawn a task to clear the old ones + } + cx.notify(); cx.emit(Event::DiagnosticsUpdated); Ok(Operation::UpdateDiagnostics { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 04e33d9acc0c1c897e94edf726b732520647a841..be8060b0b01309d174688b9100ae1a3a8696b92b 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -386,6 +386,7 @@ fn test_edit_with_autoindent(cx: &mut MutableAppContext) { // buffer // }); // } + #[gpui::test] fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut MutableAppContext) { cx.add_model(|cx| { @@ -518,6 +519,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + source: Some("disk".to_string()), group_id: 0, is_primary: true, ..Default::default() @@ -528,6 +530,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), + source: Some("disk".to_string()), group_id: 1, is_primary: true, ..Default::default() @@ -537,6 +540,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12), diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, + source: Some("disk".to_string()), message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, @@ -560,6 +564,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), + source: Some("disk".to_string()), group_id: 1, is_primary: true, ..Default::default() @@ -570,6 +575,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), + source: Some("disk".to_string()), group_id: 2, is_primary: true, ..Default::default() @@ -608,6 +614,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + source: Some("disk".to_string()), group_id: 0, is_primary: true, ..Default::default() @@ -638,7 +645,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), - group_id: 1, + group_id: 3, is_primary: true, ..Default::default() } @@ -648,6 +655,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), + source: Some("disk".to_string()), group_id: 0, is_primary: true, ..Default::default() @@ -740,7 +748,7 @@ async fn test_diagnostics(mut cx: 
gpui::TestAppContext) { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), source: Some("disk".to_string()), - group_id: 1, + group_id: 4, is_primary: true, ..Default::default() }, @@ -751,7 +759,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { } #[gpui::test] -async fn test_preserving_disk_based_diagnostics(mut cx: gpui::TestAppContext) { +async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui::TestAppContext) { let buffer = cx.add_model(|cx| { let text = " use a::*; @@ -822,10 +830,10 @@ async fn test_preserving_disk_based_diagnostics(mut cx: gpui::TestAppContext) { ); }); - // The diagnostics are updated, and the disk-based diagnostic is omitted from this message. + // The diagnostics are updated. The disk-based diagnostic is omitted, and one + // other diagnostic has changed its message. let mut new_diagnostics = vec![diagnostics[0].clone(), diagnostics[2].clone()]; new_diagnostics[0].diagnostic.message = "another syntax error".to_string(); - new_diagnostics[1].diagnostic.message = "yet another syntax error".to_string(); buffer.update(&mut cx, |buffer, cx| { buffer @@ -837,7 +845,16 @@ async fn test_preserving_disk_based_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) .collect::>(), &[ - new_diagnostics[0].clone(), + // The changed diagnostic is given a new group id. + DiagnosticEntry { + range: new_diagnostics[0].range.clone(), + diagnostic: Diagnostic { + group_id: 3, + ..new_diagnostics[0].diagnostic.clone() + }, + }, + // The old disk-based diagnostic is marked as invalid, but keeps + // its original group id. DiagnosticEntry { range: diagnostics[1].range.clone(), diagnostic: Diagnostic { @@ -845,6 +862,7 @@ async fn test_preserving_disk_based_diagnostics(mut cx: gpui::TestAppContext) { ..diagnostics[1].diagnostic.clone() }, }, + // The unchanged diagnostic keeps its original group id new_diagnostics[1].clone(), ], ); From 5d8ed535bea5271ddc0e85d480aa62e36f648699 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 15:15:13 -0800 Subject: [PATCH 176/196] Clear out old disk-based diagnostics after 2 seconds --- crates/language/src/buffer.rs | 41 ++++++++++++++++++++++++++++------- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1a3b08be9bf1e86e0f9bb3b463cc2885d4fa0419..10527b2a3d9157bc26b636841efe819a550bfd78 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -69,6 +69,7 @@ pub struct Buffer { remote_selections: TreeMap]>>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, + clear_invalid_diagnostics_task: Option>, next_diagnostic_group_id: usize, language_server: Option, deferred_ops: OperationQueue, @@ -363,6 +364,7 @@ impl Buffer { diagnostics: Default::default(), diagnostics_update_count: 0, next_diagnostic_group_id: 0, + clear_invalid_diagnostics_task: None, language_server: None, deferred_ops: OperationQueue::new(), #[cfg(test)] @@ -845,14 +847,14 @@ impl Buffer { }) .peekable(); - // Compare the old and new diagnostics for two reasons. - // 1. Recycling group ids - diagnostic groups whose primary diagnostic has not + // Incorporate the *old* diagnostics into the new diagnostics set, in two ways: + // 1. Recycle group ids - diagnostic groups whose primary diagnostic has not // changed should use the same group id as before, so that downstream code // can determine which diagnostics are new. 
- // 2. Preserving disk-based diagnostics - These diagnostic sources are reported + // 2. Preserve disk-based diagnostics - Some diagnostic sources are reported // on a less frequent basis than others. If these sources are absent from this // message, then preserve the previous diagnostics for those sources, but mark - // them as invalid, and set a time to clear them out. + // them as invalid, and set a timer to clear them out. let mut group_id_replacements = HashMap::new(); let mut merged_old_disk_based_diagnostics = false; loop { @@ -923,19 +925,42 @@ impl Buffer { } self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content); - self.diagnostics_update_count += 1; self.next_diagnostic_group_id = next_diagnostic_group_id; + // If old disk-based diagnostics were included in this new set, then + // set a timer to remove them if enough time passes before the next + // diagnostics update. if merged_old_disk_based_diagnostics { - // TODO - spawn a task to clear the old ones + self.clear_invalid_diagnostics_task = Some(cx.spawn(|this, mut cx| async move { + smol::Timer::after(Duration::from_secs(2)).await; + this.update(&mut cx, |this, cx| { + let content = this.snapshot(); + this.diagnostics = DiagnosticSet::from_sorted_entries( + this.diagnostics + .iter() + .filter(|d| d.diagnostic.is_valid) + .cloned(), + &content, + ); + let operation = this.did_update_diagnostics(cx); + this.send_operation(operation, cx); + }); + })); + } else if has_disk_based_diagnostics { + self.clear_invalid_diagnostics_task.take(); } + Ok(self.did_update_diagnostics(cx)) + } + + fn did_update_diagnostics(&mut self, cx: &mut ModelContext) -> Operation { + self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); - Ok(Operation::UpdateDiagnostics { + Operation::UpdateDiagnostics { diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), lamport_timestamp: self.text.lamport_clock.tick(), - }) + } } fn request_autoindent(&mut self, cx: &mut ModelContext) { From a86ba579835b4e3c734539eb7d1bb95f12f3ed3a Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 17:30:14 -0800 Subject: [PATCH 177/196] Add Editor::remove_blocks --- crates/editor/src/display_map.rs | 6 ++---- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/editor.rs | 14 ++++++++++---- crates/editor/src/element.rs | 8 +++----- 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index a3c95ad4bfa9ef89b9025495cd382cb72aca8aed..bcf7d14906d2a9c526ed74c29916eb40419b19c7 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -5,13 +5,11 @@ mod wrap_map; use crate::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; use block_map::{BlockMap, BlockPoint}; +use collections::{HashMap, HashSet}; use fold_map::{FoldMap, ToFoldPoint as _}; use gpui::{fonts::FontId, Entity, ModelContext, ModelHandle}; use language::{Point, Subscription as BufferSubscription}; -use std::{ - collections::{HashMap, HashSet}, - ops::Range, -}; +use std::ops::Range; use sum_tree::Bias; use tab_map::TabMap; use theme::SyntaxTheme; diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 5f879ef1638e08a5a14abee8eb6d36fcd3e40fac..7e0248530831cfd549d39bcf567cd1589d7a85ef 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1,11 +1,11 @@ use super::wrap_map::{self, WrapEdit, 
WrapPoint, WrapSnapshot}; use crate::{Anchor, ToOffset, ToPoint as _}; +use collections::{HashMap, HashSet}; use gpui::{AppContext, ElementBox}; use language::Chunk; use parking_lot::Mutex; use std::{ cmp::{self, Ordering}, - collections::{HashMap, HashSet}, fmt::Debug, ops::{Deref, Range}, sync::{ diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b2243a41516ac2ede06aa48ebbf64414d1377319..aa4495eed9d48bce7ea0ac6b54f61aa0ce7e533e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9,6 +9,7 @@ mod test; use aho_corasick::AhoCorasick; use clock::ReplicaId; +use collections::{HashMap, HashSet}; pub use display_map::DisplayPoint; use display_map::*; pub use element::*; @@ -27,7 +28,7 @@ use language::{ BracketPair, Buffer, Diagnostic, DiagnosticSeverity, Language, Point, Selection, SelectionGoal, TransactionId, }; -pub use multi_buffer::{Anchor, ExcerptProperties, MultiBuffer}; +pub use multi_buffer::{Anchor, ExcerptId, ExcerptProperties, MultiBuffer}; use multi_buffer::{AnchorRangeExt, MultiBufferChunks, MultiBufferSnapshot, ToOffset, ToPoint}; use postage::watch; use serde::{Deserialize, Serialize}; @@ -35,7 +36,6 @@ use smallvec::SmallVec; use smol::Timer; use std::{ cmp, - collections::HashMap, iter::{self, FromIterator}, mem, ops::{Deref, Range, RangeInclusive, Sub}, @@ -2893,7 +2893,7 @@ impl Editor { if is_valid != active_diagnostics.is_valid { active_diagnostics.is_valid = is_valid; - let mut new_styles = HashMap::new(); + let mut new_styles = HashMap::default(); for (block_id, diagnostic) in &active_diagnostics.blocks { new_styles.insert( *block_id, @@ -3051,7 +3051,7 @@ impl Editor { } } - let mut result = HashMap::new(); + let mut result = HashMap::default(); result.insert( self.replica_id(cx), @@ -3423,6 +3423,12 @@ impl Editor { } } + pub fn remove_blocks(&mut self, block_ids: HashSet, cx: &mut ViewContext) { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(block_ids, cx) + }); + } + pub fn insert_blocks

( &mut self, blocks: impl IntoIterator>, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 9c45207bb01ac5cf02c6f2d0deeaf37ed954ec43..cfe4a99dc4449d7b1eee7ce475053ed502cd3b69 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1,11 +1,10 @@ -use crate::display_map::{BlockContext, ToDisplayPoint}; - use super::{ + display_map::{BlockContext, ToDisplayPoint}, DisplayPoint, Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, Input, Scroll, - Select, SelectPhase, SoftWrap, MAX_LINE_LEN, + Select, SelectPhase, SoftWrap, ToPoint, MAX_LINE_LEN, }; -use crate::ToPoint; use clock::ReplicaId; +use collections::{BTreeMap, HashMap}; use gpui::{ color::Color, geometry::{ @@ -24,7 +23,6 @@ use language::Chunk; use smallvec::SmallVec; use std::{ cmp::{self, Ordering}, - collections::{BTreeMap, HashMap}, fmt::Write, ops::Range, }; From 435d405d103bb82aa7282991ea9af6e2f1d938d1 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 17:40:00 -0800 Subject: [PATCH 178/196] Implement MultiBuffer::insert_excerpt_after --- crates/editor/src/multi_buffer.rs | 80 ++++++++++++++++++++++++------- 1 file changed, 63 insertions(+), 17 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 309712432a40e6c73cf9e576abff31a2378a4025..a03af9d10acd4b31eda154d2a032d5f39f674c42 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -589,6 +589,18 @@ impl MultiBuffer { props: ExcerptProperties, cx: &mut ModelContext, ) -> ExcerptId + where + O: text::ToOffset, + { + self.insert_excerpt_after(&ExcerptId::max(), props, cx) + } + + pub fn insert_excerpt_after( + &mut self, + prev_excerpt_id: &ExcerptId, + props: ExcerptProperties, + cx: &mut ModelContext, + ) -> ExcerptId where O: text::ToOffset, { @@ -599,19 +611,28 @@ impl MultiBuffer { let range = buffer_snapshot.anchor_before(&props.range.start) ..buffer_snapshot.anchor_after(&props.range.end); let mut snapshot = self.snapshot.borrow_mut(); - let mut prev_id = None; - let edit_start = snapshot.excerpts.summary().text.bytes; - snapshot.excerpts.update_last( + let mut cursor = snapshot.excerpts.cursor::>(); + let mut new_excerpts = cursor.slice(&Some(prev_excerpt_id), Bias::Right, &()); + + let mut prev_id = ExcerptId::min(); + let edit_start = new_excerpts.summary().text.bytes; + new_excerpts.update_last( |excerpt| { excerpt.has_trailing_newline = true; - prev_id = Some(excerpt.id.clone()); + prev_id = excerpt.id.clone(); }, &(), ); - let id = ExcerptId::between(&prev_id.unwrap_or(ExcerptId::min()), &ExcerptId::max()); - self.buffers - .borrow_mut() + let mut next_id = ExcerptId::max(); + if let Some(next_excerpt) = cursor.item() { + next_id = next_excerpt.id.clone(); + } + + let id = ExcerptId::between(&prev_id, &next_id); + + let mut buffers = self.buffers.borrow_mut(); + let buffer_state = buffers .entry(props.buffer.id()) .or_insert_with(|| BufferState { last_version: buffer_snapshot.version().clone(), @@ -623,14 +644,28 @@ impl MultiBuffer { cx.subscribe(&props.buffer, Self::on_buffer_event), ], buffer: props.buffer.clone(), - }) - .excerpts - .push(id.clone()); - let excerpt = Excerpt::new(id.clone(), props.buffer.id(), buffer_snapshot, range, false); - snapshot.excerpts.push(excerpt, &()); + }); + if let Err(ix) = buffer_state.excerpts.binary_search(&id) { + buffer_state.excerpts.insert(ix, id.clone()); + } + + let excerpt = Excerpt::new( + id.clone(), + props.buffer.id(), + buffer_snapshot, + range, + 
cursor.item().is_some(), + ); + new_excerpts.push(excerpt, &()); + let edit_end = new_excerpts.summary().text.bytes; + + new_excerpts.push_tree(cursor.suffix(&()), &()); + drop(cursor); + snapshot.excerpts = new_excerpts; + self.subscriptions.publish_mut([Edit { old: edit_start..edit_start, - new: edit_start..snapshot.excerpts.summary().text.bytes, + new: edit_start..edit_end, }]); cx.notify(); @@ -2302,8 +2337,17 @@ mod tests { let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right); let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left); let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); + let prev_excerpt_ix = rng.gen_range(0..=expected_excerpts.len()); + let prev_excerpt_id = excerpt_ids + .get(prev_excerpt_ix) + .cloned() + .unwrap_or(ExcerptId::max()); + let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len()); + log::info!( - "Pushing excerpt for buffer {}: {:?}[{:?}] = {:?}", + "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", + excerpt_ix, + expected_excerpts.len(), buffer_handle.id(), buffer.text(), start_ix..end_ix, @@ -2311,7 +2355,8 @@ mod tests { ); let excerpt_id = list.update(cx, |list, cx| { - list.push_excerpt( + list.insert_excerpt_after( + &prev_excerpt_id, ExcerptProperties { buffer: &buffer_handle, range: start_ix..end_ix, @@ -2319,8 +2364,9 @@ mod tests { cx, ) }); - excerpt_ids.push(excerpt_id); - expected_excerpts.push((buffer_handle.clone(), anchor_range)); + + excerpt_ids.insert(excerpt_ix, excerpt_id); + expected_excerpts.insert(excerpt_ix, (buffer_handle.clone(), anchor_range)); } } From 3e59c61a3486a9e3b720ed9fcd80aadfa74ab529 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 18:00:53 -0800 Subject: [PATCH 179/196] Use MultiBuffer::insert_excerpt_after to update project diagnostics view --- crates/diagnostics/src/diagnostics.rs | 155 +++++++++++++++++++++++--- 1 file changed, 138 insertions(+), 17 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 9e028c53218d05894fd9bed61894eb3ffe980dcf..dda69cbf9c51552e469c0bf0d96263482b6baafd 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,8 +1,9 @@ use anyhow::Result; +use collections::{hash_map, HashMap, HashSet}; use editor::{ context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer, - display_map::{BlockDisposition, BlockProperties}, - BuildSettings, Editor, ExcerptProperties, MultiBuffer, + display_map::{BlockDisposition, BlockId, BlockProperties}, + BuildSettings, Editor, ExcerptId, ExcerptProperties, MultiBuffer, }; use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, @@ -11,7 +12,7 @@ use gpui::{ use language::{Bias, Buffer, Point}; use postage::watch; use project::Project; -use std::ops::Range; +use std::{ops::Range, path::Path, sync::Arc}; use util::TryFutureExt; use workspace::Workspace; @@ -33,9 +34,22 @@ struct ProjectDiagnostics { struct ProjectDiagnosticsEditor { editor: ViewHandle, excerpts: ModelHandle, + path_states: Vec<(Arc, PathState)>, build_settings: BuildSettings, } +#[derive(Default)] +struct PathState { + last_excerpt: ExcerptId, + diagnostic_group_states: HashMap, +} + +#[derive(Default)] +struct DiagnosticGroupState { + excerpts: Vec, + blocks: Vec, +} + impl ProjectDiagnostics { fn new(project: ModelHandle) -> Self { Self { project } @@ -118,6 +132,7 @@ impl ProjectDiagnosticsEditor { excerpts, editor, 
build_settings, + path_states: Default::default(), } } @@ -132,13 +147,66 @@ impl ProjectDiagnosticsEditor { } fn populate_excerpts(&mut self, buffer: ModelHandle, cx: &mut ViewContext) { - let mut blocks = Vec::new(); - let snapshot = buffer.read(cx).snapshot(); + let snapshot; + let path; + { + let buffer = buffer.read(cx); + snapshot = buffer.snapshot(); + if let Some(file) = buffer.file() { + path = file.path().clone(); + } else { + return; + } + } + let path_ix = match self + .path_states + .binary_search_by_key(&path.as_ref(), |e| e.0.as_ref()) + { + Ok(ix) => ix, + Err(ix) => { + self.path_states.insert( + ix, + ( + path.clone(), + PathState { + last_excerpt: ExcerptId::max(), + diagnostic_group_states: Default::default(), + }, + ), + ); + ix + } + }; + let mut prev_excerpt_id = if path_ix > 0 { + self.path_states[path_ix - 1].1.last_excerpt.clone() + } else { + ExcerptId::min() + }; + let path_state = &mut self.path_states[path_ix].1; + + let mut blocks_to_add = Vec::new(); + let mut blocks_to_remove = HashSet::default(); + let mut excerpts_to_remove = Vec::new(); + let mut block_counts_by_group = Vec::new(); + + let diagnostic_groups = snapshot.diagnostic_groups::(); let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { - for group in snapshot.diagnostic_groups::() { + for group in &diagnostic_groups { + let group_id = group.entries[0].diagnostic.group_id; + + let group_state = match path_state.diagnostic_group_states.entry(group_id) { + hash_map::Entry::Occupied(e) => { + prev_excerpt_id = e.get().excerpts.last().unwrap().clone(); + block_counts_by_group.push(0); + continue; + } + hash_map::Entry::Vacant(e) => e.insert(DiagnosticGroupState::default()), + }; + + let mut block_count = 0; let mut pending_range: Option<(Range, usize)> = None; - let mut is_first_excerpt = true; + let mut is_first_excerpt_for_group = true; for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { if let Some((range, start_ix)) = &mut pending_range { if let Some(entry) = entry { @@ -154,7 +222,8 @@ impl ProjectDiagnosticsEditor { Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), Bias::Left, ); - let excerpt_id = excerpts.push_excerpt( + let excerpt_id = excerpts.insert_excerpt_after( + &prev_excerpt_id, ExcerptProperties { buffer: &buffer, range: excerpt_start..excerpt_end, @@ -162,13 +231,18 @@ impl ProjectDiagnosticsEditor { excerpts_cx, ); + prev_excerpt_id = excerpt_id.clone(); + group_state.excerpts.push(excerpt_id.clone()); let header_position = (excerpt_id.clone(), language::Anchor::min()); - if is_first_excerpt { + + if is_first_excerpt_for_group { + is_first_excerpt_for_group = false; let primary = &group.entries[group.primary_ix].diagnostic; let mut header = primary.clone(); header.message = primary.message.split('\n').next().unwrap().to_string(); - blocks.push(BlockProperties { + block_count += 1; + blocks_to_add.push(BlockProperties { position: header_position, height: 2, render: diagnostic_header_renderer( @@ -179,7 +253,8 @@ impl ProjectDiagnosticsEditor { disposition: BlockDisposition::Above, }); } else { - blocks.push(BlockProperties { + block_count += 1; + blocks_to_add.push(BlockProperties { position: header_position, height: 1, render: context_header_renderer(self.build_settings.clone()), @@ -187,7 +262,6 @@ impl ProjectDiagnosticsEditor { }); } - is_first_excerpt = false; for entry in &group.entries[*start_ix..ix] { let mut diagnostic = entry.diagnostic.clone(); if diagnostic.is_primary { @@ -198,7 +272,8 @@ impl 
ProjectDiagnosticsEditor { if !diagnostic.message.is_empty() { let buffer_anchor = snapshot.anchor_before(entry.range.start); - blocks.push(BlockProperties { + block_count += 1; + blocks_to_add.push(BlockProperties { position: (excerpt_id.clone(), buffer_anchor), height: diagnostic.message.matches('\n').count() as u8 + 1, render: diagnostic_block_renderer( @@ -218,14 +293,36 @@ impl ProjectDiagnosticsEditor { pending_range = Some((entry.range.clone(), ix)); } } + + block_counts_by_group.push(block_count); } + path_state + .diagnostic_group_states + .retain(|group_id, group_state| { + if diagnostic_groups + .iter() + .any(|group| group.entries[0].diagnostic.group_id == *group_id) + { + true + } else { + excerpts_to_remove.extend(group_state.excerpts.drain(..)); + blocks_to_remove.extend(group_state.blocks.drain(..)); + false + } + }); + + excerpts_to_remove.sort(); + excerpts.remove_excerpts(excerpts_to_remove.iter(), excerpts_cx); excerpts.snapshot(excerpts_cx) }); + path_state.last_excerpt = prev_excerpt_id; + self.editor.update(cx, |editor, cx| { - editor.insert_blocks( - blocks.into_iter().map(|block| { + editor.remove_blocks(blocks_to_remove, cx); + let block_ids = editor.insert_blocks( + blocks_to_add.into_iter().map(|block| { let (excerpt_id, text_anchor) = block.position; BlockProperties { position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), @@ -236,6 +333,20 @@ impl ProjectDiagnosticsEditor { }), cx, ); + + let mut block_ids = block_ids.into_iter(); + let mut block_counts_by_group = block_counts_by_group.into_iter(); + for group in &diagnostic_groups { + let group_id = group.entries[0].diagnostic.group_id; + let block_count = block_counts_by_group.next().unwrap(); + let group_state = path_state + .diagnostic_group_states + .get_mut(&group_id) + .unwrap(); + group_state + .blocks + .extend(block_ids.by_ref().take(block_count)); + } }); cx.notify(); } @@ -454,7 +565,9 @@ mod tests { assert_eq!( editor.text(), concat!( - // Diagnostic group 1 (error for `y`) + // + // main.rs, diagnostic group 1 + // "\n", // primary message "\n", // filename " let x = vec![];\n", @@ -468,7 +581,9 @@ mod tests { " c(y);\n", "\n", // supporting diagnostic " d(x);\n", - // Diagnostic group 2 (error for `x`) + // + // main.rs, diagnostic group 2 + // "\n", // primary message "\n", // filename "fn main() {\n", @@ -516,11 +631,15 @@ mod tests { assert_eq!( editor.text(), concat!( + // // a.rs + // "\n", // primary message "\n", // filename "const a: i32 = 'a';\n", + // // main.rs, diagnostic group 1 + // "\n", // primary message "\n", // filename " let x = vec![];\n", @@ -534,7 +653,9 @@ mod tests { " c(y);\n", "\n", // supporting diagnostic " d(x);\n", + // // main.rs, diagnostic group 2 + // "\n", // primary message "\n", // filename "fn main() {\n", From c47340000d1cb1d8f0f31596a9d6c63e114b4259 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 22 Dec 2021 21:02:20 -0800 Subject: [PATCH 180/196] Fix remove_excerpts when removing the last N excerpts, N > 1 Also, generalize the randomized test to remove multiple excerpts at a time --- crates/editor/src/multi_buffer.rs | 85 +++++++++++++++++++++---------- 1 file changed, 57 insertions(+), 28 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index a03af9d10acd4b31eda154d2a032d5f39f674c42..fb1e369c1bb0afeac5cc6ebdccb74ef3774c1247 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -689,31 +689,52 @@ impl MultiBuffer { let mut new_excerpts = 
SumTree::new(); let mut cursor = snapshot.excerpts.cursor::<(Option<&ExcerptId>, usize)>(); let mut edits = Vec::new(); - for excerpt_id in excerpt_ids { + let mut excerpt_ids = excerpt_ids.into_iter().peekable(); + + while let Some(mut excerpt_id) = excerpt_ids.next() { + // Seek to the next excerpt to remove, preserving any preceding excerpts. new_excerpts.push_tree(cursor.slice(&Some(excerpt_id), Bias::Left, &()), &()); - if let Some(excerpt) = cursor.item() { - if excerpt.id == *excerpt_id { - let mut old_start = cursor.start().1; - let old_end = cursor.end(&()).1; - cursor.next(&()); + if let Some(mut excerpt) = cursor.item() { + if excerpt.id != *excerpt_id { + continue; + } + let mut old_start = cursor.start().1; + // Skip over the removed excerpt. + loop { if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { buffer_state.excerpts.retain(|id| id != excerpt_id); } + cursor.next(&()); - // When removing the last excerpt, remove the trailing newline from - // the previous excerpt. - if cursor.item().is_none() && old_start > 0 { - old_start -= 1; - new_excerpts.update_last(|e| e.has_trailing_newline = false, &()); + // Skip over any subsequent excerpts that are also removed. + if let Some(&next_excerpt_id) = excerpt_ids.peek() { + if let Some(next_excerpt) = cursor.item() { + if next_excerpt.id == *next_excerpt_id { + excerpt = next_excerpt; + excerpt_id = excerpt_ids.next().unwrap(); + continue; + } + } } - let new_start = new_excerpts.summary().text.bytes; - edits.push(Edit { - old: old_start..old_end, - new: new_start..new_start, - }); + break; } + + // When removing the last excerpt, remove the trailing newline from + // the previous excerpt. + if cursor.item().is_none() && old_start > 0 { + old_start -= 1; + new_excerpts.update_last(|e| e.has_trailing_newline = false, &()); + } + + // Push an edit for the removal of this run of excerpts. 
+ let old_end = cursor.start().1; + let new_start = new_excerpts.summary().text.bytes; + edits.push(Edit { + old: old_start..old_end, + new: new_start..new_start, + }); } } new_excerpts.push_tree(cursor.suffix(&()), &()); @@ -2311,18 +2332,26 @@ mod tests { buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx)); } 20..=29 if !expected_excerpts.is_empty() => { - let ix = rng.gen_range(0..expected_excerpts.len()); - let id = excerpt_ids.remove(ix); - let (buffer, range) = expected_excerpts.remove(ix); - let buffer = buffer.read(cx); - log::info!( - "Removing excerpt {}: {:?}", - ix, - buffer - .text_for_range(range.to_offset(&buffer)) - .collect::(), - ); - list.update(cx, |list, cx| list.remove_excerpts(&[id], cx)); + let mut ids_to_remove = vec![]; + for _ in 0..rng.gen_range(1..=3) { + if expected_excerpts.is_empty() { + break; + } + + let ix = rng.gen_range(0..expected_excerpts.len()); + ids_to_remove.push(excerpt_ids.remove(ix)); + let (buffer, range) = expected_excerpts.remove(ix); + let buffer = buffer.read(cx); + log::info!( + "Removing excerpt {}: {:?}", + ix, + buffer + .text_for_range(range.to_offset(&buffer)) + .collect::(), + ); + } + ids_to_remove.sort_unstable(); + list.update(cx, |list, cx| list.remove_excerpts(&ids_to_remove, cx)); } _ => { let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) { From 9164c5f239025b471eb5241110103f33aed30d5b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 09:57:50 +0100 Subject: [PATCH 181/196] Emit an `UpdateDiagnostics` from Worktree when buffer diagnostics change --- crates/language/src/buffer.rs | 13 ++++--------- crates/project/src/project.rs | 2 +- crates/project/src/worktree.rs | 30 +++++++++++++++++++++++++++++- 3 files changed, 34 insertions(+), 11 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 10527b2a3d9157bc26b636841efe819a550bfd78..1928dab71731dfb48c14ad43cf9b4e60529642d7 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -725,6 +725,10 @@ impl Buffer { cx.notify(); } + pub fn all_diagnostics<'a>(&'a self) -> impl 'a + Iterator> { + self.diagnostics.iter() + } + pub fn update_diagnostics( &mut self, version: Option, @@ -1859,15 +1863,6 @@ impl BufferSnapshot { }) } - pub fn all_diagnostics<'a, O>(&'a self) -> impl 'a + Iterator> - where - O: 'a + FromAnchor, - { - self.diagnostics - .iter() - .map(|diagnostic| diagnostic.resolve(self)) - } - pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index ae2730e9cbcdf0c043c90f0f601c78268efccc95..d0adbff7932590cfa32fd9efa175a4e6f5112bdc 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -78,7 +78,7 @@ pub struct DiagnosticSummary { } impl DiagnosticSummary { - fn new(diagnostics: &[DiagnosticEntry]) -> Self { + fn new<'a, T: 'a>(diagnostics: impl IntoIterator>) -> Self { let mut this = Self { error_count: 0, warning_count: 0, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 53ea5f1bf84cc660b3c001e54bdc56d066fde00b..dcb1f401d64b2d7040990f2a3740377c4abf077d 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1097,15 +1097,43 @@ impl LocalWorktree { buffer }); - this.update(&mut cx, |this, _| { + this.update(&mut cx, |this, cx| { let this = this.as_local_mut().unwrap(); this.open_buffers.insert(buffer.id(), buffer.downgrade()); + cx.subscribe(&buffer, |worktree, buffer, event, cx| { + 
worktree + .as_local_mut() + .unwrap() + .on_buffer_event(buffer, event, cx); + }) + .detach(); }); Ok(buffer) }) } + fn on_buffer_event( + &mut self, + buffer: ModelHandle, + event: &language::Event, + cx: &mut ModelContext, + ) { + match event { + language::Event::DiagnosticsUpdated => { + let buffer = buffer.read(cx); + if let Some(path) = buffer.file().map(|file| file.path().clone()) { + let diagnostics = buffer.all_diagnostics(); + self.diagnostic_summaries + .insert(path.clone(), DiagnosticSummary::new(diagnostics)); + cx.emit(Event::DiagnosticsUpdated(path)); + cx.notify(); + } + } + _ => {} + } + } + pub fn open_remote_buffer( &mut self, envelope: TypedEnvelope, From da460edb8bea5eddb893a92e30edaf90a6d547dc Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 09:59:39 +0100 Subject: [PATCH 182/196] Remove BufferState when the last buffer's excerpt is removed --- crates/editor/src/multi_buffer.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index fb1e369c1bb0afeac5cc6ebdccb74ef3774c1247..cb011f30da47fcf08497e4e07ade04632c6e2546 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -704,6 +704,9 @@ impl MultiBuffer { loop { if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) { buffer_state.excerpts.retain(|id| id != excerpt_id); + if buffer_state.excerpts.is_empty() { + buffers.remove(&excerpt.buffer_id); + } } cursor.next(&()); From dcf26acaace2b5e784dc60f7843b36362c51b7c3 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 15:32:21 +0100 Subject: [PATCH 183/196] Use a different invalidation strategy for project-wide diagnostics --- crates/diagnostics/src/diagnostics.rs | 452 +++++++++++++++++--------- crates/editor/src/editor.rs | 25 +- crates/language/src/buffer.rs | 149 +-------- crates/language/src/diagnostic_set.rs | 18 +- crates/project/src/worktree.rs | 30 +- 5 files changed, 325 insertions(+), 349 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index dda69cbf9c51552e469c0bf0d96263482b6baafd..74d149fa6236668877e56ecfa7330bc58d7a80e9 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use collections::{hash_map, HashMap, HashSet}; +use collections::{HashMap, HashSet}; use editor::{ context_header_renderer, diagnostic_block_renderer, diagnostic_header_renderer, display_map::{BlockDisposition, BlockId, BlockProperties}, @@ -9,20 +9,29 @@ use gpui::{ action, elements::*, keymap::Binding, AppContext, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View, ViewContext, ViewHandle, }; -use language::{Bias, Buffer, Point}; +use language::{Bias, Buffer, Diagnostic, DiagnosticEntry, Point}; use postage::watch; use project::Project; -use std::{ops::Range, path::Path, sync::Arc}; +use std::{cmp::Ordering, ops::Range, path::Path, sync::Arc}; use util::TryFutureExt; use workspace::Workspace; action!(Toggle); +action!(ClearInvalid); const CONTEXT_LINE_COUNT: u32 = 1; pub fn init(cx: &mut MutableAppContext) { - cx.add_bindings([Binding::new("alt-shift-D", Toggle, None)]); + cx.add_bindings([ + Binding::new("alt-shift-D", Toggle, None), + Binding::new( + "alt-shift-C", + ClearInvalid, + Some("ProjectDiagnosticsEditor"), + ), + ]); cx.add_action(ProjectDiagnosticsEditor::toggle); + cx.add_action(ProjectDiagnosticsEditor::clear_invalid); } type Event = editor::Event; @@ -34,20 +43,22 @@ 
struct ProjectDiagnostics { struct ProjectDiagnosticsEditor { editor: ViewHandle, excerpts: ModelHandle, - path_states: Vec<(Arc, PathState)>, + path_states: Vec<(Arc, Vec)>, build_settings: BuildSettings, } -#[derive(Default)] -struct PathState { - last_excerpt: ExcerptId, - diagnostic_group_states: HashMap, -} - -#[derive(Default)] struct DiagnosticGroupState { + primary_diagnostic: DiagnosticEntry, excerpts: Vec, - blocks: Vec, + blocks: HashMap, + block_count: usize, + is_valid: bool, +} + +enum DiagnosticBlock { + Header(Diagnostic), + Inline(Diagnostic), + Context, } impl ProjectDiagnostics { @@ -146,6 +157,38 @@ impl ProjectDiagnosticsEditor { workspace.add_item(diagnostics, cx); } + fn clear_invalid(&mut self, _: &ClearInvalid, cx: &mut ViewContext) { + let mut blocks_to_delete = HashSet::default(); + let mut excerpts_to_delete = Vec::new(); + let mut path_ixs_to_delete = Vec::new(); + for (ix, (_, groups)) in self.path_states.iter_mut().enumerate() { + groups.retain(|group| { + if group.is_valid { + true + } else { + blocks_to_delete.extend(group.blocks.keys().copied()); + excerpts_to_delete.extend(group.excerpts.iter().cloned()); + false + } + }); + + if groups.is_empty() { + path_ixs_to_delete.push(ix); + } + } + + for ix in path_ixs_to_delete.into_iter().rev() { + self.path_states.remove(ix); + } + + self.excerpts.update(cx, |excerpts, cx| { + excerpts_to_delete.sort_unstable(); + excerpts.remove_excerpts(&excerpts_to_delete, cx) + }); + self.editor + .update(cx, |editor, cx| editor.remove_blocks(blocks_to_delete, cx)); + } + fn populate_excerpts(&mut self, buffer: ModelHandle, cx: &mut ViewContext) { let snapshot; let path; @@ -165,189 +208,260 @@ impl ProjectDiagnosticsEditor { { Ok(ix) => ix, Err(ix) => { - self.path_states.insert( - ix, - ( - path.clone(), - PathState { - last_excerpt: ExcerptId::max(), - diagnostic_group_states: Default::default(), - }, - ), - ); + self.path_states + .insert(ix, (path.clone(), Default::default())); ix } }; + let mut prev_excerpt_id = if path_ix > 0 { - self.path_states[path_ix - 1].1.last_excerpt.clone() + let prev_path_last_group = &self.path_states[path_ix - 1].1.last().unwrap(); + prev_path_last_group.excerpts.last().unwrap().clone() } else { ExcerptId::min() }; - let path_state = &mut self.path_states[path_ix].1; + let groups = &mut self.path_states[path_ix].1; + let mut groups_to_add = Vec::new(); let mut blocks_to_add = Vec::new(); - let mut blocks_to_remove = HashSet::default(); - let mut excerpts_to_remove = Vec::new(); - let mut block_counts_by_group = Vec::new(); - - let diagnostic_groups = snapshot.diagnostic_groups::(); + let mut blocks_to_restyle = HashMap::default(); + let mut diagnostic_blocks = Vec::new(); let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { - for group in &diagnostic_groups { - let group_id = group.entries[0].diagnostic.group_id; - - let group_state = match path_state.diagnostic_group_states.entry(group_id) { - hash_map::Entry::Occupied(e) => { - prev_excerpt_id = e.get().excerpts.last().unwrap().clone(); - block_counts_by_group.push(0); - continue; - } - hash_map::Entry::Vacant(e) => e.insert(DiagnosticGroupState::default()), - }; - - let mut block_count = 0; - let mut pending_range: Option<(Range, usize)> = None; - let mut is_first_excerpt_for_group = true; - for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { - if let Some((range, start_ix)) = &mut pending_range { - if let Some(entry) = entry { - if entry.range.start.row <= range.end.row + 1 + 
CONTEXT_LINE_COUNT * 2 { - range.end = range.end.max(entry.range.end); - continue; + let mut old_groups = groups.iter_mut().peekable(); + let mut new_groups = snapshot.diagnostic_groups().into_iter().peekable(); + + loop { + let mut to_insert = None; + let mut to_invalidate = None; + let mut to_validate = None; + match (old_groups.peek(), new_groups.peek()) { + (None, None) => break, + (None, Some(_)) => to_insert = new_groups.next(), + (Some(_), None) => to_invalidate = old_groups.next(), + (Some(old_group), Some(new_group)) => { + let old_primary = &old_group.primary_diagnostic; + let new_primary = &new_group.entries[new_group.primary_ix]; + match compare_diagnostics(old_primary, new_primary, &snapshot) { + Ordering::Less => to_invalidate = old_groups.next(), + Ordering::Equal => { + to_validate = old_groups.next(); + new_groups.next(); } + Ordering::Greater => to_insert = new_groups.next(), } + } + } - let excerpt_start = - Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0); - let excerpt_end = snapshot.clip_point( - Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), - Bias::Left, - ); - let excerpt_id = excerpts.insert_excerpt_after( - &prev_excerpt_id, - ExcerptProperties { - buffer: &buffer, - range: excerpt_start..excerpt_end, - }, - excerpts_cx, - ); - - prev_excerpt_id = excerpt_id.clone(); - group_state.excerpts.push(excerpt_id.clone()); - let header_position = (excerpt_id.clone(), language::Anchor::min()); - - if is_first_excerpt_for_group { - is_first_excerpt_for_group = false; - let primary = &group.entries[group.primary_ix].diagnostic; - let mut header = primary.clone(); - header.message = - primary.message.split('\n').next().unwrap().to_string(); - block_count += 1; - blocks_to_add.push(BlockProperties { - position: header_position, - height: 2, - render: diagnostic_header_renderer( - buffer.clone(), - header, - self.build_settings.clone(), - ), - disposition: BlockDisposition::Above, - }); - } else { - block_count += 1; - blocks_to_add.push(BlockProperties { - position: header_position, - height: 1, - render: context_header_renderer(self.build_settings.clone()), - disposition: BlockDisposition::Above, - }); - } - - for entry in &group.entries[*start_ix..ix] { - let mut diagnostic = entry.diagnostic.clone(); - if diagnostic.is_primary { - let mut lines = entry.diagnostic.message.split('\n'); - lines.next(); - diagnostic.message = lines.collect(); + if let Some(group) = to_insert { + let mut group_state = DiagnosticGroupState { + primary_diagnostic: group.entries[group.primary_ix].clone(), + excerpts: Default::default(), + blocks: Default::default(), + block_count: 0, + is_valid: true, + }; + let mut pending_range: Option<(Range, usize)> = None; + let mut is_first_excerpt_for_group = true; + for (ix, entry) in group.entries.iter().map(Some).chain([None]).enumerate() { + let resolved_entry = entry.map(|e| e.resolve::(&snapshot)); + if let Some((range, start_ix)) = &mut pending_range { + if let Some(entry) = resolved_entry.as_ref() { + if entry.range.start.row + <= range.end.row + 1 + CONTEXT_LINE_COUNT * 2 + { + range.end = range.end.max(entry.range.end); + continue; + } } - if !diagnostic.message.is_empty() { - let buffer_anchor = snapshot.anchor_before(entry.range.start); - block_count += 1; + let excerpt_start = + Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0); + let excerpt_end = snapshot.clip_point( + Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX), + Bias::Left, + ); + let excerpt_id = excerpts.insert_excerpt_after( + 
&prev_excerpt_id, + ExcerptProperties { + buffer: &buffer, + range: excerpt_start..excerpt_end, + }, + excerpts_cx, + ); + + prev_excerpt_id = excerpt_id.clone(); + group_state.excerpts.push(excerpt_id.clone()); + let header_position = (excerpt_id.clone(), language::Anchor::min()); + + if is_first_excerpt_for_group { + is_first_excerpt_for_group = false; + let primary = &group.entries[group.primary_ix].diagnostic; + let mut header = primary.clone(); + header.message = + primary.message.split('\n').next().unwrap().to_string(); + group_state.block_count += 1; + diagnostic_blocks.push(DiagnosticBlock::Header(header.clone())); blocks_to_add.push(BlockProperties { - position: (excerpt_id.clone(), buffer_anchor), - height: diagnostic.message.matches('\n').count() as u8 + 1, - render: diagnostic_block_renderer( - diagnostic, + position: header_position, + height: 2, + render: diagnostic_header_renderer( + buffer.clone(), + header, true, self.build_settings.clone(), ), - disposition: BlockDisposition::Below, + disposition: BlockDisposition::Above, + }); + } else { + group_state.block_count += 1; + diagnostic_blocks.push(DiagnosticBlock::Context); + blocks_to_add.push(BlockProperties { + position: header_position, + height: 1, + render: context_header_renderer(self.build_settings.clone()), + disposition: BlockDisposition::Above, }); } + + for entry in &group.entries[*start_ix..ix] { + let mut diagnostic = entry.diagnostic.clone(); + if diagnostic.is_primary { + let mut lines = entry.diagnostic.message.split('\n'); + lines.next(); + diagnostic.message = lines.collect(); + } + + if !diagnostic.message.is_empty() { + group_state.block_count += 1; + diagnostic_blocks + .push(DiagnosticBlock::Inline(diagnostic.clone())); + blocks_to_add.push(BlockProperties { + position: (excerpt_id.clone(), entry.range.start.clone()), + height: diagnostic.message.matches('\n').count() as u8 + 1, + render: diagnostic_block_renderer( + diagnostic, + true, + self.build_settings.clone(), + ), + disposition: BlockDisposition::Below, + }); + } + } + + pending_range.take(); } - pending_range.take(); + if let Some(entry) = resolved_entry { + pending_range = Some((entry.range.clone(), ix)); + } } - if let Some(entry) = entry { - pending_range = Some((entry.range.clone(), ix)); + groups_to_add.push(group_state); + } else if let Some(to_invalidate) = to_invalidate { + for (block_id, block) in &to_invalidate.blocks { + match block { + DiagnosticBlock::Header(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + diagnostic_header_renderer( + buffer.clone(), + diagnostic.clone(), + false, + self.build_settings.clone(), + ), + ); + } + DiagnosticBlock::Inline(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + diagnostic_block_renderer( + diagnostic.clone(), + false, + self.build_settings.clone(), + ), + ); + } + DiagnosticBlock::Context => {} + } } - } - block_counts_by_group.push(block_count); - } - - path_state - .diagnostic_group_states - .retain(|group_id, group_state| { - if diagnostic_groups - .iter() - .any(|group| group.entries[0].diagnostic.group_id == *group_id) - { - true - } else { - excerpts_to_remove.extend(group_state.excerpts.drain(..)); - blocks_to_remove.extend(group_state.blocks.drain(..)); - false + to_invalidate.is_valid = false; + prev_excerpt_id = to_invalidate.excerpts.last().unwrap().clone(); + } else if let Some(to_validate) = to_validate { + for (block_id, block) in &to_validate.blocks { + match block { + DiagnosticBlock::Header(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + 
diagnostic_header_renderer( + buffer.clone(), + diagnostic.clone(), + true, + self.build_settings.clone(), + ), + ); + } + DiagnosticBlock::Inline(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + diagnostic_block_renderer( + diagnostic.clone(), + true, + self.build_settings.clone(), + ), + ); + } + DiagnosticBlock::Context => {} + } } - }); + to_validate.is_valid = true; + prev_excerpt_id = to_validate.excerpts.last().unwrap().clone(); + } else { + unreachable!(); + } + } - excerpts_to_remove.sort(); - excerpts.remove_excerpts(excerpts_to_remove.iter(), excerpts_cx); excerpts.snapshot(excerpts_cx) }); - path_state.last_excerpt = prev_excerpt_id; - self.editor.update(cx, |editor, cx| { - editor.remove_blocks(blocks_to_remove, cx); - let block_ids = editor.insert_blocks( - blocks_to_add.into_iter().map(|block| { - let (excerpt_id, text_anchor) = block.position; - BlockProperties { - position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), - height: block.height, - render: block.render, - disposition: block.disposition, - } - }), - cx, - ); + editor.replace_blocks(blocks_to_restyle, cx); + let mut block_ids = editor + .insert_blocks( + blocks_to_add.into_iter().map(|block| { + let (excerpt_id, text_anchor) = block.position; + BlockProperties { + position: excerpts_snapshot.anchor_in_excerpt(excerpt_id, text_anchor), + height: block.height, + render: block.render, + disposition: block.disposition, + } + }), + cx, + ) + .into_iter() + .zip(diagnostic_blocks); - let mut block_ids = block_ids.into_iter(); - let mut block_counts_by_group = block_counts_by_group.into_iter(); - for group in &diagnostic_groups { - let group_id = group.entries[0].diagnostic.group_id; - let block_count = block_counts_by_group.next().unwrap(); - let group_state = path_state - .diagnostic_group_states - .get_mut(&group_id) - .unwrap(); - group_state - .blocks - .extend(block_ids.by_ref().take(block_count)); + for group_state in &mut groups_to_add { + group_state.blocks = block_ids.by_ref().take(group_state.block_count).collect(); } }); + + groups.extend(groups_to_add); + groups.sort_unstable_by(|a, b| { + let range_a = &a.primary_diagnostic.range; + let range_b = &b.primary_diagnostic.range; + range_a + .start + .cmp(&range_b.start, &snapshot) + .unwrap() + .then_with(|| range_a.end.cmp(&range_b.end, &snapshot).unwrap()) + }); + + if groups.is_empty() { + self.path_states.remove(path_ix); + } + cx.notify(); } } @@ -415,6 +529,24 @@ impl workspace::ItemView for ProjectDiagnosticsEditor { } } +fn compare_diagnostics( + lhs: &DiagnosticEntry, + rhs: &DiagnosticEntry, + snapshot: &language::BufferSnapshot, +) -> Ordering { + lhs.range + .start + .to_offset(&snapshot) + .cmp(&rhs.range.start.to_offset(snapshot)) + .then_with(|| { + lhs.range + .end + .to_offset(&snapshot) + .cmp(&rhs.range.end.to_offset(snapshot)) + }) + .then_with(|| lhs.diagnostic.message.cmp(&rhs.diagnostic.message)) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index aa4495eed9d48bce7ea0ac6b54f61aa0ce7e533e..5e816e8b8c2374f7d6d469b119c472f422e8fb46 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3423,12 +3423,6 @@ impl Editor { } } - pub fn remove_blocks(&mut self, block_ids: HashSet, cx: &mut ViewContext) { - self.display_map.update(cx, |display_map, cx| { - display_map.remove_blocks(block_ids, cx) - }); - } - pub fn insert_blocks
( &mut self, blocks: impl IntoIterator>, @@ -3444,6 +3438,22 @@ impl Editor { blocks } + pub fn replace_blocks( + &mut self, + blocks: HashMap, + cx: &mut ViewContext, + ) { + self.display_map + .update(cx, |display_map, _| display_map.replace_blocks(blocks)); + self.request_autoscroll(Autoscroll::Fit, cx); + } + + pub fn remove_blocks(&mut self, block_ids: HashSet, cx: &mut ViewContext) { + self.display_map.update(cx, |display_map, cx| { + display_map.remove_blocks(block_ids, cx) + }); + } + pub fn longest_row(&self, cx: &mut MutableAppContext) -> u32 { self.display_map .update(cx, |map, cx| map.snapshot(cx)) @@ -3790,12 +3800,13 @@ pub fn diagnostic_block_renderer( pub fn diagnostic_header_renderer( buffer: ModelHandle, diagnostic: Diagnostic, + is_valid: bool, build_settings: BuildSettings, ) -> RenderBlock { Arc::new(move |cx| { let settings = build_settings(cx); let mut text_style = settings.style.text.clone(); - text_style.color = diagnostic_style(diagnostic.severity, true, &settings.style).text; + text_style.color = diagnostic_style(diagnostic.severity, is_valid, &settings.style).text; let file_path = if let Some(file) = buffer.read(&**cx).file() { file.path().to_string_lossy().to_string() } else { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1928dab71731dfb48c14ad43cf9b4e60529642d7..f1dfcc8ccaa22a22b653fc2213235ba4c7162292 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -7,7 +7,6 @@ pub use crate::{ }; use anyhow::{anyhow, Result}; use clock::ReplicaId; -use collections::hash_map; use futures::FutureExt as _; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task}; use lazy_static::lazy_static; @@ -69,8 +68,6 @@ pub struct Buffer { remote_selections: TreeMap]>>, diagnostics: DiagnosticSet, diagnostics_update_count: usize, - clear_invalid_diagnostics_task: Option>, - next_diagnostic_group_id: usize, language_server: Option, deferred_ops: OperationQueue, #[cfg(test)] @@ -363,8 +360,6 @@ impl Buffer { remote_selections: Default::default(), diagnostics: Default::default(), diagnostics_update_count: 0, - next_diagnostic_group_id: 0, - clear_invalid_diagnostics_task: None, language_server: None, deferred_ops: OperationQueue::new(), #[cfg(test)] @@ -777,7 +772,6 @@ impl Buffer { .peekable(); let mut last_edit_old_end = PointUtf16::zero(); let mut last_edit_new_end = PointUtf16::zero(); - let mut has_disk_based_diagnostics = false; let mut ix = 0; 'outer: while ix < diagnostics.len() { let entry = &mut diagnostics[ix]; @@ -793,7 +787,6 @@ impl Buffer { .as_ref() .map_or(false, |source| disk_based_sources.contains(source)) { - has_disk_based_diagnostics = true; while let Some(edit) = edits_since_save.peek() { if edit.old.end <= start { last_edit_old_end = edit.old.end; @@ -826,145 +819,14 @@ impl Buffer { ix += 1; } drop(edits_since_save); - - let mut merged_diagnostics = Vec::with_capacity(diagnostics.len()); - let mut old_diagnostics = self - .diagnostics - .iter() - .map(|entry| { - ( - entry, - entry - .diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)), - ) - }) - .peekable(); - let mut new_diagnostics = diagnostics - .into_iter() - .map(|entry| DiagnosticEntry { - range: content.anchor_before(entry.range.start) - ..content.anchor_after(entry.range.end), - diagnostic: entry.diagnostic, - }) - .peekable(); - - // Incorporate the *old* diagnostics into the new diagnostics set, in two ways: - // 1. 
Recycle group ids - diagnostic groups whose primary diagnostic has not - // changed should use the same group id as before, so that downstream code - // can determine which diagnostics are new. - // 2. Preserve disk-based diagnostics - Some diagnostic sources are reported - // on a less frequent basis than others. If these sources are absent from this - // message, then preserve the previous diagnostics for those sources, but mark - // them as invalid, and set a timer to clear them out. - let mut group_id_replacements = HashMap::new(); - let mut merged_old_disk_based_diagnostics = false; - loop { - match (old_diagnostics.peek(), new_diagnostics.peek()) { - (None, None) => break, - (None, Some(_)) => { - merged_diagnostics.push(new_diagnostics.next().unwrap()); - } - (Some(_), None) => { - let (old_entry, is_disk_based) = old_diagnostics.next().unwrap(); - if is_disk_based && !has_disk_based_diagnostics { - let mut old_entry = old_entry.clone(); - old_entry.diagnostic.is_valid = false; - merged_old_disk_based_diagnostics = true; - merged_diagnostics.push(old_entry); - } - } - (Some((old, _)), Some(new)) => { - let ordering = Ordering::Equal - .then_with(|| old.range.start.cmp(&new.range.start, content).unwrap()) - .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap()) - .then_with(|| compare_diagnostics(&old.diagnostic, &new.diagnostic)); - match ordering { - Ordering::Less => { - let (old_entry, is_disk_based) = old_diagnostics.next().unwrap(); - if is_disk_based && !has_disk_based_diagnostics { - let mut old_entry = old_entry.clone(); - old_entry.diagnostic.is_valid = false; - merged_old_disk_based_diagnostics = true; - merged_diagnostics.push(old_entry); - } - } - Ordering::Equal => { - let (old_entry, _) = old_diagnostics.next().unwrap(); - let new_entry = new_diagnostics.next().unwrap(); - if new_entry.diagnostic.is_primary { - group_id_replacements.insert( - new_entry.diagnostic.group_id, - old_entry.diagnostic.group_id, - ); - } - merged_diagnostics.push(new_entry); - } - Ordering::Greater => { - let new_entry = new_diagnostics.next().unwrap(); - merged_diagnostics.push(new_entry); - } - } - } - } - } - drop(old_diagnostics); - - // Having determined which group ids should be recycled, renumber all of - // groups. Any new group that does not correspond to an old group receives - // a brand new group id. - let mut next_diagnostic_group_id = self.next_diagnostic_group_id; - for entry in &mut merged_diagnostics { - if entry.diagnostic.is_valid { - match group_id_replacements.entry(entry.diagnostic.group_id) { - hash_map::Entry::Occupied(e) => entry.diagnostic.group_id = *e.get(), - hash_map::Entry::Vacant(e) => { - entry.diagnostic.group_id = post_inc(&mut next_diagnostic_group_id); - e.insert(entry.diagnostic.group_id); - } - } - } - } - - self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content); - self.next_diagnostic_group_id = next_diagnostic_group_id; - - // If old disk-based diagnostics were included in this new set, then - // set a timer to remove them if enough time passes before the next - // diagnostics update. 
- if merged_old_disk_based_diagnostics { - self.clear_invalid_diagnostics_task = Some(cx.spawn(|this, mut cx| async move { - smol::Timer::after(Duration::from_secs(2)).await; - this.update(&mut cx, |this, cx| { - let content = this.snapshot(); - this.diagnostics = DiagnosticSet::from_sorted_entries( - this.diagnostics - .iter() - .filter(|d| d.diagnostic.is_valid) - .cloned(), - &content, - ); - let operation = this.did_update_diagnostics(cx); - this.send_operation(operation, cx); - }); - })); - } else if has_disk_based_diagnostics { - self.clear_invalid_diagnostics_task.take(); - } - - Ok(self.did_update_diagnostics(cx)) - } - - fn did_update_diagnostics(&mut self, cx: &mut ModelContext) -> Operation { + self.diagnostics = DiagnosticSet::new(diagnostics, content); self.diagnostics_update_count += 1; cx.notify(); cx.emit(Event::DiagnosticsUpdated); - Operation::UpdateDiagnostics { + Ok(Operation::UpdateDiagnostics { diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), lamport_timestamp: self.text.lamport_clock.tick(), - } + }) } fn request_autoindent(&mut self, cx: &mut ModelContext) { @@ -1874,10 +1736,7 @@ impl BufferSnapshot { self.diagnostics.range(search_range, self, true) } - pub fn diagnostic_groups(&self) -> Vec> - where - O: FromAnchor + Ord + Copy, - { + pub fn diagnostic_groups(&self) -> Vec> { self.diagnostics.groups(self) } diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 0918424ca0ee83e9df1c2f94bd7e3d85e93470a2..a246a104aa7d0f2e3739d33d9328e0f29cb73beb 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -104,23 +104,19 @@ impl DiagnosticSet { }) } - pub fn groups(&self, buffer: &text::BufferSnapshot) -> Vec> - where - O: FromAnchor + Ord + Copy, - { + pub fn groups(&self, buffer: &text::BufferSnapshot) -> Vec> { let mut groups = HashMap::default(); for entry in self.diagnostics.iter() { - let entry = entry.resolve(buffer); groups .entry(entry.diagnostic.group_id) .or_insert(Vec::new()) - .push(entry); + .push(entry.clone()); } let mut groups = groups .into_values() .filter_map(|mut entries| { - entries.sort_unstable_by_key(|entry| entry.range.start); + entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap()); entries .iter() .position(|entry| entry.diagnostic.is_primary) @@ -130,7 +126,13 @@ impl DiagnosticSet { }) }) .collect::>(); - groups.sort_unstable_by_key(|group| group.entries[group.primary_ix].range.start); + groups.sort_unstable_by(|a, b| { + a.entries[a.primary_ix] + .range + .start + .cmp(&b.entries[b.primary_ix].range.start, buffer) + .unwrap() + }); groups } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index dcb1f401d64b2d7040990f2a3740377c4abf077d..53ea5f1bf84cc660b3c001e54bdc56d066fde00b 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1097,43 +1097,15 @@ impl LocalWorktree { buffer }); - this.update(&mut cx, |this, cx| { + this.update(&mut cx, |this, _| { let this = this.as_local_mut().unwrap(); this.open_buffers.insert(buffer.id(), buffer.downgrade()); - cx.subscribe(&buffer, |worktree, buffer, event, cx| { - worktree - .as_local_mut() - .unwrap() - .on_buffer_event(buffer, event, cx); - }) - .detach(); }); Ok(buffer) }) } - fn on_buffer_event( - &mut self, - buffer: ModelHandle, - event: &language::Event, - cx: &mut ModelContext, - ) { - match event { - language::Event::DiagnosticsUpdated => { - let buffer = buffer.read(cx); - if let Some(path) = 
buffer.file().map(|file| file.path().clone()) { - let diagnostics = buffer.all_diagnostics(); - self.diagnostic_summaries - .insert(path.clone(), DiagnosticSummary::new(diagnostics)); - cx.emit(Event::DiagnosticsUpdated(path)); - cx.notify(); - } - } - _ => {} - } - } - pub fn open_remote_buffer( &mut self, envelope: TypedEnvelope, From 304afc181387312e7c019faf77c2b666bf4db7cc Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 16:33:50 +0100 Subject: [PATCH 184/196] Only preserve excerpts for invalid diagnostics if they contain cursors Co-Authored-By: Nathan Sobo --- crates/diagnostics/src/diagnostics.rs | 89 +++++++++++++++--------- crates/editor/src/editor.rs | 4 ++ crates/editor/src/multi_buffer/anchor.rs | 4 ++ 3 files changed, 64 insertions(+), 33 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 74d149fa6236668877e56ecfa7330bc58d7a80e9..f5565618e4dde89a71bc766ef9b2bcfd0b7f2058 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -223,11 +223,20 @@ impl ProjectDiagnosticsEditor { let groups = &mut self.path_states[path_ix].1; let mut groups_to_add = Vec::new(); + let mut group_ixs_to_remove = Vec::new(); let mut blocks_to_add = Vec::new(); let mut blocks_to_restyle = HashMap::default(); + let mut blocks_to_remove = HashSet::default(); + let selected_excerpts = self + .editor + .read(cx) + .local_anchor_selections() + .iter() + .flat_map(|s| [s.start.excerpt_id().clone(), s.end.excerpt_id().clone()]) + .collect::>(); let mut diagnostic_blocks = Vec::new(); let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { - let mut old_groups = groups.iter_mut().peekable(); + let mut old_groups = groups.iter_mut().enumerate().peekable(); let mut new_groups = snapshot.diagnostic_groups().into_iter().peekable(); loop { @@ -238,7 +247,7 @@ impl ProjectDiagnosticsEditor { (None, None) => break, (None, Some(_)) => to_insert = new_groups.next(), (Some(_), None) => to_invalidate = old_groups.next(), - (Some(old_group), Some(new_group)) => { + (Some((_, old_group)), Some(new_group)) => { let old_primary = &old_group.primary_diagnostic; let new_primary = &new_group.entries[new_group.primary_ix]; match compare_diagnostics(old_primary, new_primary, &snapshot) { @@ -357,38 +366,48 @@ impl ProjectDiagnosticsEditor { } groups_to_add.push(group_state); - } else if let Some(to_invalidate) = to_invalidate { - for (block_id, block) in &to_invalidate.blocks { - match block { - DiagnosticBlock::Header(diagnostic) => { - blocks_to_restyle.insert( - *block_id, - diagnostic_header_renderer( - buffer.clone(), - diagnostic.clone(), - false, - self.build_settings.clone(), - ), - ); - } - DiagnosticBlock::Inline(diagnostic) => { - blocks_to_restyle.insert( - *block_id, - diagnostic_block_renderer( - diagnostic.clone(), - false, - self.build_settings.clone(), - ), - ); + } else if let Some((group_ix, group_state)) = to_invalidate { + if group_state + .excerpts + .iter() + .any(|excerpt_id| selected_excerpts.contains(excerpt_id)) + { + for (block_id, block) in &group_state.blocks { + match block { + DiagnosticBlock::Header(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + diagnostic_header_renderer( + buffer.clone(), + diagnostic.clone(), + false, + self.build_settings.clone(), + ), + ); + } + DiagnosticBlock::Inline(diagnostic) => { + blocks_to_restyle.insert( + *block_id, + diagnostic_block_renderer( + diagnostic.clone(), + false, + self.build_settings.clone(), + ), + ); 
+ } + DiagnosticBlock::Context => {} } - DiagnosticBlock::Context => {} } - } - to_invalidate.is_valid = false; - prev_excerpt_id = to_invalidate.excerpts.last().unwrap().clone(); - } else if let Some(to_validate) = to_validate { - for (block_id, block) in &to_validate.blocks { + group_state.is_valid = false; + prev_excerpt_id = group_state.excerpts.last().unwrap().clone(); + } else { + excerpts.remove_excerpts(group_state.excerpts.iter(), excerpts_cx); + group_ixs_to_remove.push(group_ix); + blocks_to_remove.extend(group_state.blocks.keys().copied()); + } + } else if let Some((_, group_state)) = to_validate { + for (block_id, block) in &group_state.blocks { match block { DiagnosticBlock::Header(diagnostic) => { blocks_to_restyle.insert( @@ -414,8 +433,8 @@ impl ProjectDiagnosticsEditor { DiagnosticBlock::Context => {} } } - to_validate.is_valid = true; - prev_excerpt_id = to_validate.excerpts.last().unwrap().clone(); + group_state.is_valid = true; + prev_excerpt_id = group_state.excerpts.last().unwrap().clone(); } else { unreachable!(); } @@ -425,6 +444,7 @@ impl ProjectDiagnosticsEditor { }); self.editor.update(cx, |editor, cx| { + editor.remove_blocks(blocks_to_remove, cx); editor.replace_blocks(blocks_to_restyle, cx); let mut block_ids = editor .insert_blocks( @@ -447,6 +467,9 @@ impl ProjectDiagnosticsEditor { } }); + for ix in group_ixs_to_remove.into_iter().rev() { + groups.remove(ix); + } groups.extend(groups_to_add); groups.sort_unstable_by(|a, b| { let range_a = &a.primary_diagnostic.range; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5e816e8b8c2374f7d6d469b119c472f422e8fb46..4953188f40285764fc8f6ad40cd895788e612f8c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3116,6 +3116,10 @@ impl Editor { .collect() } + pub fn local_anchor_selections(&self) -> &Arc<[Selection]> { + &self.selections + } + fn resolve_selections<'a, D, I>( &self, selections: I, diff --git a/crates/editor/src/multi_buffer/anchor.rs b/crates/editor/src/multi_buffer/anchor.rs index 2cc4817a92b9f8d22de3e2e3640ea14eec669061..758a62526bf1be8825dc08083f668fc3308e45f4 100644 --- a/crates/editor/src/multi_buffer/anchor.rs +++ b/crates/editor/src/multi_buffer/anchor.rs @@ -28,6 +28,10 @@ impl Anchor { } } + pub fn excerpt_id(&self) -> &ExcerptId { + &self.excerpt_id + } + pub fn cmp<'a>(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Result { let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id); if excerpt_id_cmp.is_eq() { From b9d1ca4341402aae6bcd7191be1d32683f3af3a8 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 16:47:54 +0100 Subject: [PATCH 185/196] Show only disk-based diagnostics in `ProjectDiagnosticsEditor` Co-Authored-By: Nathan Sobo --- crates/diagnostics/src/diagnostics.rs | 6 +++++- crates/language/src/buffer.rs | 22 +++++----------------- crates/language/src/language.rs | 3 ++- crates/language/src/proto.rs | 6 +++--- crates/language/src/tests.rs | 24 ++++++++++++------------ crates/project/src/worktree.rs | 19 +++++++++++++++---- crates/rpc/proto/zed.proto | 8 ++++---- 7 files changed, 46 insertions(+), 42 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index f5565618e4dde89a71bc766ef9b2bcfd0b7f2058..7337b7b59421f939a6428adc2bb7a229f0968c78 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -237,7 +237,11 @@ impl ProjectDiagnosticsEditor { let mut diagnostic_blocks = Vec::new(); let excerpts_snapshot = 
self.excerpts.update(cx, |excerpts, excerpts_cx| { let mut old_groups = groups.iter_mut().enumerate().peekable(); - let mut new_groups = snapshot.diagnostic_groups().into_iter().peekable(); + let mut new_groups = snapshot + .diagnostic_groups() + .into_iter() + .filter(|group| group.entries[group.primary_ix].diagnostic.is_disk_based) + .peekable(); loop { let mut to_insert = None; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index f1dfcc8ccaa22a22b653fc2213235ba4c7162292..475fc6eaab7965716fc3b4fb7447fa77b493ce13 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -19,7 +19,7 @@ use std::{ any::Any, cell::RefCell, cmp::{self, Ordering}, - collections::{BTreeMap, HashMap, HashSet}, + collections::{BTreeMap, HashMap}, ffi::OsString, future::Future, iter::{Iterator, Peekable}, @@ -87,13 +87,13 @@ pub struct BufferSnapshot { #[derive(Clone, Debug, PartialEq, Eq)] pub struct Diagnostic { - pub source: Option, pub code: Option, pub severity: DiagnosticSeverity, pub message: String, pub group_id: usize, pub is_valid: bool, pub is_primary: bool, + pub is_disk_based: bool, } struct LanguageServerState { @@ -733,7 +733,7 @@ impl Buffer { fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { Ordering::Equal .then_with(|| b.is_primary.cmp(&a.is_primary)) - .then_with(|| a.source.cmp(&b.source)) + .then_with(|| a.is_disk_based.cmp(&b.is_disk_based)) .then_with(|| a.severity.cmp(&b.severity)) .then_with(|| a.message.cmp(&b.message)) } @@ -760,13 +760,6 @@ impl Buffer { self.deref() }; - let empty_set = HashSet::new(); - let disk_based_sources = self - .language - .as_ref() - .and_then(|language| language.disk_based_diagnostic_sources()) - .unwrap_or(&empty_set); - let mut edits_since_save = content .edits_since::(&self.saved_version) .peekable(); @@ -781,12 +774,7 @@ impl Buffer { // Some diagnostics are based on files on disk instead of buffers' // current contents. Adjust these diagnostics' ranges to reflect // any unsaved edits. 
- if entry - .diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)) - { + if entry.diagnostic.is_disk_based { while let Some(edit) = edits_since_save.peek() { if edit.old.end <= start { last_edit_old_end = edit.old.end; @@ -2008,13 +1996,13 @@ impl operation_queue::Operation for Operation { impl Default for Diagnostic { fn default() -> Self { Self { - source: Default::default(), code: Default::default(), severity: DiagnosticSeverity::ERROR, message: Default::default(), group_id: Default::default(), is_primary: Default::default(), is_valid: true, + is_disk_based: false, } } } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 317a6ed84a770aba875d33621ce5ab60df6ad396..fe832929a94b1afd52c9d54dd5ad12229a6fbf9d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -8,13 +8,14 @@ mod tests; use anyhow::{anyhow, Result}; pub use buffer::Operation; pub use buffer::*; +use collections::HashSet; pub use diagnostic_set::DiagnosticEntry; use gpui::AppContext; use highlight_map::HighlightMap; use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Deserialize; -use std::{collections::HashSet, path::Path, str, sync::Arc}; +use std::{path::Path, str, sync::Arc}; use theme::SyntaxTheme; use tree_sitter::{self, Query}; pub use tree_sitter::{Parser, Tree}; diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 06d3609d59f9d787df2377d20c0e4a9ed84d3cb0..bf33673fc938cedc31d5776b1c15d8a15a038934 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -119,7 +119,7 @@ pub fn serialize_diagnostics<'a>( is_primary: entry.diagnostic.is_primary, is_valid: entry.diagnostic.is_valid, code: entry.diagnostic.code.clone(), - source: entry.diagnostic.source.clone(), + is_disk_based: entry.diagnostic.is_disk_based, }) .collect() } @@ -271,10 +271,10 @@ pub fn deserialize_diagnostics( }, message: diagnostic.message, group_id: diagnostic.group_id as usize, - is_primary: diagnostic.is_primary, code: diagnostic.code, - source: diagnostic.source, is_valid: diagnostic.is_valid, + is_primary: diagnostic.is_primary, + is_disk_based: diagnostic.is_disk_based, }, }) }) diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index be8060b0b01309d174688b9100ae1a3a8696b92b..8a177fc472c7da888cdb19c29765f628a398c49a 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -519,7 +519,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 0, is_primary: true, ..Default::default() @@ -530,7 +530,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 1, is_primary: true, ..Default::default() @@ -540,7 +540,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { range: PointUtf16::new(2, 9)..PointUtf16::new(2, 12), diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, - source: Some("disk".to_string()), + is_disk_based: true, message: "undefined variable 'CCC'".to_string(), group_id: 2, is_primary: true, @@ -564,7 +564,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: 
"undefined variable 'BB'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 1, is_primary: true, ..Default::default() @@ -575,7 +575,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'CCC'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 2, is_primary: true, ..Default::default() @@ -614,7 +614,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 0, is_primary: true, ..Default::default() @@ -655,7 +655,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 0, is_primary: true, ..Default::default() @@ -704,7 +704,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 1, is_primary: true, ..Default::default() @@ -715,7 +715,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 0, is_primary: true, ..Default::default() @@ -736,7 +736,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'A'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 0, is_primary: true, ..Default::default() @@ -747,7 +747,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 4, is_primary: true, ..Default::default() @@ -798,7 +798,7 @@ async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui:: diagnostic: Diagnostic { severity: DiagnosticSeverity::ERROR, message: "cannot find value `d` in this scope".to_string(), - source: Some("disk".to_string()), + is_disk_based: true, group_id: 1, is_primary: true, is_valid: true, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 53ea5f1bf84cc660b3c001e54bdc56d066fde00b..9fb753b9d2c22f0cb93c5c45a2fd41592a0de80e 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -7,8 +7,8 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; use anyhow::{anyhow, Context, Result}; use client::{proto, Client, PeerId, TypedEnvelope, UserStore}; use clock::ReplicaId; -use collections::BTreeMap; use collections::{hash_map, HashMap}; +use collections::{BTreeMap, HashSet}; use futures::{Stream, StreamExt}; use fuzzy::CharBag; use gpui::{ @@ -675,6 +675,7 @@ impl Worktree { pub fn update_diagnostics( &mut self, mut params: lsp::PublishDiagnosticsParams, + disk_based_sources: &HashSet, cx: &mut ModelContext, ) -> Result<()> { let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; @@ -712,7 +713,6 @@ impl Worktree { range: diagnostic.range.start.to_point_utf16() 
..diagnostic.range.end.to_point_utf16(), diagnostic: Diagnostic { - source: diagnostic.source.clone(), code: diagnostic.code.clone().map(|code| match code { lsp::NumberOrString::Number(code) => code.to_string(), lsp::NumberOrString::String(code) => code, @@ -722,6 +722,10 @@ impl Worktree { group_id, is_primary: false, is_valid: true, + is_disk_based: diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)), }, }); } @@ -1018,6 +1022,10 @@ impl LocalWorktree { .log_err() .flatten() { + let disk_based_sources = language + .disk_based_diagnostic_sources() + .cloned() + .unwrap_or_default(); let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded(); language_server .on_notification::(move |params| { @@ -1028,7 +1036,8 @@ impl LocalWorktree { while let Ok(diagnostics) = diagnostics_rx.recv().await { if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { handle.update(&mut cx, |this, cx| { - this.update_diagnostics(diagnostics, cx).log_err(); + this.update_diagnostics(diagnostics, &disk_based_sources, cx) + .log_err(); }); } else { break; @@ -3812,7 +3821,9 @@ mod tests { }; worktree - .update(&mut cx, |tree, cx| tree.update_diagnostics(message, cx)) + .update(&mut cx, |tree, cx| { + tree.update_diagnostics(message, &Default::default(), cx) + }) .unwrap(); let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 3a36868b8d5b20a8caf306b64311f1cb876f97cd..5ef34960e78511f63b5c77dba664a7d8c716f384 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -304,10 +304,10 @@ message Diagnostic { Severity severity = 3; string message = 4; optional string code = 5; - optional string source = 6; - uint64 group_id = 7; - bool is_primary = 8; - bool is_valid = 9; + uint64 group_id = 6; + bool is_primary = 7; + bool is_valid = 8; + bool is_disk_based = 9; enum Severity { None = 0; From 7b453beebca8df511cd912717466c7e0b8aa1481 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 23 Dec 2021 18:35:50 +0100 Subject: [PATCH 186/196] WIP: Use `cargo check` for on-disk diagnostics Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- Cargo.lock | 1 + crates/editor/src/items.rs | 4 ++-- crates/language/Cargo.toml | 1 + crates/language/src/language.rs | 26 +++++++++++++++++++++++++- crates/lsp/src/lsp.rs | 6 +++++- crates/project/src/project.rs | 29 +++++++++++++++++++++++++++-- crates/project/src/worktree.rs | 33 +++++++++++++++++++++++---------- 7 files changed, 84 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 299526596c0909b8244e252146357682eaf0e612..41dcf21797dd187e4ceb399d46bb761a5cf37799 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2605,6 +2605,7 @@ name = "language" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", "clock", "collections", "ctor", diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index b662044553ce6cd66b3f1ce1a4305d984e54a270..d88315fff7348aadafc02b9bd6f4c015693c01cd 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -162,12 +162,12 @@ impl ItemView for Editor { let (language, language_server) = worktree.update(&mut cx, |worktree, cx| { let worktree = worktree.as_local_mut().unwrap(); let language = worktree - .languages() + .language_registry() .select_language(new_file.full_path()) .cloned(); let language_server = language .as_ref() - .and_then(|language| worktree.ensure_language_server(language, cx)); + .and_then(|language| 
worktree.register_language(language, cx)); (language, language_server.clone()) }); diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index a9a781e604b87af2386f674fbdee1bc668c44afa..f8d5c1e8362b497d1708311b4b65c3513b6fa99d 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -27,6 +27,7 @@ text = { path = "../text" } theme = { path = "../theme" } util = { path = "../util" } anyhow = "1.0.38" +async-trait = "0.1" futures = "0.3" lazy_static = "1.4" log = "0.4" diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index fe832929a94b1afd52c9d54dd5ad12229a6fbf9d..1c369e738b0aee23cdb2c6ee6bda1d74f2c4071f 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -6,6 +6,7 @@ pub mod proto; mod tests; use anyhow::{anyhow, Result}; +use async_trait::async_trait; pub use buffer::Operation; pub use buffer::*; use collections::HashSet; @@ -15,7 +16,11 @@ use highlight_map::HighlightMap; use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Deserialize; -use std::{path::Path, str, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + str, + sync::Arc, +}; use theme::SyntaxTheme; use tree_sitter::{self, Query}; pub use tree_sitter::{Parser, Tree}; @@ -59,9 +64,18 @@ pub struct BracketPair { pub newline: bool, } +#[async_trait] +pub trait DiagnosticSource: 'static + Send + Sync { + async fn diagnose( + &self, + path: Arc, + ) -> Result>)>>; +} + pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, + pub(crate) diagnostic_source: Option>, } pub struct Grammar { @@ -126,6 +140,7 @@ impl Language { highlight_map: Default::default(), }) }), + diagnostic_source: None, } } @@ -159,6 +174,11 @@ impl Language { Ok(self) } + pub fn with_diagnostic_source(mut self, source: impl DiagnosticSource) -> Self { + self.diagnostic_source = Some(Arc::new(source)); + self + } + pub fn name(&self) -> &str { self.config.name.as_str() } @@ -192,6 +212,10 @@ impl Language { } } + pub fn diagnostic_source(&self) -> Option<&Arc> { + self.diagnostic_source.as_ref() + } + pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet> { self.config .language_server diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index ef5435d80c59491f7c271311f6e8a3847a53bab6..769922523c3ca422e97f11e0964aef44c3a247b7 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -226,7 +226,11 @@ impl LanguageServer { process_id: Default::default(), root_path: Default::default(), root_uri: Some(root_uri), - initialization_options: Default::default(), + initialization_options: Some(json!({ + "checkOnSave": { + "enable": false + }, + })), capabilities: lsp_types::ClientCapabilities { experimental: Some(json!({ "serverStatusNotification": true, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index d0adbff7932590cfa32fd9efa175a4e6f5112bdc..02789e2f27314b21ff4477b4cb250bcb13d92186 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,14 +11,14 @@ use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{ AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, }; -use language::{Buffer, DiagnosticEntry, LanguageRegistry}; +use language::{Buffer, DiagnosticEntry, Language, LanguageRegistry}; use lsp::DiagnosticSeverity; use postage::{prelude::Stream, watch}; use std::{ path::Path, sync::{atomic::AtomicBool, Arc}, }; -use util::TryFutureExt as _; +use util::{ResultExt, TryFutureExt as _}; pub use 
fs::*; pub use worktree::*; @@ -503,6 +503,31 @@ impl Project { } } + pub fn diagnose(&self, cx: &mut ModelContext) { + for worktree_handle in &self.worktrees { + if let Some(worktree) = worktree_handle.read(cx).as_local() { + for language in worktree.languages() { + if let Some(diagnostic_source) = language.diagnostic_source().cloned() { + let worktree_path = worktree.abs_path().clone(); + let worktree_handle = worktree_handle.downgrade(); + cx.spawn_weak(|_, cx| async move { + if let Some(diagnostics) = + diagnostic_source.diagnose(worktree_path).await.log_err() + { + if let Some(worktree_handle) = worktree_handle.upgrade(&cx) { + worktree_handle.update(&mut cx, |worktree, cx| { + for (path, diagnostics) in diagnostics {} + }) + } + } + }) + .detach(); + } + } + } + } + } + pub fn diagnostic_summaries<'a>( &'a self, cx: &'a AppContext, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 9fb753b9d2c22f0cb93c5c45a2fd41592a0de80e..cef2c35e3508145ee8eb1d878121079ff74bfa15 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -291,7 +291,7 @@ impl Worktree { pub fn languages(&self) -> &Arc { match self { - Worktree::Local(worktree) => &worktree.languages, + Worktree::Local(worktree) => &worktree.language_registry, Worktree::Remote(worktree) => &worktree.languages, } } @@ -853,10 +853,11 @@ pub struct LocalWorktree { diagnostics: HashMap, Vec>>, diagnostic_summaries: BTreeMap, DiagnosticSummary>, queued_operations: Vec<(u64, Operation)>, - languages: Arc, + language_registry: Arc, client: Arc, user_store: ModelHandle, fs: Arc, + languages: Vec>, language_servers: HashMap>, } @@ -960,10 +961,11 @@ impl LocalWorktree { diagnostics: Default::default(), diagnostic_summaries: Default::default(), queued_operations: Default::default(), - languages, + language_registry: languages, client, user_store, fs, + languages: Default::default(), language_servers: Default::default(), }; @@ -1004,15 +1006,23 @@ impl LocalWorktree { self.config.collaborators.clone() } - pub fn languages(&self) -> &LanguageRegistry { + pub fn language_registry(&self) -> &LanguageRegistry { + &self.language_registry + } + + pub fn languages(&self) -> &[Arc] { &self.languages } - pub fn ensure_language_server( + pub fn register_language( &mut self, - language: &Language, + language: &Arc, cx: &mut ModelContext, ) -> Option> { + if !self.languages.iter().any(|l| Arc::ptr_eq(l, language)) { + self.languages.push(language.clone()); + } + if let Some(server) = self.language_servers.get(language.name()) { return Some(server.clone()); } @@ -1090,10 +1100,13 @@ impl LocalWorktree { let (diagnostics, language, language_server) = this.update(&mut cx, |this, cx| { let this = this.as_local_mut().unwrap(); let diagnostics = this.diagnostics.remove(&path); - let language = this.languages.select_language(file.full_path()).cloned(); + let language = this + .language_registry + .select_language(file.full_path()) + .cloned(); let server = language .as_ref() - .and_then(|language| this.ensure_language_server(language, cx)); + .and_then(|language| this.register_language(language, cx)); (diagnostics, language, server) }); @@ -1191,8 +1204,8 @@ impl LocalWorktree { self.snapshot.clone() } - pub fn abs_path(&self) -> &Path { - self.snapshot.abs_path.as_ref() + pub fn abs_path(&self) -> &Arc { + &self.snapshot.abs_path } pub fn contains_abs_path(&self, path: &Path) -> bool { From e3ecd87081475f08a531a503693ce3cd881004d8 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Thu, 23 Dec 2021 11:24:35 
-0700 Subject: [PATCH 187/196] WIP Co-Authored-By: Antonio Scandurra Co-Authored-By: Max Brunsfeld --- crates/language/src/buffer.rs | 16 ++--- crates/language/src/diagnostic_set.rs | 17 ++++- crates/language/src/language.rs | 2 + crates/language/src/proto.rs | 89 ++++++++++++++------------- crates/project/src/worktree.rs | 77 ++++++++++++++++++++++- crates/rpc/proto/zed.proto | 13 ++-- 6 files changed, 156 insertions(+), 58 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 475fc6eaab7965716fc3b4fb7447fa77b493ce13..764b5e12788413554359880c3245d658977e4c08 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -77,7 +77,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, tree: Option, - diagnostics: DiagnosticSet, + diagnostics: HashMap<&'static str, DiagnosticSet>, remote_selections: TreeMap]>>, diagnostics_update_count: usize, is_parsing: bool, @@ -115,7 +115,7 @@ struct LanguageServerSnapshot { pub enum Operation { Buffer(text::Operation), UpdateDiagnostics { - diagnostics: Arc<[DiagnosticEntry]>, + diagnostic_set: Arc, lamport_timestamp: clock::Lamport, }, UpdateSelections { @@ -298,10 +298,12 @@ impl Buffer { proto::deserialize_selections(selection_set.selections), ); } - this.apply_diagnostic_update( - Arc::from(proto::deserialize_diagnostics(message.diagnostics)), - cx, - ); + for diagnostic_set in message.diagnostic_sets { + this.apply_diagnostic_update( + Arc::from(proto::deserialize_diagnostics(diagnostic_set)), + cx, + ); + } Ok(this) } @@ -323,7 +325,7 @@ impl Buffer { selections: proto::serialize_selections(selections), }) .collect(), - diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()), + diagnostics: proto::serialize_diagnostic_set(self.diagnostics.iter()), } } diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index a246a104aa7d0f2e3739d33d9328e0f29cb73beb..2f275935756d2945fcb9ebf9ae0dcb599d53b5bd 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -8,8 +8,9 @@ use std::{ use sum_tree::{self, Bias, SumTree}; use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; -#[derive(Clone, Default)] +#[derive(Clone, Debug, Default)] pub struct DiagnosticSet { + provider_name: String, diagnostics: SumTree>, } @@ -34,22 +35,32 @@ pub struct Summary { } impl DiagnosticSet { - pub fn from_sorted_entries(iter: I, buffer: &text::BufferSnapshot) -> Self + pub fn provider_name(&self) -> &str { + &self.provider_name + } + + pub fn from_sorted_entries( + provider_name: String, + iter: I, + buffer: &text::BufferSnapshot, + ) -> Self where I: IntoIterator>, { Self { + provider_name, diagnostics: SumTree::from_iter(iter, buffer), } } - pub fn new(iter: I, buffer: &text::BufferSnapshot) -> Self + pub fn new(provider_name: &'static str, iter: I, buffer: &text::BufferSnapshot) -> Self where I: IntoIterator>, { let mut entries = iter.into_iter().collect::>(); entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); Self { + provider_name, diagnostics: SumTree::from_iter( entries.into_iter().map(|entry| DiagnosticEntry { range: buffer.anchor_before(entry.range.start) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 1c369e738b0aee23cdb2c6ee6bda1d74f2c4071f..e7523648355d0445d3b4f2a725ce610bf90792a8 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -66,6 +66,8 @@ pub struct BracketPair { #[async_trait] pub 
trait DiagnosticSource: 'static + Send + Sync { + fn name(&self) -> &'static str; + async fn diagnose( &self, path: Arc, diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index bf33673fc938cedc31d5776b1c15d8a15a038934..8dcc741e1dbf5ecd0909f3ce64f74c569f87678b 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,4 +1,4 @@ -use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; +use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, DiagnosticSet, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; @@ -57,12 +57,12 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, }), Operation::UpdateDiagnostics { - diagnostics, + diagnostic_set, lamport_timestamp, - } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { + } => proto::operation::Variant::UpdateDiagnosticSet(proto::UpdateDiagnosticSet { replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, - diagnostics: serialize_diagnostics(diagnostics.iter()), + diagnostic_set: Some(serialize_diagnostic_set(&diagnostic_set)), }), }), } @@ -99,29 +99,30 @@ pub fn serialize_selections(selections: &Arc<[Selection]>) -> Vec( - diagnostics: impl IntoIterator>, -) -> Vec { - diagnostics - .into_iter() - .map(|entry| proto::Diagnostic { - start: Some(serialize_anchor(&entry.range.start)), - end: Some(serialize_anchor(&entry.range.end)), - message: entry.diagnostic.message.clone(), - severity: match entry.diagnostic.severity { - DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, - DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, - DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, - DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, - _ => proto::diagnostic::Severity::None, - } as i32, - group_id: entry.diagnostic.group_id as u64, - is_primary: entry.diagnostic.is_primary, - is_valid: entry.diagnostic.is_valid, - code: entry.diagnostic.code.clone(), - is_disk_based: entry.diagnostic.is_disk_based, - }) - .collect() +pub fn serialize_diagnostic_set(set: &DiagnosticSet) -> proto::DiagnosticSet { + proto::DiagnosticSet { + provider_name: set.provider_name().to_string(), + diagnostics: set + .iter() + .map(|entry| proto::Diagnostic { + start: Some(serialize_anchor(&entry.range.start)), + end: Some(serialize_anchor(&entry.range.end)), + message: entry.diagnostic.message.clone(), + severity: match entry.diagnostic.severity { + DiagnosticSeverity::ERROR => proto::diagnostic::Severity::Error, + DiagnosticSeverity::WARNING => proto::diagnostic::Severity::Warning, + DiagnosticSeverity::INFORMATION => proto::diagnostic::Severity::Information, + DiagnosticSeverity::HINT => proto::diagnostic::Severity::Hint, + _ => proto::diagnostic::Severity::None, + } as i32, + group_id: entry.diagnostic.group_id as u64, + is_primary: entry.diagnostic.is_primary, + is_valid: entry.diagnostic.is_valid, + code: entry.diagnostic.code.clone(), + is_disk_based: entry.diagnostic.is_disk_based, + }) + .collect(), + } } fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { @@ -207,13 +208,15 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { value: message.lamport_timestamp, }, }, - proto::operation::Variant::UpdateDiagnostics(message) => Operation::UpdateDiagnostics { - diagnostics: Arc::from(deserialize_diagnostics(message.diagnostics)), - lamport_timestamp: 
clock::Lamport { - replica_id: message.replica_id as ReplicaId, - value: message.lamport_timestamp, - }, - }, + proto::operation::Variant::UpdateDiagnosticSet(message) => { + Operation::UpdateDiagnostics { + diagnostics: Arc::from(deserialize_diagnostic_set(message.diagnostic_set?)), + lamport_timestamp: clock::Lamport { + replica_id: message.replica_id as ReplicaId, + value: message.lamport_timestamp, + }, + } + } }, ) } @@ -253,12 +256,13 @@ pub fn deserialize_selections(selections: Vec) -> Arc<[Selecti ) } -pub fn deserialize_diagnostics( - diagnostics: Vec, -) -> Vec> { - diagnostics - .into_iter() - .filter_map(|diagnostic| { +pub fn deserialize_diagnostic_set( + message: proto::DiagnosticSet, + buffer: &BufferSnapshot, +) -> DiagnosticSet { + DiagnosticSet::from_sorted_entries( + message.provider_name, + message.diagnostics.into_iter().filter_map(|diagnostic| { Some(DiagnosticEntry { range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?, diagnostic: Diagnostic { @@ -277,8 +281,9 @@ pub fn deserialize_diagnostics( is_disk_based: diagnostic.is_disk_based, }, }) - }) - .collect() + }), + buffer, + ) } fn deserialize_anchor(anchor: proto::Anchor) -> Option { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index cef2c35e3508145ee8eb1d878121079ff74bfa15..8349a2aa27615cc68ebee926a09a80d8f7b73479 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -672,6 +672,79 @@ impl Worktree { } } + pub fn update_lsp_diagnostics( + &mut self, + mut params: lsp::PublishDiagnosticsParams, + disk_based_sources: &HashSet, + cx: &mut ModelContext, + ) -> Result<()> { + let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; + let abs_path = params + .uri + .to_file_path() + .map_err(|_| anyhow!("URI is not a file"))?; + let worktree_path = Arc::from( + abs_path + .strip_prefix(&this.abs_path) + .context("path is not within worktree")?, + ); + + let mut group_ids_by_diagnostic_range = HashMap::default(); + let mut diagnostics_by_group_id = HashMap::default(); + let mut next_group_id = 0; + for diagnostic in &mut params.diagnostics { + let source = diagnostic.source.as_ref(); + let code = diagnostic.code.as_ref(); + let group_id = diagnostic_ranges(&diagnostic, &abs_path) + .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) + .copied() + .unwrap_or_else(|| { + let group_id = post_inc(&mut next_group_id); + for range in diagnostic_ranges(&diagnostic, &abs_path) { + group_ids_by_diagnostic_range.insert((source, code, range), group_id); + } + group_id + }); + + diagnostics_by_group_id + .entry(group_id) + .or_insert(Vec::new()) + .push(DiagnosticEntry { + range: diagnostic.range.start.to_point_utf16() + ..diagnostic.range.end.to_point_utf16(), + diagnostic: Diagnostic { + code: diagnostic.code.clone().map(|code| match code { + lsp::NumberOrString::Number(code) => code.to_string(), + lsp::NumberOrString::String(code) => code, + }), + severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), + message: mem::take(&mut diagnostic.message), + group_id, + is_primary: false, + is_valid: true, + is_disk_based: diagnostic + .source + .as_ref() + .map_or(false, |source| disk_based_sources.contains(source)), + }, + }); + } + + let diagnostics = diagnostics_by_group_id + .into_values() + .flat_map(|mut diagnostics| { + let primary = diagnostics + .iter_mut() + .min_by_key(|entry| entry.diagnostic.severity) + .unwrap(); + primary.diagnostic.is_primary = true; + diagnostics + }) + 
.collect::>(); + + self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx) + } + pub fn update_diagnostics( &mut self, mut params: lsp::PublishDiagnosticsParams, @@ -1046,7 +1119,7 @@ impl LocalWorktree { while let Ok(diagnostics) = diagnostics_rx.recv().await { if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { handle.update(&mut cx, |this, cx| { - this.update_diagnostics(diagnostics, &disk_based_sources, cx) + this.update_lsp_diagnostics(diagnostics, &disk_based_sources, cx) .log_err(); }); } else { @@ -3835,7 +3908,7 @@ mod tests { worktree .update(&mut cx, |tree, cx| { - tree.update_diagnostics(message, &Default::default(), cx) + tree.update_lsp_diagnostics(message, &Default::default(), cx) }) .unwrap(); let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 5ef34960e78511f63b5c77dba664a7d8c716f384..0bcd99278820278bb462c966faf45718664b543c 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -265,7 +265,7 @@ message Buffer { string content = 2; repeated Operation.Edit history = 3; repeated SelectionSet selections = 4; - repeated Diagnostic diagnostics = 5; + repeated DiagnosticSet diagnostic_sets = 5; } message SelectionSet { @@ -292,10 +292,15 @@ enum Bias { Right = 1; } -message UpdateDiagnostics { +message UpdateDiagnosticSet { uint32 replica_id = 1; uint32 lamport_timestamp = 2; - repeated Diagnostic diagnostics = 3; + DiagnosticSet diagnostic_set = 3; +} + +message DiagnosticSet { + string provider_name = 1; + repeated Diagnostic diagnostics = 2; } message Diagnostic { @@ -324,7 +329,7 @@ message Operation { Undo undo = 2; UpdateSelections update_selections = 3; RemoveSelections remove_selections = 4; - UpdateDiagnostics update_diagnostics = 5; + UpdateDiagnosticSet update_diagnostic_set = 5; } message Edit { From d5a17053df6a7e3aaf2fdce22f17a3622d670e65 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 23 Dec 2021 14:21:10 -0800 Subject: [PATCH 188/196] Get code compiling with some todos --- crates/diagnostics/src/diagnostics.rs | 2 + crates/language/src/buffer.rs | 108 +++++++++++++++++--------- crates/language/src/diagnostic_set.rs | 49 +++++++----- crates/language/src/proto.rs | 79 ++++++++++++------- crates/language/src/tests.rs | 8 +- crates/project/src/project.rs | 8 +- crates/project/src/worktree.rs | 58 ++++++++++---- crates/rpc/src/peer.rs | 8 +- crates/zed/src/zed.rs | 2 +- 9 files changed, 212 insertions(+), 110 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 7337b7b59421f939a6428adc2bb7a229f0968c78..0276268784a2dae8f1f67378c3bb3ca5c9ac530a 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -638,6 +638,7 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree .update_diagnostic_entries( + "lsp".into(), Arc::from("/test/main.rs".as_ref()), None, vec![ @@ -764,6 +765,7 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree .update_diagnostic_entries( + "lsp".into(), Arc::from("/test/a.rs".as_ref()), None, vec![DiagnosticEntry { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 764b5e12788413554359880c3245d658977e4c08..461c88ae42b6c87d7beca9f6570a1e2bce2d8b7c 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -66,7 +66,7 @@ pub struct Buffer { parsing_in_background: bool, parse_count: usize, remote_selections: TreeMap]>>, - diagnostics: 
DiagnosticSet, + diagnostic_sets: Vec, diagnostics_update_count: usize, language_server: Option, deferred_ops: OperationQueue, @@ -77,7 +77,7 @@ pub struct Buffer { pub struct BufferSnapshot { text: text::BufferSnapshot, tree: Option, - diagnostics: HashMap<&'static str, DiagnosticSet>, + diagnostic_sets: Vec, remote_selections: TreeMap]>>, diagnostics_update_count: usize, is_parsing: bool, @@ -115,7 +115,8 @@ struct LanguageServerSnapshot { pub enum Operation { Buffer(text::Operation), UpdateDiagnostics { - diagnostic_set: Arc, + provider_name: String, + diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, UpdateSelections { @@ -298,9 +299,15 @@ impl Buffer { proto::deserialize_selections(selection_set.selections), ); } + let snapshot = this.snapshot(); for diagnostic_set in message.diagnostic_sets { + let (provider_name, entries) = proto::deserialize_diagnostic_set(diagnostic_set); this.apply_diagnostic_update( - Arc::from(proto::deserialize_diagnostics(diagnostic_set)), + DiagnosticSet::from_sorted_entries( + provider_name, + entries.into_iter().cloned(), + &snapshot, + ), cx, ); } @@ -325,7 +332,13 @@ impl Buffer { selections: proto::serialize_selections(selections), }) .collect(), - diagnostics: proto::serialize_diagnostic_set(self.diagnostics.iter()), + diagnostic_sets: self + .diagnostic_sets + .iter() + .map(|set| { + proto::serialize_diagnostic_set(set.provider_name().to_string(), set.iter()) + }) + .collect(), } } @@ -360,7 +373,7 @@ impl Buffer { pending_autoindent: Default::default(), language: None, remote_selections: Default::default(), - diagnostics: Default::default(), + diagnostic_sets: Default::default(), diagnostics_update_count: 0, language_server: None, deferred_ops: OperationQueue::new(), @@ -374,7 +387,7 @@ impl Buffer { text: self.text.snapshot(), tree: self.syntax_tree(), remote_selections: self.remote_selections.clone(), - diagnostics: self.diagnostics.clone(), + diagnostic_sets: self.diagnostic_sets.clone(), diagnostics_update_count: self.diagnostics_update_count, is_parsing: self.parsing_in_background, language: self.language.clone(), @@ -723,11 +736,13 @@ impl Buffer { } pub fn all_diagnostics<'a>(&'a self) -> impl 'a + Iterator> { - self.diagnostics.iter() + // TODO - enforce ordering between sets + self.diagnostic_sets.iter().flat_map(|set| set.iter()) } pub fn update_diagnostics( &mut self, + provider_name: Arc, version: Option, mut diagnostics: Vec>, cx: &mut ModelContext, @@ -809,12 +824,12 @@ impl Buffer { ix += 1; } drop(edits_since_save); - self.diagnostics = DiagnosticSet::new(diagnostics, content); - self.diagnostics_update_count += 1; - cx.notify(); - cx.emit(Event::DiagnosticsUpdated); + + let set = DiagnosticSet::new(provider_name, diagnostics, content); + self.apply_diagnostic_update(set.clone(), cx); Ok(Operation::UpdateDiagnostics { - diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::>()), + provider_name: set.provider_name().to_string(), + diagnostics: set.iter().cloned().collect(), lamport_timestamp: self.text.lamport_clock.tick(), }) } @@ -1303,12 +1318,13 @@ impl Buffer { Operation::Buffer(_) => { unreachable!("buffer operations should never be applied at this layer") } - Operation::UpdateDiagnostics { diagnostics, .. } => { - diagnostics.iter().all(|diagnostic| { - self.text.can_resolve(&diagnostic.range.start) - && self.text.can_resolve(&diagnostic.range.end) - }) - } + Operation::UpdateDiagnostics { + diagnostics: diagnostic_set, + .. 
+ } => diagnostic_set.iter().all(|diagnostic| { + self.text.can_resolve(&diagnostic.range.start) + && self.text.can_resolve(&diagnostic.range.end) + }), Operation::UpdateSelections { selections, .. } => selections .iter() .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)), @@ -1321,8 +1337,20 @@ impl Buffer { Operation::Buffer(_) => { unreachable!("buffer operations should never be applied at this layer") } - Operation::UpdateDiagnostics { diagnostics, .. } => { - self.apply_diagnostic_update(diagnostics, cx); + Operation::UpdateDiagnostics { + provider_name, + diagnostics: diagnostic_set, + .. + } => { + let snapshot = self.snapshot(); + self.apply_diagnostic_update( + DiagnosticSet::from_sorted_entries( + provider_name, + diagnostic_set.iter().cloned(), + &snapshot, + ), + cx, + ); } Operation::UpdateSelections { replica_id, @@ -1342,14 +1370,18 @@ impl Buffer { } } - fn apply_diagnostic_update( - &mut self, - diagnostics: Arc<[DiagnosticEntry]>, - cx: &mut ModelContext, - ) { - self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self); + fn apply_diagnostic_update(&mut self, set: DiagnosticSet, cx: &mut ModelContext) { + match self + .diagnostic_sets + .binary_search_by_key(&set.provider_name(), |set| set.provider_name()) + { + Ok(ix) => self.diagnostic_sets[ix] = set.clone(), + Err(ix) => self.diagnostic_sets.insert(ix, set.clone()), + } + self.diagnostics_update_count += 1; cx.notify(); + cx.emit(Event::DiagnosticsUpdated); } #[cfg(not(test))] @@ -1584,10 +1616,7 @@ impl BufferSnapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for entry in self - .diagnostics - .range::<_, usize>(range.clone(), self, true) - { + for entry in self.diagnostics_in_range::<_, usize>(range.clone()) { diagnostic_endpoints.push(DiagnosticEndpoint { offset: entry.range.start, is_start: true, @@ -1720,14 +1749,20 @@ impl BufferSnapshot { search_range: Range, ) -> impl 'a + Iterator> where - T: 'a + ToOffset, + T: 'a + Clone + ToOffset, O: 'a + FromAnchor, { - self.diagnostics.range(search_range, self, true) + self.diagnostic_sets + .iter() + .flat_map(move |set| set.range(search_range.clone(), self, true)) } pub fn diagnostic_groups(&self) -> Vec> { - self.diagnostics.groups(self) + let mut groups = Vec::new(); + for set in &self.diagnostic_sets { + set.groups(&mut groups, self); + } + groups } pub fn diagnostic_group<'a, O>( @@ -1737,7 +1772,8 @@ impl BufferSnapshot { where O: 'a + FromAnchor, { - self.diagnostics.group(group_id, self) + todo!(); + [].into_iter() } pub fn diagnostics_update_count(&self) -> usize { @@ -1755,7 +1791,7 @@ impl Clone for BufferSnapshot { text: self.text.clone(), tree: self.tree.clone(), remote_selections: self.remote_selections.clone(), - diagnostics: self.diagnostics.clone(), + diagnostic_sets: self.diagnostic_sets.clone(), diagnostics_update_count: self.diagnostics_update_count, is_parsing: self.is_parsing, language: self.language.clone(), diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 2f275935756d2945fcb9ebf9ae0dcb599d53b5bd..7cd13c00dd5f66fb491f82a8d460aa3d977623dc 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -4,13 +4,14 @@ use std::{ cmp::{Ordering, Reverse}, iter, ops::Range, + sync::Arc, }; use sum_tree::{self, Bias, SumTree}; use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct DiagnosticSet { - 
provider_name: String, + provider_name: Arc, diagnostics: SumTree>, } @@ -40,7 +41,7 @@ impl DiagnosticSet { } pub fn from_sorted_entries( - provider_name: String, + provider_name: impl Into>, iter: I, buffer: &text::BufferSnapshot, ) -> Self @@ -48,12 +49,12 @@ impl DiagnosticSet { I: IntoIterator>, { Self { - provider_name, + provider_name: provider_name.into(), diagnostics: SumTree::from_iter(iter, buffer), } } - pub fn new(provider_name: &'static str, iter: I, buffer: &text::BufferSnapshot) -> Self + pub fn new(provider_name: Arc, iter: I, buffer: &text::BufferSnapshot) -> Self where I: IntoIterator>, { @@ -115,7 +116,7 @@ impl DiagnosticSet { }) } - pub fn groups(&self, buffer: &text::BufferSnapshot) -> Vec> { + pub fn groups(&self, output: &mut Vec>, buffer: &text::BufferSnapshot) { let mut groups = HashMap::default(); for entry in self.diagnostics.iter() { groups @@ -124,27 +125,24 @@ impl DiagnosticSet { .push(entry.clone()); } - let mut groups = groups - .into_values() - .filter_map(|mut entries| { - entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap()); - entries - .iter() - .position(|entry| entry.diagnostic.is_primary) - .map(|primary_ix| DiagnosticGroup { - entries, - primary_ix, - }) - }) - .collect::>(); - groups.sort_unstable_by(|a, b| { + let start_ix = output.len(); + output.extend(groups.into_values().filter_map(|mut entries| { + entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start, buffer).unwrap()); + entries + .iter() + .position(|entry| entry.diagnostic.is_primary) + .map(|primary_ix| DiagnosticGroup { + entries, + primary_ix, + }) + })); + output[start_ix..].sort_unstable_by(|a, b| { a.entries[a.primary_ix] .range .start .cmp(&b.entries[b.primary_ix].range.start, buffer) .unwrap() }); - groups } pub fn group<'a, O: FromAnchor>( @@ -158,6 +156,15 @@ impl DiagnosticSet { } } +impl Default for DiagnosticSet { + fn default() -> Self { + Self { + provider_name: "".into(), + diagnostics: Default::default(), + } + } +} + impl sum_tree::Item for DiagnosticEntry { type Summary = Summary; diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 8dcc741e1dbf5ecd0909f3ce64f74c569f87678b..727b6a4d7928c3f82d827c698cfcd2a599c16d98 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -1,4 +1,4 @@ -use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, DiagnosticSet, Operation}; +use crate::{diagnostic_set::DiagnosticEntry, Diagnostic, Operation}; use anyhow::{anyhow, Result}; use clock::ReplicaId; use lsp::DiagnosticSeverity; @@ -57,12 +57,16 @@ pub fn serialize_operation(operation: &Operation) -> proto::Operation { lamport_timestamp: lamport_timestamp.value, }), Operation::UpdateDiagnostics { - diagnostic_set, + provider_name, + diagnostics, lamport_timestamp, } => proto::operation::Variant::UpdateDiagnosticSet(proto::UpdateDiagnosticSet { replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, - diagnostic_set: Some(serialize_diagnostic_set(&diagnostic_set)), + diagnostic_set: Some(serialize_diagnostic_set( + provider_name.clone(), + diagnostics.iter(), + )), }), }), } @@ -99,11 +103,14 @@ pub fn serialize_selections(selections: &Arc<[Selection]>) -> Vec proto::DiagnosticSet { +pub fn serialize_diagnostic_set<'a>( + provider_name: String, + diagnostics: impl IntoIterator>, +) -> proto::DiagnosticSet { proto::DiagnosticSet { - provider_name: set.provider_name().to_string(), - diagnostics: set - .iter() + provider_name, + diagnostics: diagnostics + 
.into_iter() .map(|entry| proto::Diagnostic { start: Some(serialize_anchor(&entry.range.start)), end: Some(serialize_anchor(&entry.range.end)), @@ -209,8 +216,14 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { }, }, proto::operation::Variant::UpdateDiagnosticSet(message) => { + let (provider_name, diagnostics) = deserialize_diagnostic_set( + message + .diagnostic_set + .ok_or_else(|| anyhow!("missing diagnostic set"))?, + ); Operation::UpdateDiagnostics { - diagnostics: Arc::from(deserialize_diagnostic_set(message.diagnostic_set?)), + provider_name, + diagnostics, lamport_timestamp: clock::Lamport { replica_id: message.replica_id as ReplicaId, value: message.lamport_timestamp, @@ -258,31 +271,37 @@ pub fn deserialize_selections(selections: Vec) -> Arc<[Selecti pub fn deserialize_diagnostic_set( message: proto::DiagnosticSet, - buffer: &BufferSnapshot, -) -> DiagnosticSet { - DiagnosticSet::from_sorted_entries( +) -> (String, Arc<[DiagnosticEntry]>) { + ( message.provider_name, - message.diagnostics.into_iter().filter_map(|diagnostic| { - Some(DiagnosticEntry { - range: deserialize_anchor(diagnostic.start?)?..deserialize_anchor(diagnostic.end?)?, - diagnostic: Diagnostic { - severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? { - proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, - proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, - proto::diagnostic::Severity::Information => DiagnosticSeverity::INFORMATION, - proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT, - proto::diagnostic::Severity::None => return None, + message + .diagnostics + .into_iter() + .filter_map(|diagnostic| { + Some(DiagnosticEntry { + range: deserialize_anchor(diagnostic.start?)? + ..deserialize_anchor(diagnostic.end?)?, + diagnostic: Diagnostic { + severity: match proto::diagnostic::Severity::from_i32(diagnostic.severity)? + { + proto::diagnostic::Severity::Error => DiagnosticSeverity::ERROR, + proto::diagnostic::Severity::Warning => DiagnosticSeverity::WARNING, + proto::diagnostic::Severity::Information => { + DiagnosticSeverity::INFORMATION + } + proto::diagnostic::Severity::Hint => DiagnosticSeverity::HINT, + proto::diagnostic::Severity::None => return None, + }, + message: diagnostic.message, + group_id: diagnostic.group_id as usize, + code: diagnostic.code, + is_valid: diagnostic.is_valid, + is_primary: diagnostic.is_primary, + is_disk_based: diagnostic.is_disk_based, }, - message: diagnostic.message, - group_id: diagnostic.group_id as usize, - code: diagnostic.code, - is_valid: diagnostic.is_valid, - is_primary: diagnostic.is_primary, - is_disk_based: diagnostic.is_disk_based, - }, + }) }) - }), - buffer, + .collect(), ) } diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 8a177fc472c7da888cdb19c29765f628a398c49a..73a2bb0bf8049d796e5a8a7f1a44f6eb4b98d940 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -512,6 +512,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // Receive diagnostics for an earlier version of the buffer. buffer .update_diagnostics( + "lsp".into(), Some(open_notification.text_document.version), vec![ DiagnosticEntry { @@ -607,6 +608,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { // Ensure overlapping diagnostics are highlighted correctly. 
buffer .update_diagnostics( + "lsp".into(), Some(open_notification.text_document.version), vec![ DiagnosticEntry { @@ -697,6 +699,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { buffer.update(&mut cx, |buffer, cx| { buffer .update_diagnostics( + "lsp".into(), Some(change_notification_2.text_document.version), vec![ DiagnosticEntry { @@ -819,7 +822,7 @@ async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui:: ]; buffer.update(&mut cx, |buffer, cx| { buffer - .update_diagnostics(None, diagnostics.clone(), cx) + .update_diagnostics("lsp".into(), None, diagnostics.clone(), cx) .unwrap(); assert_eq!( buffer @@ -837,7 +840,7 @@ async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui:: buffer.update(&mut cx, |buffer, cx| { buffer - .update_diagnostics(None, new_diagnostics.clone(), cx) + .update_diagnostics("lsp".into(), None, new_diagnostics.clone(), cx) .unwrap(); assert_eq!( buffer @@ -882,6 +885,7 @@ async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { buffer.set_language(Some(Arc::new(rust_lang())), None, cx); buffer .update_diagnostics( + "lsp".into(), None, vec![ DiagnosticEntry { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 02789e2f27314b21ff4477b4cb250bcb13d92186..b1151b2d16657d4083cc92e58db091cf323c9d51 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,7 +11,7 @@ use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet}; use gpui::{ AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, }; -use language::{Buffer, DiagnosticEntry, Language, LanguageRegistry}; +use language::{Buffer, DiagnosticEntry, LanguageRegistry}; use lsp::DiagnosticSeverity; use postage::{prelude::Stream, watch}; use std::{ @@ -510,13 +510,15 @@ impl Project { if let Some(diagnostic_source) = language.diagnostic_source().cloned() { let worktree_path = worktree.abs_path().clone(); let worktree_handle = worktree_handle.downgrade(); - cx.spawn_weak(|_, cx| async move { + cx.spawn_weak(|_, mut cx| async move { if let Some(diagnostics) = diagnostic_source.diagnose(worktree_path).await.log_err() { if let Some(worktree_handle) = worktree_handle.upgrade(&cx) { worktree_handle.update(&mut cx, |worktree, cx| { - for (path, diagnostics) in diagnostics {} + for (path, diagnostics) in diagnostics { + todo!() + } }) } } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 8349a2aa27615cc68ebee926a09a80d8f7b73479..aa7d76b6ab911b72b111093b1b57a4140e835f26 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -674,6 +674,7 @@ impl Worktree { pub fn update_lsp_diagnostics( &mut self, + provider_name: Arc, mut params: lsp::PublishDiagnosticsParams, disk_based_sources: &HashSet, cx: &mut ModelContext, @@ -742,11 +743,18 @@ impl Worktree { }) .collect::>(); - self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx) + self.update_diagnostic_entries( + provider_name, + worktree_path, + params.version, + diagnostics, + cx, + ) } pub fn update_diagnostics( &mut self, + provider_name: Arc, mut params: lsp::PublishDiagnosticsParams, disk_based_sources: &HashSet, cx: &mut ModelContext, @@ -815,11 +823,18 @@ impl Worktree { }) .collect::>(); - self.update_diagnostic_entries(worktree_path, params.version, diagnostics, cx) + self.update_diagnostic_entries( + provider_name, + worktree_path, + params.version, + diagnostics, + cx, + ) } pub fn update_diagnostic_entries( &mut 
self, + provider_name: Arc, path: Arc, version: Option, diagnostics: Vec>, @@ -836,7 +851,12 @@ impl Worktree { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( buffer.remote_id(), - buffer.update_diagnostics(version, diagnostics.clone(), cx), + buffer.update_diagnostics( + provider_name, + version, + diagnostics.clone(), + cx, + ), ) }); self.send_buffer_update(remote_id, operation?, cx); @@ -1100,6 +1120,8 @@ impl LocalWorktree { return Some(server.clone()); } + let name: Arc = language.name().into(); + if let Some(language_server) = language .start_server(self.abs_path(), cx) .log_err() @@ -1115,15 +1137,23 @@ impl LocalWorktree { smol::block_on(diagnostics_tx.send(params)).ok(); }) .detach(); - cx.spawn_weak(|this, mut cx| async move { - while let Ok(diagnostics) = diagnostics_rx.recv().await { - if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { - handle.update(&mut cx, |this, cx| { - this.update_lsp_diagnostics(diagnostics, &disk_based_sources, cx) + cx.spawn_weak(|this, mut cx| { + let provider_name = name.clone(); + async move { + while let Ok(diagnostics) = diagnostics_rx.recv().await { + if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { + handle.update(&mut cx, |this, cx| { + this.update_lsp_diagnostics( + provider_name.clone(), + diagnostics, + &disk_based_sources, + cx, + ) .log_err(); - }); - } else { - break; + }); + } else { + break; + } } } }) @@ -1187,7 +1217,9 @@ impl LocalWorktree { let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); buffer.set_language(language, language_server, cx); if let Some(diagnostics) = diagnostics { - buffer.update_diagnostics(None, diagnostics, cx).unwrap(); + buffer + .update_diagnostics(todo!(), None, diagnostics, cx) + .unwrap(); } buffer }); @@ -3908,7 +3940,7 @@ mod tests { worktree .update(&mut cx, |tree, cx| { - tree.update_lsp_diagnostics(message, &Default::default(), cx) + tree.update_lsp_diagnostics("lsp".into(), message, &Default::default(), cx) }) .unwrap(); let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); diff --git a/crates/rpc/src/peer.rs b/crates/rpc/src/peer.rs index bd5d1c384f241c328bbacf7df2f028f85db68470..7d4adededd47ebbf1cdfdaa7034d7073b493d36d 100644 --- a/crates/rpc/src/peer.rs +++ b/crates/rpc/src/peer.rs @@ -401,7 +401,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: vec![], + diagnostic_sets: vec![], }), } ); @@ -424,7 +424,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: vec![], + diagnostic_sets: vec![], }), } ); @@ -455,7 +455,7 @@ mod tests { content: "path/one content".to_string(), history: vec![], selections: vec![], - diagnostics: vec![], + diagnostic_sets: vec![], }), } } @@ -467,7 +467,7 @@ mod tests { content: "path/two content".to_string(), history: vec![], selections: vec![], - diagnostics: vec![], + diagnostic_sets: vec![], }), } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 713a9a3cac2d3e2bee2aa9bfd701ba5b920c99c2..04a6fc84959970701cb869f4059fc1bb92059534 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -392,7 +392,7 @@ mod tests { .read(cx) .worktrees(cx) .iter() - .map(|w| w.read(cx).as_local().unwrap().abs_path()) + .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref()) .collect::>(); assert_eq!( worktree_roots, From 4f774e2bde581a803244fb380a470b479ef79edc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 23 Dec 2021 23:10:28 -0800 Subject: [PATCH 189/196] wip --- 
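Note: at this point in the series, a non-LSP provider implements the `DiagnosticSource` trait from the `language` crate and reports byte-offset diagnostics keyed by a provider name, while LSP results keep flowing through the UTF-16 point path (`update_offset_diagnostics` vs. `update_point_utf16_diagnostics` on the worktree). Below is a rough sketch of what an implementor might look like; the `ExampleSource` type, the hardcoded result it returns, and the exact generic parameters in `name` and `diagnose` (`Arc<str>`, `Arc<Path>`, `Vec<(PathBuf, Vec<DiagnosticEntry<usize>>)>`) are assumptions for illustration, not code from this repository.

    // Hypothetical implementor of the DiagnosticSource trait; the generic
    // parameters below are assumed, since the trait is still in flux in
    // this series.
    use std::{
        path::{Path, PathBuf},
        sync::Arc,
    };

    use anyhow::Result;
    use async_trait::async_trait;
    use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, DiagnosticSource};

    struct ExampleSource;

    #[async_trait]
    impl DiagnosticSource for ExampleSource {
        fn name(&self) -> Arc<str> {
            "example".into()
        }

        async fn diagnose(
            &self,
            _worktree_path: Arc<Path>,
        ) -> Result<Vec<(PathBuf, Vec<DiagnosticEntry<usize>>)>> {
            // A real provider would invoke an external tool here and convert
            // its output into per-file, byte-offset diagnostic entries.
            Ok(vec![(
                PathBuf::from("src/main.rs"),
                vec![DiagnosticEntry {
                    range: 0..1,
                    diagnostic: Diagnostic {
                        code: None,
                        severity: DiagnosticSeverity::WARNING,
                        message: "example diagnostic".to_string(),
                        group_id: 0,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based: true,
                    },
                }],
            )])
        }
    }
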
crates/diagnostics/src/diagnostics.rs | 4 +- crates/language/src/language.rs | 4 +- crates/project/src/project.rs | 25 ++-- crates/project/src/worktree.rs | 176 +++++++++++++------------- 4 files changed, 105 insertions(+), 104 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 0276268784a2dae8f1f67378c3bb3ca5c9ac530a..a2801a850291d5d49aca43cacaa21f0ec31424ad 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -637,7 +637,7 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree - .update_diagnostic_entries( + .update_point_utf16_diagnostics( "lsp".into(), Arc::from("/test/main.rs".as_ref()), None, @@ -764,7 +764,7 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree - .update_diagnostic_entries( + .update_point_utf16_diagnostics( "lsp".into(), Arc::from("/test/a.rs".as_ref()), None, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index e7523648355d0445d3b4f2a725ce610bf90792a8..6303cfd37418141636a72e3e2a8dd8d8952e1787 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -66,12 +66,12 @@ pub struct BracketPair { #[async_trait] pub trait DiagnosticSource: 'static + Send + Sync { - fn name(&self) -> &'static str; + fn name(&self) -> Arc; async fn diagnose( &self, path: Arc, - ) -> Result>)>>; + ) -> Result>)>>; } pub struct Language { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b1151b2d16657d4083cc92e58db091cf323c9d51..7d6a6d9d02c79a5d5273ce5a8c6f281d523d19d5 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -511,17 +511,22 @@ impl Project { let worktree_path = worktree.abs_path().clone(); let worktree_handle = worktree_handle.downgrade(); cx.spawn_weak(|_, mut cx| async move { - if let Some(diagnostics) = - diagnostic_source.diagnose(worktree_path).await.log_err() - { - if let Some(worktree_handle) = worktree_handle.upgrade(&cx) { - worktree_handle.update(&mut cx, |worktree, cx| { - for (path, diagnostics) in diagnostics { - todo!() - } - }) + let diagnostics = + diagnostic_source.diagnose(worktree_path).await.log_err()?; + let worktree_handle = worktree_handle.upgrade(&cx)?; + worktree_handle.update(&mut cx, |worktree, cx| { + for (path, diagnostics) in diagnostics { + worktree + .update_offset_diagnostics( + diagnostic_source.name(), + path.into(), + diagnostics, + cx, + ) + .log_err()?; } - } + Some(()) + }) }) .detach(); } diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index aa7d76b6ab911b72b111093b1b57a4140e835f26..1284300e96a7ea3d03745500ed79aa1f4ec07ca2 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -743,7 +743,7 @@ impl Worktree { }) .collect::>(); - self.update_diagnostic_entries( + self.update_point_utf16_diagnostics( provider_name, worktree_path, params.version, @@ -752,87 +752,54 @@ impl Worktree { ) } - pub fn update_diagnostics( + pub fn update_offset_diagnostics( &mut self, provider_name: Arc, - mut params: lsp::PublishDiagnosticsParams, - disk_based_sources: &HashSet, + path: Arc, + diagnostics: Vec>, cx: &mut ModelContext, ) -> Result<()> { - let this = self.as_local_mut().ok_or_else(|| anyhow!("not local"))?; - let abs_path = params - .uri - .to_file_path() - .map_err(|_| anyhow!("URI is not a file"))?; - let worktree_path = Arc::from( - abs_path - .strip_prefix(&this.abs_path) - .context("path is not within worktree")?, - ); - - let mut 
group_ids_by_diagnostic_range = HashMap::default(); - let mut diagnostics_by_group_id = HashMap::default(); - let mut next_group_id = 0; - for diagnostic in &mut params.diagnostics { - let source = diagnostic.source.as_ref(); - let code = diagnostic.code.as_ref(); - let group_id = diagnostic_ranges(&diagnostic, &abs_path) - .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range))) - .copied() - .unwrap_or_else(|| { - let group_id = post_inc(&mut next_group_id); - for range in diagnostic_ranges(&diagnostic, &abs_path) { - group_ids_by_diagnostic_range.insert((source, code, range), group_id); - } - group_id - }); - - diagnostics_by_group_id - .entry(group_id) - .or_insert(Vec::new()) - .push(DiagnosticEntry { - range: diagnostic.range.start.to_point_utf16() - ..diagnostic.range.end.to_point_utf16(), - diagnostic: Diagnostic { - code: diagnostic.code.clone().map(|code| match code { - lsp::NumberOrString::Number(code) => code.to_string(), - lsp::NumberOrString::String(code) => code, - }), - severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR), - message: mem::take(&mut diagnostic.message), - group_id, - is_primary: false, - is_valid: true, - is_disk_based: diagnostic - .source - .as_ref() - .map_or(false, |source| disk_based_sources.contains(source)), - }, - }); + let this = self.as_local_mut().unwrap(); + for buffer in this.open_buffers.values() { + if let Some(buffer) = buffer.upgrade(cx) { + if buffer + .read(cx) + .file() + .map_or(false, |file| *file.path() == path) + { + let (remote_id, operation) = buffer.update(cx, |buffer, cx| { + ( + buffer.remote_id(), + buffer.update_diagnostics( + provider_name, + None, + diagnostics + .iter() + .map(|entry| DiagnosticEntry { + range: buffer.offset_to_point_utf16(entry.range.start) + ..buffer.offset_to_point_utf16(entry.range.end), + diagnostic: entry.diagnostic.clone(), + }) + .collect(), + cx, + ), + ) + }); + self.send_buffer_update(remote_id, operation?, cx); + break; + } + } } - let diagnostics = diagnostics_by_group_id - .into_values() - .flat_map(|mut diagnostics| { - let primary = diagnostics - .iter_mut() - .min_by_key(|entry| entry.diagnostic.severity) - .unwrap(); - primary.diagnostic.is_primary = true; - diagnostics - }) - .collect::>(); - - self.update_diagnostic_entries( - provider_name, - worktree_path, - params.version, - diagnostics, - cx, - ) + let this = self.as_local_mut().unwrap(); + this.diagnostic_summaries + .insert(path.clone(), DiagnosticSummary::new(&diagnostics)); + this.offset_diagnostics.insert(path.clone(), diagnostics); + cx.emit(Event::DiagnosticsUpdated(path.clone())); + Ok(()) } - pub fn update_diagnostic_entries( + pub fn update_point_utf16_diagnostics( &mut self, provider_name: Arc, path: Arc, @@ -868,11 +835,26 @@ impl Worktree { let this = self.as_local_mut().unwrap(); this.diagnostic_summaries .insert(path.clone(), DiagnosticSummary::new(&diagnostics)); - this.diagnostics.insert(path.clone(), diagnostics); + this.point_utf16_diagnostics + .insert(path.clone(), diagnostics); cx.emit(Event::DiagnosticsUpdated(path.clone())); Ok(()) } + fn convert_diagnostics( + diagnostics: &[DiagnosticEntry], + buffer: &Buffer, + ) -> Vec> { + diagnostics + .iter() + .map(|entry| DiagnosticEntry { + range: buffer.offset_to_point_utf16(entry.range.start) + ..buffer.offset_to_point_utf16(entry.range.end), + diagnostic: entry.diagnostic.clone(), + }) + .collect() + } + fn send_buffer_update( &mut self, buffer_id: u64, @@ -943,7 +925,8 @@ pub struct LocalWorktree { loading_buffers: LoadingBuffers, 
open_buffers: HashMap>, shared_buffers: HashMap>>, - diagnostics: HashMap, Vec>>, + point_utf16_diagnostics: HashMap, Vec>>, + offset_diagnostics: HashMap, Vec>>, diagnostic_summaries: BTreeMap, DiagnosticSummary>, queued_operations: Vec<(u64, Operation)>, language_registry: Arc, @@ -1051,7 +1034,8 @@ impl LocalWorktree { loading_buffers: Default::default(), open_buffers: Default::default(), shared_buffers: Default::default(), - diagnostics: Default::default(), + point_utf16_diagnostics: Default::default(), + offset_diagnostics: Default::default(), diagnostic_summaries: Default::default(), queued_operations: Default::default(), language_registry: languages, @@ -1200,27 +1184,39 @@ impl LocalWorktree { .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .await?; - let (diagnostics, language, language_server) = this.update(&mut cx, |this, cx| { - let this = this.as_local_mut().unwrap(); - let diagnostics = this.diagnostics.remove(&path); - let language = this - .language_registry - .select_language(file.full_path()) - .cloned(); - let server = language - .as_ref() - .and_then(|language| this.register_language(language, cx)); - (diagnostics, language, server) - }); + let (point_utf16_diagnostics, offset_diagnostics, language, language_server) = this + .update(&mut cx, |this, cx| { + let this = this.as_local_mut().unwrap(); + let point_utf16_diagnostics = this.point_utf16_diagnostics.remove(&path); + let offset_diagnostics = this.offset_diagnostics.remove(&path); + let language = this + .language_registry + .select_language(file.full_path()) + .cloned(); + let server = language + .as_ref() + .and_then(|language| this.register_language(language, cx)); + ( + point_utf16_diagnostics, + offset_diagnostics, + language, + server, + ) + }); let buffer = cx.add_model(|cx| { let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); buffer.set_language(language, language_server, cx); - if let Some(diagnostics) = diagnostics { + if let Some(diagnostics) = point_utf16_diagnostics { buffer .update_diagnostics(todo!(), None, diagnostics, cx) .unwrap(); } + if let Some(diagnostics) = offset_diagnostics { + buffer + .update_offset_diagnostics(todo!(), None, diagnostics, cx) + .unwrap(); + } buffer }); From 11e3874b4a472a07e05ed4d245b880a39c5a04ad Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 24 Dec 2021 12:07:26 +0100 Subject: [PATCH 190/196] Remove duplication when assigning diagnostics and hardcode provider names --- crates/diagnostics/src/diagnostics.rs | 24 ++--- crates/language/src/buffer.rs | 72 +++++++------ crates/language/src/diagnostic_set.rs | 4 +- crates/language/src/language.rs | 16 ++- crates/project/src/project.rs | 8 +- crates/project/src/worktree.rs | 141 +++++++++----------------- crates/text/src/rope.rs | 35 +++++++ crates/text/src/text.rs | 32 ++++++ 8 files changed, 176 insertions(+), 156 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index a2801a850291d5d49aca43cacaa21f0ec31424ad..ad527fe3c76feffc25edf37e0150842408f48535 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -579,7 +579,7 @@ mod tests { use super::*; use client::{http::ServerResponse, test::FakeHttpClient, Client, UserStore}; use gpui::TestAppContext; - use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry, PointUtf16}; + use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, LanguageRegistry}; use project::FakeFs; use serde_json::json; use 
std::sync::Arc; @@ -637,13 +637,11 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree - .update_point_utf16_diagnostics( - "lsp".into(), + .update_diagnostics_from_provider( Arc::from("/test/main.rs".as_ref()), - None, vec![ DiagnosticEntry { - range: PointUtf16::new(1, 8)..PointUtf16::new(1, 9), + range: 20..21, diagnostic: Diagnostic { message: "move occurs because `x` has type `Vec`, which does not implement the `Copy` trait" @@ -655,7 +653,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(2, 8)..PointUtf16::new(2, 9), + range: 40..41, diagnostic: Diagnostic { message: "move occurs because `y` has type `Vec`, which does not implement the `Copy` trait" @@ -667,7 +665,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(3, 6)..PointUtf16::new(3, 7), + range: 58..59, diagnostic: Diagnostic { message: "value moved here".to_string(), severity: DiagnosticSeverity::INFORMATION, @@ -677,7 +675,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(4, 6)..PointUtf16::new(4, 7), + range: 68..69, diagnostic: Diagnostic { message: "value moved here".to_string(), severity: DiagnosticSeverity::INFORMATION, @@ -687,7 +685,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(7, 6)..PointUtf16::new(7, 7), + range: 112..113, diagnostic: Diagnostic { message: "use of moved value\nvalue used here after move".to_string(), severity: DiagnosticSeverity::ERROR, @@ -697,7 +695,7 @@ mod tests { }, }, DiagnosticEntry { - range: PointUtf16::new(8, 6)..PointUtf16::new(8, 7), + range: 122..123, diagnostic: Diagnostic { message: "use of moved value\nvalue used here after move".to_string(), severity: DiagnosticSeverity::ERROR, @@ -764,12 +762,10 @@ mod tests { worktree.update(&mut cx, |worktree, cx| { worktree - .update_point_utf16_diagnostics( - "lsp".into(), + .update_diagnostics_from_provider( Arc::from("/test/a.rs".as_ref()), - None, vec![DiagnosticEntry { - range: PointUtf16::new(0, 15)..PointUtf16::new(0, 15), + range: 15..15, diagnostic: Diagnostic { message: "mismatched types\nexpected `usize`, found `char`".to_string(), severity: DiagnosticSeverity::ERROR, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 461c88ae42b6c87d7beca9f6570a1e2bce2d8b7c..8bc894bf2b8638b5bf9cb15f423f90da96244fd1 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -23,7 +23,7 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, - ops::{Deref, DerefMut, Range}, + ops::{Add, Deref, DerefMut, Range, Sub}, path::{Path, PathBuf}, str, sync::Arc, @@ -31,7 +31,7 @@ use std::{ vec, }; use sum_tree::TreeMap; -use text::operation_queue::OperationQueue; +use text::{operation_queue::OperationQueue, rope::TextDimension}; pub use text::{Buffer as TextBuffer, Operation as _, *}; use theme::SyntaxTheme; use tree_sitter::{InputEdit, Parser, QueryCursor, Tree}; @@ -85,6 +85,12 @@ pub struct BufferSnapshot { parse_count: usize, } +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct GroupId { + source: Arc, + id: usize, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub struct Diagnostic { pub code: Option, @@ -740,13 +746,16 @@ impl Buffer { self.diagnostic_sets.iter().flat_map(|set| set.iter()) } - pub fn update_diagnostics( + pub fn update_diagnostics( &mut self, provider_name: Arc, version: Option, - mut diagnostics: Vec>, + mut diagnostics: Vec>, cx: &mut ModelContext, - ) -> Result { + ) -> Result + where + T: ToPoint + Ord + Clip + TextDimension + Add + Sub + Copy, + { fn compare_diagnostics(a: 
&Diagnostic, b: &Diagnostic) -> Ordering { Ordering::Equal .then_with(|| b.is_primary.cmp(&a.is_primary)) @@ -755,13 +764,6 @@ impl Buffer { .then_with(|| a.message.cmp(&b.message)) } - diagnostics.sort_unstable_by(|a, b| { - Ordering::Equal - .then_with(|| a.range.start.cmp(&b.range.start)) - .then_with(|| b.range.end.cmp(&a.range.end)) - .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic)) - }); - let version = version.map(|version| version as usize); let content = if let Some(version) = version { let language_server = self.language_server.as_mut().unwrap(); @@ -777,14 +779,18 @@ impl Buffer { self.deref() }; - let mut edits_since_save = content - .edits_since::(&self.saved_version) - .peekable(); - let mut last_edit_old_end = PointUtf16::zero(); - let mut last_edit_new_end = PointUtf16::zero(); - let mut ix = 0; - 'outer: while ix < diagnostics.len() { - let entry = &mut diagnostics[ix]; + diagnostics.sort_unstable_by(|a, b| { + Ordering::Equal + .then_with(|| a.range.start.cmp(&b.range.start)) + .then_with(|| b.range.end.cmp(&a.range.end)) + .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic)) + }); + + let mut sanitized_diagnostics = Vec::new(); + let mut edits_since_save = content.edits_since::(&self.saved_version).peekable(); + let mut last_edit_old_end = T::default(); + let mut last_edit_new_end = T::default(); + 'outer: for entry in diagnostics { let mut start = entry.range.start; let mut end = entry.range.end; @@ -798,7 +804,6 @@ impl Buffer { last_edit_new_end = edit.new.end; edits_since_save.next(); } else if edit.old.start <= end && edit.old.end >= start { - diagnostics.remove(ix); continue 'outer; } else { break; @@ -809,23 +814,26 @@ impl Buffer { end = last_edit_new_end + (end - last_edit_old_end); } - entry.range = content.clip_point_utf16(start, Bias::Left) - ..content.clip_point_utf16(end, Bias::Right); - + let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content); + let mut range = range.start.to_point(content)..range.end.to_point(content); // Expand empty ranges by one character - if entry.range.start == entry.range.end { - entry.range.end.column += 1; - entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right); - if entry.range.start == entry.range.end && entry.range.end.column > 0 { - entry.range.start.column -= 1; - entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left); + if range.start == range.end { + range.end.column += 1; + range.end = content.clip_point(range.end, Bias::Right); + if range.start == range.end && range.end.column > 0 { + range.start.column -= 1; + range.start = content.clip_point(range.start, Bias::Left); } } - ix += 1; + + sanitized_diagnostics.push(DiagnosticEntry { + range, + diagnostic: entry.diagnostic, + }); } drop(edits_since_save); - let set = DiagnosticSet::new(provider_name, diagnostics, content); + let set = DiagnosticSet::new(provider_name, sanitized_diagnostics, content); self.apply_diagnostic_update(set.clone(), cx); Ok(Operation::UpdateDiagnostics { provider_name: set.provider_name().to_string(), diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index 7cd13c00dd5f66fb491f82a8d460aa3d977623dc..05e19e635a3b044dc7416c72b272d795d9e85fd9 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -7,7 +7,7 @@ use std::{ sync::Arc, }; use sum_tree::{self, Bias, SumTree}; -use text::{Anchor, FromAnchor, PointUtf16, ToOffset}; +use text::{Anchor, FromAnchor, Point, ToOffset}; 
#[derive(Clone, Debug)] pub struct DiagnosticSet { @@ -56,7 +56,7 @@ impl DiagnosticSet { pub fn new(provider_name: Arc, iter: I, buffer: &text::BufferSnapshot) -> Self where - I: IntoIterator>, + I: IntoIterator>, { let mut entries = iter.into_iter().collect::>(); entries.sort_unstable_by_key(|entry| (entry.range.start, Reverse(entry.range.end))); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 6303cfd37418141636a72e3e2a8dd8d8952e1787..26c0e84261e5984f1ed72a306955cec073434516 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -65,9 +65,7 @@ pub struct BracketPair { } #[async_trait] -pub trait DiagnosticSource: 'static + Send + Sync { - fn name(&self) -> Arc; - +pub trait DiagnosticProvider: 'static + Send + Sync { async fn diagnose( &self, path: Arc, @@ -77,7 +75,7 @@ pub trait DiagnosticSource: 'static + Send + Sync { pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, - pub(crate) diagnostic_source: Option>, + pub(crate) diagnostic_provider: Option>, } pub struct Grammar { @@ -142,7 +140,7 @@ impl Language { highlight_map: Default::default(), }) }), - diagnostic_source: None, + diagnostic_provider: None, } } @@ -176,8 +174,8 @@ impl Language { Ok(self) } - pub fn with_diagnostic_source(mut self, source: impl DiagnosticSource) -> Self { - self.diagnostic_source = Some(Arc::new(source)); + pub fn with_diagnostic_provider(mut self, source: impl DiagnosticProvider) -> Self { + self.diagnostic_provider = Some(Arc::new(source)); self } @@ -214,8 +212,8 @@ impl Language { } } - pub fn diagnostic_source(&self) -> Option<&Arc> { - self.diagnostic_source.as_ref() + pub fn diagnostic_provider(&self) -> Option<&Arc> { + self.diagnostic_provider.as_ref() } pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet> { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7d6a6d9d02c79a5d5273ce5a8c6f281d523d19d5..2a524216dbb578286882183cbe9bdc89cdd7caf6 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -507,18 +507,16 @@ impl Project { for worktree_handle in &self.worktrees { if let Some(worktree) = worktree_handle.read(cx).as_local() { for language in worktree.languages() { - if let Some(diagnostic_source) = language.diagnostic_source().cloned() { + if let Some(provider) = language.diagnostic_provider().cloned() { let worktree_path = worktree.abs_path().clone(); let worktree_handle = worktree_handle.downgrade(); cx.spawn_weak(|_, mut cx| async move { - let diagnostics = - diagnostic_source.diagnose(worktree_path).await.log_err()?; + let diagnostics = provider.diagnose(worktree_path).await.log_err()?; let worktree_handle = worktree_handle.upgrade(&cx)?; worktree_handle.update(&mut cx, |worktree, cx| { for (path, diagnostics) in diagnostics { worktree - .update_offset_diagnostics( - diagnostic_source.name(), + .update_diagnostics_from_provider( path.into(), diagnostics, cx, diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 1284300e96a7ea3d03745500ed79aa1f4ec07ca2..4ec5679c611fb7e2de362f3effb0b58998810e10 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -50,6 +50,8 @@ use util::{post_inc, ResultExt, TryFutureExt}; lazy_static! 
{ static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); + static ref DIAGNOSTIC_PROVIDER_NAME: Arc = Arc::from("diagnostic_source"); + static ref LSP_PROVIDER_NAME: Arc = Arc::from("lsp"); } #[derive(Clone, Debug)] @@ -672,9 +674,8 @@ impl Worktree { } } - pub fn update_lsp_diagnostics( + pub fn update_diagnostics_from_lsp( &mut self, - provider_name: Arc, mut params: lsp::PublishDiagnosticsParams, disk_based_sources: &HashSet, cx: &mut ModelContext, @@ -743,44 +744,21 @@ impl Worktree { }) .collect::>(); - self.update_point_utf16_diagnostics( - provider_name, - worktree_path, - params.version, - diagnostics, - cx, - ) - } - - pub fn update_offset_diagnostics( - &mut self, - provider_name: Arc, - path: Arc, - diagnostics: Vec>, - cx: &mut ModelContext, - ) -> Result<()> { let this = self.as_local_mut().unwrap(); for buffer in this.open_buffers.values() { if let Some(buffer) = buffer.upgrade(cx) { if buffer .read(cx) .file() - .map_or(false, |file| *file.path() == path) + .map_or(false, |file| *file.path() == worktree_path) { let (remote_id, operation) = buffer.update(cx, |buffer, cx| { ( buffer.remote_id(), buffer.update_diagnostics( - provider_name, - None, - diagnostics - .iter() - .map(|entry| DiagnosticEntry { - range: buffer.offset_to_point_utf16(entry.range.start) - ..buffer.offset_to_point_utf16(entry.range.end), - diagnostic: entry.diagnostic.clone(), - }) - .collect(), + LSP_PROVIDER_NAME.clone(), + params.version, + diagnostics.clone(), cx, ), ) @@ -793,18 +771,17 @@ impl Worktree { let this = self.as_local_mut().unwrap(); this.diagnostic_summaries - .insert(path.clone(), DiagnosticSummary::new(&diagnostics)); - this.offset_diagnostics.insert(path.clone(), diagnostics); - cx.emit(Event::DiagnosticsUpdated(path.clone())); + .insert(worktree_path.clone(), DiagnosticSummary::new(&diagnostics)); + this.lsp_diagnostics + .insert(worktree_path.clone(), diagnostics); + cx.emit(Event::DiagnosticsUpdated(worktree_path.clone())); Ok(()) } - pub fn update_point_utf16_diagnostics( + pub fn update_diagnostics_from_provider( &mut self, - provider_name: Arc, path: Arc, - version: Option, - diagnostics: Vec>, + diagnostics: Vec>, cx: &mut ModelContext, ) -> Result<()> { let this = self.as_local_mut().unwrap(); @@ -819,8 +796,8 @@ impl Worktree { ( buffer.remote_id(), buffer.update_diagnostics( - provider_name, - version, + DIAGNOSTIC_PROVIDER_NAME.clone(), + None, diagnostics.clone(), cx, ), @@ -835,26 +812,11 @@ impl Worktree { let this = self.as_local_mut().unwrap(); this.diagnostic_summaries .insert(path.clone(), DiagnosticSummary::new(&diagnostics)); - this.point_utf16_diagnostics - .insert(path.clone(), diagnostics); + this.provider_diagnostics.insert(path.clone(), diagnostics); cx.emit(Event::DiagnosticsUpdated(path.clone())); Ok(()) } - fn convert_diagnostics( - diagnostics: &[DiagnosticEntry], - buffer: &Buffer, - ) -> Vec> { - diagnostics - .iter() - .map(|entry| DiagnosticEntry { - range: buffer.offset_to_point_utf16(entry.range.start) - ..buffer.offset_to_point_utf16(entry.range.end), - diagnostic: entry.diagnostic.clone(), - }) - .collect() - } - fn send_buffer_update( &mut self, buffer_id: u64, @@ -925,8 +887,8 @@ pub struct LocalWorktree { loading_buffers: LoadingBuffers, open_buffers: HashMap>, shared_buffers: HashMap>>, - point_utf16_diagnostics: HashMap, Vec>>, - offset_diagnostics: HashMap, Vec>>, + lsp_diagnostics: HashMap, Vec>>, + provider_diagnostics: HashMap, Vec>>, diagnostic_summaries: BTreeMap, DiagnosticSummary>, queued_operations: Vec<(u64, Operation)>, 
language_registry: Arc, @@ -1034,8 +996,8 @@ impl LocalWorktree { loading_buffers: Default::default(), open_buffers: Default::default(), shared_buffers: Default::default(), - point_utf16_diagnostics: Default::default(), - offset_diagnostics: Default::default(), + lsp_diagnostics: Default::default(), + provider_diagnostics: Default::default(), diagnostic_summaries: Default::default(), queued_operations: Default::default(), language_registry: languages, @@ -1104,8 +1066,6 @@ impl LocalWorktree { return Some(server.clone()); } - let name: Arc = language.name().into(); - if let Some(language_server) = language .start_server(self.abs_path(), cx) .log_err() @@ -1121,23 +1081,15 @@ impl LocalWorktree { smol::block_on(diagnostics_tx.send(params)).ok(); }) .detach(); - cx.spawn_weak(|this, mut cx| { - let provider_name = name.clone(); - async move { - while let Ok(diagnostics) = diagnostics_rx.recv().await { - if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { - handle.update(&mut cx, |this, cx| { - this.update_lsp_diagnostics( - provider_name.clone(), - diagnostics, - &disk_based_sources, - cx, - ) + cx.spawn_weak(|this, mut cx| async move { + while let Ok(diagnostics) = diagnostics_rx.recv().await { + if let Some(handle) = cx.read(|cx| this.upgrade(cx)) { + handle.update(&mut cx, |this, cx| { + this.update_diagnostics_from_lsp(diagnostics, &disk_based_sources, cx) .log_err(); - }); - } else { - break; - } + }); + } else { + break; } } }) @@ -1184,11 +1136,11 @@ impl LocalWorktree { .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx)) .await?; - let (point_utf16_diagnostics, offset_diagnostics, language, language_server) = this - .update(&mut cx, |this, cx| { + let (lsp_diagnostics, provider_diagnostics, language, language_server) = + this.update(&mut cx, |this, cx| { let this = this.as_local_mut().unwrap(); - let point_utf16_diagnostics = this.point_utf16_diagnostics.remove(&path); - let offset_diagnostics = this.offset_diagnostics.remove(&path); + let lsp_diagnostics = this.lsp_diagnostics.remove(&path); + let provider_diagnostics = this.provider_diagnostics.remove(&path); let language = this .language_registry .select_language(file.full_path()) @@ -1196,31 +1148,32 @@ impl LocalWorktree { let server = language .as_ref() .and_then(|language| this.register_language(language, cx)); - ( - point_utf16_diagnostics, - offset_diagnostics, - language, - server, - ) + (lsp_diagnostics, provider_diagnostics, language, server) }); + let mut buffer_operations = Vec::new(); let buffer = cx.add_model(|cx| { let mut buffer = Buffer::from_file(0, contents, Box::new(file), cx); buffer.set_language(language, language_server, cx); - if let Some(diagnostics) = point_utf16_diagnostics { - buffer - .update_diagnostics(todo!(), None, diagnostics, cx) + if let Some(diagnostics) = lsp_diagnostics { + let op = buffer + .update_diagnostics(LSP_PROVIDER_NAME.clone(), None, diagnostics, cx) .unwrap(); + buffer_operations.push(op); } - if let Some(diagnostics) = offset_diagnostics { - buffer - .update_offset_diagnostics(todo!(), None, diagnostics, cx) + if let Some(diagnostics) = provider_diagnostics { + let op = buffer + .update_diagnostics(DIAGNOSTIC_PROVIDER_NAME.clone(), None, diagnostics, cx) .unwrap(); + buffer_operations.push(op); } buffer }); - this.update(&mut cx, |this, _| { + this.update(&mut cx, |this, cx| { + for op in buffer_operations { + this.send_buffer_update(buffer.read(cx).remote_id(), op, cx); + } let this = this.as_local_mut().unwrap(); this.open_buffers.insert(buffer.id(), 
buffer.downgrade()); }); @@ -3936,7 +3889,7 @@ mod tests { worktree .update(&mut cx, |tree, cx| { - tree.update_lsp_diagnostics("lsp".into(), message, &Default::default(), cx) + tree.update_diagnostics_from_lsp(message, &Default::default(), cx) }) .unwrap(); let buffer = buffer.read_with(&cx, |buffer, _| buffer.snapshot()); diff --git a/crates/text/src/rope.rs b/crates/text/src/rope.rs index 5b9cef2cc6fa2fbe24b9365b03b294a44ba4b14b..70399e15f544e4ecc77fef0efb19da2d84976071 100644 --- a/crates/text/src/rope.rs +++ b/crates/text/src/rope.rs @@ -205,6 +205,19 @@ impl Rope { .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot)) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + if point >= self.summary().lines_utf16 { + return self.summary().lines; + } + let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>(); + cursor.seek(&point, Bias::Left, &()); + let overshoot = point - cursor.start().0; + cursor.start().1 + + cursor + .item() + .map_or(Point::zero(), |chunk| chunk.point_utf16_to_point(overshoot)) + } + pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { let mut cursor = self.chunks.cursor::(); cursor.seek(&offset, Bias::Left, &()); @@ -583,6 +596,28 @@ impl Chunk { offset } + fn point_utf16_to_point(&self, target: PointUtf16) -> Point { + let mut point = Point::zero(); + let mut point_utf16 = PointUtf16::zero(); + for ch in self.0.chars() { + if point_utf16 >= target { + if point_utf16 > target { + panic!("point {:?} is inside of character {:?}", target, ch); + } + break; + } + + if ch == '\n' { + point_utf16 += PointUtf16::new(1, 0); + point += Point::new(1, 0); + } else { + point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32); + point += Point::new(0, ch.len_utf8() as u32); + } + } + point + } + fn clip_point(&self, target: Point, bias: Bias) -> Point { for (row, line) in self.0.split('\n').enumerate() { if row == target.row as usize { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index eec52d2fa58d179b02e2cd5e630ee0326b0457f8..5debb2f18d4b6af2d0d007bc061d62e6de11ff27 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -1307,6 +1307,10 @@ impl BufferSnapshot { self.visible_text.point_utf16_to_offset(point) } + pub fn point_utf16_to_point(&self, point: PointUtf16) -> Point { + self.visible_text.point_utf16_to_point(point) + } + pub fn offset_to_point(&self, offset: usize) -> Point { self.visible_text.offset_to_point(offset) } @@ -2045,12 +2049,40 @@ impl ToPoint for usize { } } +impl ToPoint for PointUtf16 { + fn to_point<'a>(&self, snapshot: &BufferSnapshot) -> Point { + snapshot.point_utf16_to_point(*self) + } +} + impl ToPoint for Point { fn to_point<'a>(&self, _: &BufferSnapshot) -> Point { *self } } +pub trait Clip { + fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self; +} + +impl Clip for usize { + fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self { + snapshot.clip_offset(*self, bias) + } +} + +impl Clip for Point { + fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self { + snapshot.clip_point(*self, bias) + } +} + +impl Clip for PointUtf16 { + fn clip(&self, bias: Bias, snapshot: &BufferSnapshot) -> Self { + snapshot.clip_point_utf16(*self, bias) + } +} + pub trait FromAnchor { fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self; } From 393009a05c41c42dc6e32b5782e88e13e5820707 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 24 Dec 2021 12:08:55 +0100 Subject: [PATCH 191/196] Implement `Buffer::diagnostic_group` --- 
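For context, a minimal usage sketch of the provider-aware diagnostics API this patch
introduces, based on the signatures in the hunks below. The `language::BufferSnapshot`
argument, the helper name, and the "lsp" provider string are assumptions for
illustration only, not part of the change itself:

    // Assumes a snapshot whose diagnostics were reported under an "lsp" provider.
    fn print_lsp_diagnostics(snapshot: &language::BufferSnapshot) {
        // diagnostics_in_range now yields (provider_name, entry) pairs.
        for (provider, entry) in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len()) {
            println!("[{}] {:?}: {}", provider, entry.range, entry.diagnostic.message);
        }

        // diagnostic_group is now scoped to a single provider's diagnostic set.
        let group: Vec<_> = snapshot.diagnostic_group::<usize>("lsp", 0).collect();
        println!("group 0 has {} entries", group.len());
    }
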
crates/editor/src/editor.rs | 19 ++-- crates/editor/src/items.rs | 6 +- crates/editor/src/multi_buffer.rs | 5 +- crates/language/src/buffer.rs | 34 +++--- crates/language/src/tests.rs | 183 +++++++++++++++++------------- crates/project/src/worktree.rs | 138 ++++++++++++---------- crates/server/src/rpc.rs | 1 + 7 files changed, 222 insertions(+), 164 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4953188f40285764fc8f6ad40cd895788e612f8c..37c8c7983d9155794f107a7291399eca3142c753 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2844,19 +2844,19 @@ impl Editor { loop { let next_group = buffer .diagnostics_in_range::<_, usize>(search_start..buffer.len()) - .find_map(|entry| { + .find_map(|(provider_name, entry)| { if entry.diagnostic.is_primary && !entry.range.is_empty() && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end()) { - Some((entry.range, entry.diagnostic.group_id)) + Some((provider_name, entry.range, entry.diagnostic.group_id)) } else { None } }); - if let Some((primary_range, group_id)) = next_group { - self.activate_diagnostics(group_id, cx); + if let Some((provider_name, primary_range, group_id)) = next_group { + self.activate_diagnostics(provider_name, group_id, cx); self.update_selections( vec![Selection { id: selection.id, @@ -2884,7 +2884,7 @@ impl Editor { let primary_range_start = active_diagnostics.primary_range.start.to_offset(&buffer); let is_valid = buffer .diagnostics_in_range::<_, usize>(active_diagnostics.primary_range.clone()) - .any(|entry| { + .any(|(_, entry)| { entry.diagnostic.is_primary && !entry.range.is_empty() && entry.range.start == primary_range_start @@ -2910,7 +2910,12 @@ impl Editor { } } - fn activate_diagnostics(&mut self, group_id: usize, cx: &mut ViewContext) { + fn activate_diagnostics( + &mut self, + provider_name: &str, + group_id: usize, + cx: &mut ViewContext, + ) { self.dismiss_diagnostics(cx); self.active_diagnostics = self.display_map.update(cx, |display_map, cx| { let buffer = self.buffer.read(cx).snapshot(cx); @@ -2919,7 +2924,7 @@ impl Editor { let mut primary_message = None; let mut group_end = Point::zero(); let diagnostic_group = buffer - .diagnostic_group::(group_id) + .diagnostic_group::(provider_name, group_id) .map(|entry| { if entry.range.end > group_end { group_end = entry.range.end; diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index d88315fff7348aadafc02b9bd6f4c015693c01cd..df819ec520004124054d339caed0b4f085f745f4 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -298,9 +298,9 @@ impl DiagnosticMessage { let new_diagnostic = buffer .read(cx) .diagnostics_in_range::<_, usize>(cursor_position..cursor_position) - .filter(|entry| !entry.range.is_empty()) - .min_by_key(|entry| (entry.diagnostic.severity, entry.range.len())) - .map(|entry| entry.diagnostic); + .filter(|(_, entry)| !entry.range.is_empty()) + .min_by_key(|(_, entry)| (entry.diagnostic.severity, entry.range.len())) + .map(|(_, entry)| entry.diagnostic); if new_diagnostic != self.diagnostic { self.diagnostic = new_diagnostic; cx.notify(); diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index cb011f30da47fcf08497e4e07ade04632c6e2546..36c8577bf0699b6439c9740bf76c97b05b166d7a 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1470,6 +1470,7 @@ impl MultiBufferSnapshot { pub fn diagnostic_group<'a, O>( &'a self, + provider_name: &'a str, group_id: usize, ) -> 
impl Iterator> + 'a where @@ -1477,13 +1478,13 @@ impl MultiBufferSnapshot { { self.as_singleton() .into_iter() - .flat_map(move |buffer| buffer.diagnostic_group(group_id)) + .flat_map(move |buffer| buffer.diagnostic_group(provider_name, group_id)) } pub fn diagnostics_in_range<'a, T, O>( &'a self, range: Range, - ) -> impl Iterator> + 'a + ) -> impl Iterator)> + 'a where T: 'a + ToOffset, O: 'a + text::FromAnchor, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 8bc894bf2b8638b5bf9cb15f423f90da96244fd1..29724db677995b3a3aed65cfcc100684e4758bca 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -23,7 +23,7 @@ use std::{ ffi::OsString, future::Future, iter::{Iterator, Peekable}, - ops::{Add, Deref, DerefMut, Range, Sub}, + ops::{Deref, DerefMut, Range, Sub}, path::{Path, PathBuf}, str, sync::Arc, @@ -741,11 +741,6 @@ impl Buffer { cx.notify(); } - pub fn all_diagnostics<'a>(&'a self) -> impl 'a + Iterator> { - // TODO - enforce ordering between sets - self.diagnostic_sets.iter().flat_map(|set| set.iter()) - } - pub fn update_diagnostics( &mut self, provider_name: Arc, @@ -754,7 +749,7 @@ impl Buffer { cx: &mut ModelContext, ) -> Result where - T: ToPoint + Ord + Clip + TextDimension + Add + Sub + Copy, + T: Copy + Ord + TextDimension + Sub + Clip + ToPoint, { fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { Ordering::Equal @@ -810,8 +805,10 @@ impl Buffer { } } - start = last_edit_new_end + (start - last_edit_old_end); - end = last_edit_new_end + (end - last_edit_old_end); + start = last_edit_new_end; + start.add_assign(&(start - last_edit_old_end)); + end = last_edit_new_end; + end.add_assign(&(end - last_edit_old_end)); } let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content); @@ -1624,7 +1621,7 @@ impl BufferSnapshot { let mut highlights = None; let mut diagnostic_endpoints = Vec::::new(); if let Some(theme) = theme { - for entry in self.diagnostics_in_range::<_, usize>(range.clone()) { + for (_, entry) in self.diagnostics_in_range::<_, usize>(range.clone()) { diagnostic_endpoints.push(DiagnosticEndpoint { offset: entry.range.start, is_start: true, @@ -1755,14 +1752,15 @@ impl BufferSnapshot { pub fn diagnostics_in_range<'a, T, O>( &'a self, search_range: Range, - ) -> impl 'a + Iterator> + ) -> impl 'a + Iterator)> where T: 'a + Clone + ToOffset, O: 'a + FromAnchor, { - self.diagnostic_sets - .iter() - .flat_map(move |set| set.range(search_range.clone(), self, true)) + self.diagnostic_sets.iter().flat_map(move |set| { + set.range(search_range.clone(), self, true) + .map(|e| (set.provider_name(), e)) + }) } pub fn diagnostic_groups(&self) -> Vec> { @@ -1775,13 +1773,17 @@ impl BufferSnapshot { pub fn diagnostic_group<'a, O>( &'a self, + provider_name: &str, group_id: usize, ) -> impl 'a + Iterator> where O: 'a + FromAnchor, { - todo!(); - [].into_iter() + self.diagnostic_sets + .iter() + .find(|s| s.provider_name() == provider_name) + .into_iter() + .flat_map(move |s| s.group(group_id, self)) } pub fn diagnostics_update_count(&self) -> usize { diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 73a2bb0bf8049d796e5a8a7f1a44f6eb4b98d940..81c5865e1138050046e91616d018ee37de2d508e 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -560,28 +560,34 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0)) .collect::>(), &[ - DiagnosticEntry { - range: Point::new(3, 
9)..Point::new(3, 11), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'BB'".to_string(), - is_disk_based: true, - group_id: 1, - is_primary: true, - ..Default::default() - }, - }, - DiagnosticEntry { - range: Point::new(4, 9)..Point::new(4, 12), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'CCC'".to_string(), - is_disk_based: true, - group_id: 2, - is_primary: true, - ..Default::default() + ( + "lsp", + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string(), + is_disk_based: true, + group_id: 1, + is_primary: true, + ..Default::default() + }, } - } + ), + ( + "lsp", + DiagnosticEntry { + range: Point::new(4, 9)..Point::new(4, 12), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'CCC'".to_string(), + is_disk_based: true, + group_id: 2, + is_primary: true, + ..Default::default() + } + } + ) ] ); assert_eq!( @@ -642,27 +648,33 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0)) .collect::>(), &[ - DiagnosticEntry { - range: Point::new(2, 9)..Point::new(2, 12), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "unreachable statement".to_string(), - group_id: 3, - is_primary: true, - ..Default::default() + ( + "lsp", + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 12), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "unreachable statement".to_string(), + group_id: 3, + is_primary: true, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(2, 9)..Point::new(2, 10), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string(), - is_disk_based: true, - group_id: 0, - is_primary: true, - ..Default::default() - }, - } + ), + ( + "lsp", + DiagnosticEntry { + range: Point::new(2, 9)..Point::new(2, 10), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + is_disk_based: true, + group_id: 0, + is_primary: true, + ..Default::default() + }, + } + ) ] ); assert_eq!( @@ -734,28 +746,34 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - DiagnosticEntry { - range: Point::new(2, 21)..Point::new(2, 22), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string(), - is_disk_based: true, - group_id: 0, - is_primary: true, - ..Default::default() + ( + "lsp", + DiagnosticEntry { + range: Point::new(2, 21)..Point::new(2, 22), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + is_disk_based: true, + group_id: 0, + is_primary: true, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(3, 9)..Point::new(3, 11), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "undefined variable 'BB'".to_string(), - is_disk_based: true, - group_id: 4, - is_primary: true, - ..Default::default() - }, - } + ), + ( + "lsp", + DiagnosticEntry { + range: Point::new(3, 9)..Point::new(3, 11), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "undefined variable 'BB'".to_string(), + is_disk_based: true, + group_id: 4, 
+ is_primary: true, + ..Default::default() + }, + } + ) ] ); }); @@ -829,7 +847,10 @@ async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui:: .snapshot() .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) .collect::>(), - diagnostics.as_slice(), + diagnostics + .iter() + .map(|entry| ("lsp", entry.clone())) + .collect::>(), ); }); @@ -849,24 +870,30 @@ async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui:: .collect::>(), &[ // The changed diagnostic is given a new group id. - DiagnosticEntry { - range: new_diagnostics[0].range.clone(), - diagnostic: Diagnostic { - group_id: 3, - ..new_diagnostics[0].diagnostic.clone() - }, - }, + ( + "lsp", + DiagnosticEntry { + range: new_diagnostics[0].range.clone(), + diagnostic: Diagnostic { + group_id: 3, + ..new_diagnostics[0].diagnostic.clone() + }, + } + ), // The old disk-based diagnostic is marked as invalid, but keeps // its original group id. - DiagnosticEntry { - range: diagnostics[1].range.clone(), - diagnostic: Diagnostic { - is_valid: false, - ..diagnostics[1].diagnostic.clone() - }, - }, + ( + "lsp", + DiagnosticEntry { + range: diagnostics[1].range.clone(), + diagnostic: Diagnostic { + is_valid: false, + ..diagnostics[1].diagnostic.clone() + }, + } + ), // The unchanged diagnostic keeps its original group id - new_diagnostics[1].clone(), + ("lsp", new_diagnostics[1].clone()), ], ); }); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 4ec5679c611fb7e2de362f3effb0b58998810e10..f99837fee0334bf7c5c5d23d9ef40b0e4a773f39 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -3729,22 +3729,25 @@ mod tests { .unwrap(); buffer.read_with(&cx, |buffer, _| { - let diagnostics = buffer - .snapshot() + let snapshot = buffer.snapshot(); + let diagnostics = snapshot .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(); assert_eq!( diagnostics, - &[DiagnosticEntry { - range: Point::new(0, 9)..Point::new(0, 10), - diagnostic: Diagnostic { - severity: lsp::DiagnosticSeverity::ERROR, - message: "undefined variable 'A'".to_string(), - group_id: 0, - is_primary: true, - ..Default::default() + &[( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(0, 9)..Point::new(0, 10), + diagnostic: Diagnostic { + severity: lsp::DiagnosticSeverity::ERROR, + message: "undefined variable 'A'".to_string(), + group_id: 0, + is_primary: true, + ..Default::default() + } } - }] + )] ) }); } @@ -3899,61 +3902,78 @@ mod tests { .diagnostics_in_range::<_, Point>(0..buffer.len()) .collect::>(), &[ - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::WARNING, - message: "error 1".to_string(), - group_id: 0, - is_primary: true, - ..Default::default() + ( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::WARNING, + message: "error 1".to_string(), + group_id: 0, + is_primary: true, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(1, 8)..Point::new(1, 9), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 1 hint 1".to_string(), - group_id: 0, - is_primary: false, - ..Default::default() + ), + ( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(1, 8)..Point::new(1, 9), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 1 hint 
1".to_string(), + group_id: 0, + is_primary: false, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 1".to_string(), - group_id: 1, - is_primary: false, - ..Default::default() + ), + ( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 1".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(1, 13)..Point::new(1, 15), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::HINT, - message: "error 2 hint 2".to_string(), - group_id: 1, - is_primary: false, - ..Default::default() + ), + ( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(1, 13)..Point::new(1, 15), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::HINT, + message: "error 2 hint 2".to_string(), + group_id: 1, + is_primary: false, + ..Default::default() + } } - }, - DiagnosticEntry { - range: Point::new(2, 8)..Point::new(2, 17), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "error 2".to_string(), - group_id: 1, - is_primary: true, - ..Default::default() + ), + ( + LSP_PROVIDER_NAME.as_ref(), + DiagnosticEntry { + range: Point::new(2, 8)..Point::new(2, 17), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + message: "error 2".to_string(), + group_id: 1, + is_primary: true, + ..Default::default() + } } - } + ) ] ); assert_eq!( - buffer.diagnostic_group::(0).collect::>(), + buffer + .diagnostic_group::(&LSP_PROVIDER_NAME, 0) + .collect::>(), &[ DiagnosticEntry { range: Point::new(1, 8)..Point::new(1, 9), @@ -3978,7 +3998,9 @@ mod tests { ] ); assert_eq!( - buffer.diagnostic_group::(1).collect::>(), + buffer + .diagnostic_group::(&LSP_PROVIDER_NAME, 1) + .collect::>(), &[ DiagnosticEntry { range: Point::new(1, 13)..Point::new(1, 15), diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs index 0aa269e28a838bfbde1b0713513ff1175a1b4940..331126729b19de98d13690308820b30ec9ac9552 100644 --- a/crates/server/src/rpc.rs +++ b/crates/server/src/rpc.rs @@ -1831,6 +1831,7 @@ mod tests { buffer .snapshot() .diagnostics_in_range::<_, Point>(0..buffer.len()) + .map(|(_, entry)| entry) .collect::>(), &[ DiagnosticEntry { From a85e400b359a03d9cbdf9b132facd1bd5f66a526 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 24 Dec 2021 16:22:22 +0100 Subject: [PATCH 192/196] Start on a `DiagnosticProvider` implementation for Rust Co-Authored-By: Nathan Sobo --- Cargo.lock | 1 + crates/language/src/buffer.rs | 7 +- crates/language/src/language.rs | 10 +-- crates/workspace/src/workspace.rs | 14 +++- crates/zed/Cargo.toml | 1 + crates/zed/src/language.rs | 115 ++++++++++++++++++++++++++++++ 6 files changed, 138 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41dcf21797dd187e4ceb399d46bb761a5cf37799..2407ec6140794954aab59235bf3ea974462f6a6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5702,6 +5702,7 @@ dependencies = [ "chat_panel", "client", "clock", + "collections", "contacts_panel", "crossbeam-channel", "ctor", diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 29724db677995b3a3aed65cfcc100684e4758bca..64be4a9c04e6a9da9a3b3257b3f224db724ee22e 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -805,10 +805,13 @@ impl Buffer { } } + 
let start_overshoot = start - last_edit_old_end; start = last_edit_new_end; - start.add_assign(&(start - last_edit_old_end)); + start.add_assign(&start_overshoot); + + let end_overshoot = end - last_edit_old_end; end = last_edit_new_end; - end.add_assign(&(end - last_edit_old_end)); + end.add_assign(&end_overshoot); } let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content); diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 26c0e84261e5984f1ed72a306955cec073434516..d6c13a7fd45854de7fe992a1737c95a00c780b8b 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -9,18 +9,14 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; pub use buffer::Operation; pub use buffer::*; -use collections::HashSet; +use collections::{HashMap, HashSet}; pub use diagnostic_set::DiagnosticEntry; use gpui::AppContext; use highlight_map::HighlightMap; use lazy_static::lazy_static; use parking_lot::Mutex; use serde::Deserialize; -use std::{ - path::{Path, PathBuf}, - str, - sync::Arc, -}; +use std::{path::Path, str, sync::Arc}; use theme::SyntaxTheme; use tree_sitter::{self, Query}; pub use tree_sitter::{Parser, Tree}; @@ -69,7 +65,7 @@ pub trait DiagnosticProvider: 'static + Send + Sync { async fn diagnose( &self, path: Arc, - ) -> Result>)>>; + ) -> Result, Vec>>>; } pub struct Language { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index fdce6f6ff21cdeafde981f67dfad04a776425491..311b68e096d90217f30e8dd1aa5645e12e086ee8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -791,16 +791,24 @@ impl Workspace { { error!("failed to save item: {:?}, ", error); } + + handle.update(&mut cx, |this, cx| { + this.project.update(cx, |project, cx| project.diagnose(cx)) + }); }) .detach(); } }, ); } else { - cx.spawn(|_, mut cx| async move { + cx.spawn(|this, mut cx| async move { if let Err(error) = cx.update(|cx| item.save(cx)).unwrap().await { error!("failed to save item: {:?}, ", error); } + + this.update(&mut cx, |this, cx| { + this.project.update(cx, |project, cx| project.diagnose(cx)) + }); }) .detach(); } @@ -832,6 +840,10 @@ impl Workspace { if let Err(error) = result { error!("failed to save item: {:?}, ", error); } + + handle.update(&mut cx, |this, cx| { + this.project.update(cx, |project, cx| project.diagnose(cx)) + }); }) .detach() } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 452122d55a65083a7a267deb2ad15e84d503cbc7..2b9d7150f2b067e802a55d6ee46a4a23f325a00e 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -29,6 +29,7 @@ test-support = [ [dependencies] chat_panel = { path = "../chat_panel" } +collections = { path = "../collections" } client = { path = "../client" } clock = { path = "../clock" } contacts_panel = { path = "../contacts_panel" } diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index a84d2cbd40b7a9d16734056e29ce79c18a173bff..653c66392c376e44d3fc13dae6e228e2184743e3 100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -7,6 +7,120 @@ use std::{str, sync::Arc}; #[folder = "languages"] struct LanguageDir; +mod rust { + use anyhow::Result; + use async_trait::async_trait; + use collections::{HashMap, HashSet}; + use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity}; + use parking_lot::Mutex; + use serde::Deserialize; + use serde_json::Deserializer; + use smol::process::Command; + use std::path::{Path, PathBuf}; + use std::sync::Arc; + + 
#[derive(Default)] + pub struct DiagnosticProvider { + reported_paths: Mutex>>, + } + + #[derive(Debug, Deserialize)] + struct Check { + message: CompilerMessage, + } + + #[derive(Debug, Deserialize)] + struct CompilerMessage { + code: ErrorCode, + spans: Vec, + message: String, + level: ErrorLevel, + } + + #[derive(Debug, Deserialize)] + enum ErrorLevel { + #[serde(rename = "warning")] + Warning, + #[serde(rename = "error")] + Error, + #[serde(rename = "note")] + Note, + } + + #[derive(Debug, Deserialize)] + struct ErrorCode { + code: String, + } + + #[derive(Debug, Deserialize)] + struct Span { + is_primary: bool, + file_name: PathBuf, + byte_start: usize, + byte_end: usize, + } + + #[async_trait] + impl language::DiagnosticProvider for DiagnosticProvider { + async fn diagnose( + &self, + path: Arc, + ) -> Result, Vec>>> { + let output = Command::new("cargo") + .arg("check") + .args(["--message-format", "json"]) + .current_dir(&path) + .output() + .await?; + + let mut group_id = 0; + let mut diagnostics_by_path = HashMap::default(); + let mut new_reported_paths = HashSet::default(); + for value in + Deserializer::from_slice(&output.stdout).into_iter::<&serde_json::value::RawValue>() + { + if let Ok(check) = serde_json::from_str::(value?.get()) { + let severity = match check.message.level { + ErrorLevel::Warning => DiagnosticSeverity::WARNING, + ErrorLevel::Error => DiagnosticSeverity::ERROR, + ErrorLevel::Note => DiagnosticSeverity::INFORMATION, + }; + for span in check.message.spans { + let span_path: Arc = span.file_name.into(); + new_reported_paths.insert(span_path.clone()); + diagnostics_by_path + .entry(span_path) + .or_insert(Vec::new()) + .push(DiagnosticEntry { + range: span.byte_start..span.byte_end, + diagnostic: Diagnostic { + code: Some(check.message.code.code.clone()), + severity, + message: check.message.message.clone(), + group_id, + is_valid: true, + is_primary: span.is_primary, + is_disk_based: true, + }, + }); + } + group_id += 1; + } + } + + let reported_paths = &mut *self.reported_paths.lock(); + for old_reported_path in reported_paths.iter() { + if !diagnostics_by_path.contains_key(old_reported_path) { + diagnostics_by_path.insert(old_reported_path.clone(), Default::default()); + } + } + *reported_paths = new_reported_paths; + + Ok(diagnostics_by_path) + } + } +} + pub fn build_language_registry() -> LanguageRegistry { let mut languages = LanguageRegistry::default(); languages.add(Arc::new(rust())); @@ -24,6 +138,7 @@ fn rust() -> Language { .unwrap() .with_indents_query(load_query("rust/indents.scm").as_ref()) .unwrap() + .with_diagnostic_provider(rust::DiagnosticProvider::default()) } fn markdown() -> Language { From adeea9da66dad820c7731ca2d45558e8bf8a34c9 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 24 Dec 2021 17:06:18 +0100 Subject: [PATCH 193/196] Parse `children` from `cargo check` output to provide hints Co-Authored-By: Nathan Sobo --- crates/zed/src/language.rs | 82 +++++++++++++++++++++++++++++++++----- 1 file changed, 73 insertions(+), 9 deletions(-) diff --git a/crates/zed/src/language.rs b/crates/zed/src/language.rs index 653c66392c376e44d3fc13dae6e228e2184743e3..293deada4054270b61a8cc0f7ff7401a7a944394 100644 --- a/crates/zed/src/language.rs +++ b/crates/zed/src/language.rs @@ -31,10 +31,11 @@ mod rust { #[derive(Debug, Deserialize)] struct CompilerMessage { - code: ErrorCode, + code: Option, spans: Vec, message: String, level: ErrorLevel, + children: Vec, } #[derive(Debug, Deserialize)] @@ -43,6 +44,8 @@ mod rust { Warning, 
#[serde(rename = "error")] Error, + #[serde(rename = "help")] + Help, #[serde(rename = "note")] Note, } @@ -52,24 +55,30 @@ mod rust { code: String, } - #[derive(Debug, Deserialize)] + #[derive(Clone, Debug, Deserialize)] struct Span { is_primary: bool, file_name: PathBuf, byte_start: usize, byte_end: usize, + expansion: Option>, + } + + #[derive(Clone, Debug, Deserialize)] + struct Expansion { + span: Span, } #[async_trait] impl language::DiagnosticProvider for DiagnosticProvider { async fn diagnose( &self, - path: Arc, + root_path: Arc, ) -> Result, Vec>>> { let output = Command::new("cargo") .arg("check") .args(["--message-format", "json"]) - .current_dir(&path) + .current_dir(&root_path) .output() .await?; @@ -80,13 +89,21 @@ mod rust { Deserializer::from_slice(&output.stdout).into_iter::<&serde_json::value::RawValue>() { if let Ok(check) = serde_json::from_str::(value?.get()) { - let severity = match check.message.level { + let check_severity = match check.message.level { ErrorLevel::Warning => DiagnosticSeverity::WARNING, ErrorLevel::Error => DiagnosticSeverity::ERROR, + ErrorLevel::Help => DiagnosticSeverity::HINT, ErrorLevel::Note => DiagnosticSeverity::INFORMATION, }; - for span in check.message.spans { - let span_path: Arc = span.file_name.into(); + + let mut primary_span = None; + for mut span in check.message.spans { + if let Some(mut expansion) = span.expansion { + expansion.span.is_primary = span.is_primary; + span = expansion.span; + } + + let span_path: Arc = span.file_name.as_path().into(); new_reported_paths.insert(span_path.clone()); diagnostics_by_path .entry(span_path) @@ -94,8 +111,8 @@ mod rust { .push(DiagnosticEntry { range: span.byte_start..span.byte_end, diagnostic: Diagnostic { - code: Some(check.message.code.code.clone()), - severity, + code: check.message.code.as_ref().map(|c| c.code.clone()), + severity: check_severity, message: check.message.message.clone(), group_id, is_valid: true, @@ -103,7 +120,54 @@ mod rust { is_disk_based: true, }, }); + + if span.is_primary { + primary_span = Some(span); + } + } + + for mut child in check.message.children { + if child.spans.is_empty() { + if let Some(primary_span) = primary_span.clone() { + child.spans.push(primary_span); + } + } else { + // TODO + continue; + } + + let child_severity = match child.level { + ErrorLevel::Warning => DiagnosticSeverity::WARNING, + ErrorLevel::Error => DiagnosticSeverity::ERROR, + ErrorLevel::Help => DiagnosticSeverity::HINT, + ErrorLevel::Note => DiagnosticSeverity::INFORMATION, + }; + + for mut span in child.spans { + if let Some(expansion) = span.expansion { + span = expansion.span; + } + + let span_path: Arc = span.file_name.as_path().into(); + new_reported_paths.insert(span_path.clone()); + diagnostics_by_path + .entry(span_path) + .or_insert(Vec::new()) + .push(DiagnosticEntry { + range: span.byte_start..span.byte_end, + diagnostic: Diagnostic { + code: child.code.as_ref().map(|c| c.code.clone()), + severity: child_severity, + message: child.message.clone(), + group_id, + is_valid: true, + is_primary: false, + is_disk_based: true, + }, + }); + } } + group_id += 1; } } From a3df5971559848bddfe83a9735729a8dc48d27a8 Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 24 Dec 2021 13:33:11 -0700 Subject: [PATCH 194/196] Make diagnostics disk-based in test --- crates/diagnostics/src/diagnostics.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 
ad527fe3c76feffc25edf37e0150842408f48535..2b5b9a09f48fcfcd7779a1880fcec69e316d4f48 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -648,6 +648,7 @@ mod tests { .to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, + is_disk_based: true, group_id: 1, ..Default::default() }, @@ -660,6 +661,7 @@ mod tests { .to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, + is_disk_based: true, group_id: 0, ..Default::default() }, @@ -670,6 +672,7 @@ mod tests { message: "value moved here".to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, + is_disk_based: true, group_id: 1, ..Default::default() }, @@ -680,6 +683,7 @@ mod tests { message: "value moved here".to_string(), severity: DiagnosticSeverity::INFORMATION, is_primary: false, + is_disk_based: true, group_id: 0, ..Default::default() }, @@ -690,6 +694,7 @@ mod tests { message: "use of moved value\nvalue used here after move".to_string(), severity: DiagnosticSeverity::ERROR, is_primary: true, + is_disk_based: true, group_id: 0, ..Default::default() }, @@ -700,6 +705,7 @@ mod tests { message: "use of moved value\nvalue used here after move".to_string(), severity: DiagnosticSeverity::ERROR, is_primary: true, + is_disk_based: true, group_id: 1, ..Default::default() }, @@ -770,6 +776,7 @@ mod tests { message: "mismatched types\nexpected `usize`, found `char`".to_string(), severity: DiagnosticSeverity::ERROR, is_primary: true, + is_disk_based: true, group_id: 0, ..Default::default() }, From ce4142eab39118a2ecd43a0473a3e7035087191e Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 24 Dec 2021 13:47:45 -0700 Subject: [PATCH 195/196] Remove special handling of multi-line primary diagnostic messages and fix tests --- crates/diagnostics/src/diagnostics.rs | 85 ++++++++++++++++++--------- 1 file changed, 57 insertions(+), 28 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 2b5b9a09f48fcfcd7779a1880fcec69e316d4f48..a626091c036269839e834654a72c324febedd80e 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -309,17 +309,14 @@ impl ProjectDiagnosticsEditor { if is_first_excerpt_for_group { is_first_excerpt_for_group = false; let primary = &group.entries[group.primary_ix].diagnostic; - let mut header = primary.clone(); - header.message = - primary.message.split('\n').next().unwrap().to_string(); group_state.block_count += 1; - diagnostic_blocks.push(DiagnosticBlock::Header(header.clone())); + diagnostic_blocks.push(DiagnosticBlock::Header(primary.clone())); blocks_to_add.push(BlockProperties { position: header_position, height: 2, render: diagnostic_header_renderer( buffer.clone(), - header, + primary.clone(), true, self.build_settings.clone(), ), @@ -337,22 +334,17 @@ impl ProjectDiagnosticsEditor { } for entry in &group.entries[*start_ix..ix] { - let mut diagnostic = entry.diagnostic.clone(); - if diagnostic.is_primary { - let mut lines = entry.diagnostic.message.split('\n'); - lines.next(); - diagnostic.message = lines.collect(); - } - - if !diagnostic.message.is_empty() { + if !entry.diagnostic.is_primary { group_state.block_count += 1; diagnostic_blocks - .push(DiagnosticBlock::Inline(diagnostic.clone())); + .push(DiagnosticBlock::Inline(entry.diagnostic.clone())); blocks_to_add.push(BlockProperties { position: (excerpt_id.clone(), entry.range.start.clone()), - height: diagnostic.message.matches('\n').count() as u8 + 1, + height: 
entry.diagnostic.message.matches('\n').count() + as u8 + + 1, render: diagnostic_block_renderer( - diagnostic, + entry.diagnostic.clone(), true, self.build_settings.clone(), ), @@ -691,7 +683,7 @@ mod tests { DiagnosticEntry { range: 112..113, diagnostic: Diagnostic { - message: "use of moved value\nvalue used here after move".to_string(), + message: "use of moved value".to_string(), severity: DiagnosticSeverity::ERROR, is_primary: true, is_disk_based: true, @@ -699,10 +691,21 @@ mod tests { ..Default::default() }, }, + DiagnosticEntry { + range: 112..113, + diagnostic: Diagnostic { + message: "value used here after move".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, DiagnosticEntry { range: 122..123, diagnostic: Diagnostic { - message: "use of moved value\nvalue used here after move".to_string(), + message: "use of moved value".to_string(), severity: DiagnosticSeverity::ERROR, is_primary: true, is_disk_based: true, @@ -710,6 +713,17 @@ mod tests { ..Default::default() }, }, + DiagnosticEntry { + range: 122..123, + diagnostic: Diagnostic { + message: "value used here after move".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }, ], cx, ) @@ -770,17 +784,30 @@ mod tests { worktree .update_diagnostics_from_provider( Arc::from("/test/a.rs".as_ref()), - vec![DiagnosticEntry { - range: 15..15, - diagnostic: Diagnostic { - message: "mismatched types\nexpected `usize`, found `char`".to_string(), - severity: DiagnosticSeverity::ERROR, - is_primary: true, - is_disk_based: true, - group_id: 0, - ..Default::default() + vec![ + DiagnosticEntry { + range: 15..15, + diagnostic: Diagnostic { + message: "mismatched types".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, }, - }], + DiagnosticEntry { + range: 15..15, + diagnostic: Diagnostic { + message: "expected `usize`, found `char`".to_string(), + severity: DiagnosticSeverity::INFORMATION, + is_primary: false, + is_disk_based: true, + group_id: 0, + ..Default::default() + }, + }, + ], cx, ) .unwrap(); @@ -801,6 +828,8 @@ mod tests { "\n", // primary message "\n", // filename "const a: i32 = 'a';\n", + "\n", // supporting diagnostic + "\n", // context line // // main.rs, diagnostic group 1 // From cf81f5a5556c2e4aebf20b7d79ed874492092b3e Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Fri, 24 Dec 2021 16:36:21 -0700 Subject: [PATCH 196/196] Update tests to reflect that we no longer attempt to recycle group ids --- crates/language/src/tests.rs | 124 +---------------------------------- 1 file changed, 2 insertions(+), 122 deletions(-) diff --git a/crates/language/src/tests.rs b/crates/language/src/tests.rs index 81c5865e1138050046e91616d018ee37de2d508e..73ac9266da753c5cd25032937a8ac6f0fca5116e 100644 --- a/crates/language/src/tests.rs +++ b/crates/language/src/tests.rs @@ -655,7 +655,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { diagnostic: Diagnostic { severity: DiagnosticSeverity::WARNING, message: "unreachable statement".to_string(), - group_id: 3, + group_id: 1, is_primary: true, ..Default::default() } @@ -768,7 +768,7 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { severity: DiagnosticSeverity::ERROR, message: "undefined variable 'BB'".to_string(), is_disk_based: true, - group_id: 4, + group_id: 1, is_primary: true, 
..Default::default() }, @@ -779,126 +779,6 @@ async fn test_diagnostics(mut cx: gpui::TestAppContext) { }); } -#[gpui::test] -async fn test_preserving_old_group_ids_and_disk_based_diagnostics(mut cx: gpui::TestAppContext) { - let buffer = cx.add_model(|cx| { - let text = " - use a::*; - const b: i32 = c::; - const c: i32 = d; - const e: i32 = f +; - " - .unindent(); - - let mut rust_lang = rust_lang(); - rust_lang.config.language_server = Some(LanguageServerConfig { - disk_based_diagnostic_sources: HashSet::from_iter(["disk".to_string()]), - ..Default::default() - }); - - let mut buffer = Buffer::new(0, text, cx); - buffer.set_language(Some(Arc::new(rust_lang)), None, cx); - buffer - }); - - // Initially, there are three errors. The second one is disk-based. - let diagnostics = vec![ - DiagnosticEntry { - range: PointUtf16::new(1, 16)..PointUtf16::new(1, 18), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "syntax error 1".to_string(), - group_id: 0, - is_primary: true, - is_valid: true, - ..Default::default() - }, - }, - DiagnosticEntry { - range: PointUtf16::new(2, 15)..PointUtf16::new(2, 16), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "cannot find value `d` in this scope".to_string(), - is_disk_based: true, - group_id: 1, - is_primary: true, - is_valid: true, - ..Default::default() - }, - }, - DiagnosticEntry { - range: PointUtf16::new(3, 17)..PointUtf16::new(3, 18), - diagnostic: Diagnostic { - severity: DiagnosticSeverity::ERROR, - message: "syntax error 2".to_string(), - group_id: 2, - is_primary: true, - is_valid: true, - ..Default::default() - }, - }, - ]; - buffer.update(&mut cx, |buffer, cx| { - buffer - .update_diagnostics("lsp".into(), None, diagnostics.clone(), cx) - .unwrap(); - assert_eq!( - buffer - .snapshot() - .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) - .collect::>(), - diagnostics - .iter() - .map(|entry| ("lsp", entry.clone())) - .collect::>(), - ); - }); - - // The diagnostics are updated. The disk-based diagnostic is omitted, and one - // other diagnostic has changed its message. - let mut new_diagnostics = vec![diagnostics[0].clone(), diagnostics[2].clone()]; - new_diagnostics[0].diagnostic.message = "another syntax error".to_string(); - - buffer.update(&mut cx, |buffer, cx| { - buffer - .update_diagnostics("lsp".into(), None, new_diagnostics.clone(), cx) - .unwrap(); - assert_eq!( - buffer - .snapshot() - .diagnostics_in_range::<_, PointUtf16>(PointUtf16::new(0, 0)..PointUtf16::new(4, 0)) - .collect::>(), - &[ - // The changed diagnostic is given a new group id. - ( - "lsp", - DiagnosticEntry { - range: new_diagnostics[0].range.clone(), - diagnostic: Diagnostic { - group_id: 3, - ..new_diagnostics[0].diagnostic.clone() - }, - } - ), - // The old disk-based diagnostic is marked as invalid, but keeps - // its original group id. - ( - "lsp", - DiagnosticEntry { - range: diagnostics[1].range.clone(), - diagnostic: Diagnostic { - is_valid: false, - ..diagnostics[1].diagnostic.clone() - }, - } - ), - // The unchanged diagnostic keeps its original group id - ("lsp", new_diagnostics[1].clone()), - ], - ); - }); -} - #[gpui::test] async fn test_empty_diagnostic_ranges(mut cx: gpui::TestAppContext) { cx.add_model(|cx| {