From e8a9eee84f1a32ebae821d85ced0de97e8cb7a7d Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 31 May 2021 16:27:54 +0200 Subject: [PATCH 01/40] WIP --- zed/src/editor/buffer.rs | 126 +++++++++---------------- zed/src/editor/buffer/anchor.rs | 25 ++--- zed/src/editor/display_map/fold_map.rs | 25 ++++- zed/src/time.rs | 2 +- 4 files changed, 83 insertions(+), 95 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 7fd9c4c9d6d81f0511dc430eea4afbce0dc70d60..390149646e34d017196ec99ac68b96f6403cd4c0 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1498,16 +1498,12 @@ impl Buffer { Operation::UpdateSelections { selections, .. } => { if let Some(selections) = selections { selections.iter().all(|selection| { - let contains_start = match selection.start { - Anchor::Middle { insertion_id, .. } => { - self.version.observed(insertion_id) - } + let contains_start = match &selection.start { + Anchor::Middle { version, .. } => self.version >= *version, _ => true, }; - let contains_end = match selection.end { - Anchor::Middle { insertion_id, .. } => { - self.version.observed(insertion_id) - } + let contains_end = match &selection.end { + Anchor::Middle { version, .. } => self.version >= *version, _ => true, }; contains_start && contains_end @@ -1980,62 +1976,15 @@ impl Buffer { let max_offset = self.len(); assert!(offset <= max_offset, "offset is out of range"); - let seek_bias; - match bias { - AnchorBias::Left => { - if offset == 0 { - return Anchor::Start; - } else { - seek_bias = SeekBias::Left; - } - } - AnchorBias::Right => { - if offset == max_offset { - return Anchor::End; - } else { - seek_bias = SeekBias::Right; - } - } - }; - - let mut cursor = self.fragments.cursor::(); - cursor.seek(&offset, seek_bias, &()); - let fragment = cursor.item().unwrap(); - let offset_in_fragment = offset - cursor.start(); - let offset_in_insertion = fragment.range_in_insertion.start + offset_in_fragment; - let anchor = Anchor::Middle { - insertion_id: fragment.insertion.id, - offset: offset_in_insertion, - bias, - }; - anchor - } - - fn fragment_id_for_anchor(&self, anchor: &Anchor) -> Result<&FragmentId> { - match anchor { - Anchor::Start => Ok(FragmentId::max_value()), - Anchor::End => Ok(FragmentId::min_value()), + if offset == 0 && bias == AnchorBias::Left { + Anchor::Start + } else if offset == max_offset && bias == AnchorBias::Right { + Anchor::End + } else { Anchor::Middle { - insertion_id, offset, bias, - .. 
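// A minimal, self-contained sketch of the version-vector comparison that the new
// check above relies on (`self.version >= *version`): a buffer can resolve an
// `Anchor::Middle { version, .. }` once its own version has observed everything the
// anchor's version has observed. `VersionVector`, its replica/sequence types, and
// `dominates` are illustrative stand-ins, not Zed's `time::Global`.

use std::collections::HashMap;

/// One counter per replica id; an event is "observed" if its sequence number
/// is <= the stored maximum for that replica.
#[derive(Clone, Default, Debug)]
struct VersionVector(HashMap<u16, u32>);

impl VersionVector {
    fn observe(&mut self, replica: u16, seq: u32) {
        let entry = self.0.entry(replica).or_insert(0);
        *entry = (*entry).max(seq);
    }

    /// `self` dominates `other` if it has observed every event `other` has.
    fn dominates(&self, other: &Self) -> bool {
        other
            .0
            .iter()
            .all(|(replica, seq)| self.0.get(replica).copied().unwrap_or(0) >= *seq)
    }
}

fn main() {
    let mut buffer_version = VersionVector::default();
    buffer_version.observe(1, 5);
    buffer_version.observe(2, 3);

    let mut anchor_version = VersionVector::default();
    anchor_version.observe(1, 4);

    // The buffer has seen everything the anchor depends on, so an
    // `UpdateSelections` operation carrying it can be applied.
    assert!(buffer_version.dominates(&anchor_version));

    anchor_version.observe(3, 1);
    // Now the anchor references an insertion the buffer hasn't seen yet.
    assert!(!buffer_version.dominates(&anchor_version));
}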
- } => { - let seek_bias = match bias { - AnchorBias::Left => SeekBias::Left, - AnchorBias::Right => SeekBias::Right, - }; - - let splits = self - .insertion_splits - .get(&insertion_id) - .ok_or_else(|| anyhow!("split does not exist for insertion id"))?; - let mut splits_cursor = splits.cursor::(); - splits_cursor.seek(offset, seek_bias, &()); - splits_cursor - .item() - .ok_or_else(|| anyhow!("split offset is out of range")) - .map(|split| &split.fragment_id) + version: self.version(), } } } @@ -2045,31 +1994,44 @@ impl Buffer { Anchor::Start => TextSummary::default(), Anchor::End => self.text_summary(), Anchor::Middle { - insertion_id, offset, bias, + version, } => { - let seek_bias = match bias { - AnchorBias::Left => SeekBias::Left, - AnchorBias::Right => SeekBias::Right, - }; - - let splits = self - .insertion_splits - .get(&insertion_id) - .expect("split does not exist for insertion id"); - let mut splits_cursor = splits.cursor::(); - splits_cursor.seek(offset, seek_bias, &()); - let split = splits_cursor.item().expect("split offset is out of range"); - - let mut fragments_cursor = self.fragments.cursor::(); - fragments_cursor.seek(&FragmentIdRef::new(&split.fragment_id), SeekBias::Left, &()); - let fragment = fragments_cursor.item().expect("fragment id does not exist"); - - let mut ix = *fragments_cursor.start(); - if fragment.visible { - ix += offset - fragment.range_in_insertion.start; + let mut cursor = self + .fragments + .filter::<_, usize>(|summary| !(*version >= summary.max_version), &()); + + let mut old_offset = 0; + let mut new_offset = 0; + while let Some(fragment) = cursor.item() { + let bytes_since_last_fragment = *cursor.start() - new_offset; + let comparison = offset.cmp(&(old_offset + bytes_since_last_fragment)); + if comparison == cmp::Ordering::Greater + || (comparison == cmp::Ordering::Equal && *bias == AnchorBias::Right) + { + old_offset += bytes_since_last_fragment; + new_offset += bytes_since_last_fragment; + + if fragment.was_visible(version, &self.undo_map) { + let comparison = offset.cmp(&(old_offset + fragment.visible_len())); + if comparison == cmp::Ordering::Greater + || (comparison == cmp::Ordering::Equal + && *bias == AnchorBias::Right) + { + old_offset += fragment.len(); + } else { + break; + } + } + new_offset += fragment.visible_len(); + cursor.next(&()); + } else { + break; + } } + + let ix = new_offset + offset.saturating_sub(old_offset); self.text_summary_for_range(0..ix) } } diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 3b2687f96daf63e6149d3400f4034fe8b8e94359..607c5d4aa884e8676fc1a8fe11dda553e4b63146 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -1,17 +1,16 @@ -use super::Buffer; +use super::{Buffer, ToOffset}; use crate::time; use anyhow::Result; -use std::cmp::Ordering; -use std::ops::Range; +use std::{cmp::Ordering, ops::Range}; #[derive(Clone, Eq, PartialEq, Debug, Hash)] pub enum Anchor { Start, End, Middle { - insertion_id: time::Local, offset: usize, bias: AnchorBias, + version: time::Global, }, } @@ -55,18 +54,22 @@ impl Anchor { Anchor::Middle { offset: self_offset, bias: self_bias, - .. + version: self_version, }, Anchor::Middle { offset: other_offset, bias: other_bias, - .. + version: other_version, }, - ) => buffer - .fragment_id_for_anchor(self)? - .cmp(buffer.fragment_id_for_anchor(other)?) 
- .then_with(|| self_offset.cmp(other_offset)) - .then_with(|| self_bias.cmp(other_bias)), + ) => { + let offset_comparison = if self_version == other_version { + self_offset.cmp(other_offset) + } else { + self.to_offset(buffer).cmp(&other.to_offset(buffer)) + }; + + offset_comparison.then_with(|| self_bias.cmp(other_bias)) + } }) } diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs index 6fc3c7175beb92626312209897e8c505cd0a4e20..1866e86608d32e7609d4b2a89b06ed9b92d87129 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -1027,11 +1027,34 @@ mod tests { for _ in 0..operations { log::info!("text: {:?}", buffer.read(cx).text()); + { + let buffer = buffer.read(cx); + let mut cursor = map.folds.cursor::<(), ()>(); + cursor.next(buffer); + let mut prev_fold: Option<&Fold> = None; + while let Some(fold) = cursor.item() { + if let Some(prev_fold) = prev_fold { + let prev_fold = prev_fold.0.start.to_offset(buffer) + ..prev_fold.0.end.to_offset(buffer); + let fold = fold.0.start.to_offset(buffer)..fold.0.end.to_offset(buffer); + assert!( + fold.start > prev_fold.start + || (fold.start == prev_fold.start && fold.end <= prev_fold.end), + "prev fold {:?}\ncurr fold {:?}", + prev_fold, + fold + ); + } + + prev_fold = Some(fold); + cursor.next(buffer); + } + } match rng.gen_range(0..=100) { 0..=34 => { let buffer = buffer.read(cx); let mut to_fold = Vec::new(); - for _ in 0..rng.gen_range(1..=5) { + for _ in 0..rng.gen_range(1..=2) { let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right); let start = buffer.clip_offset(rng.gen_range(0..=end), Left); to_fold.push(start..end); diff --git a/zed/src/time.rs b/zed/src/time.rs index f1cf4eef916c9c7995cbc333a18d3a2d51a1ccd2..8668ebfe9cfaed30a66dca3a22aec8ba18e01891 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -54,7 +54,7 @@ impl<'a> AddAssign<&'a Local> for Local { } } -#[derive(Clone, Debug, Default, Eq, PartialEq)] +#[derive(Clone, Debug, Default, Hash, Eq, PartialEq)] pub struct Global(SmallVec<[Local; 3]>); impl Global { From 311e1b0f5e53bff1a5330e1c27ad1e7a08db541b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Jun 2021 12:50:10 +0200 Subject: [PATCH 02/40] Supply a context when adding summary to `Dimension` --- zed/src/editor/buffer.rs | 41 ++++---- zed/src/editor/buffer/rope.rs | 32 +++--- zed/src/editor/display_map/fold_map.rs | 110 ++++++++++----------- zed/src/operation_queue.rs | 2 +- zed/src/sum_tree.rs | 130 ++++++++++++++----------- zed/src/sum_tree/cursor.rs | 122 ++++++++++++----------- zed/src/worktree.rs | 8 +- 7 files changed, 234 insertions(+), 211 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 390149646e34d017196ec99ac68b96f6403cd4c0..2241b87b2f0ca4a7ce2b16897fd165dbaf62da35 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -360,7 +360,7 @@ struct FragmentTextSummary { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { - fn add_summary(&mut self, summary: &'a FragmentSummary) { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &()) { self.visible += summary.text.visible; self.deleted += summary.text.deleted; } @@ -825,7 +825,7 @@ impl Buffer { } pub fn len(&self) -> usize { - self.fragments.extent::() + self.fragments.extent::(&()) } pub fn line_len(&self, row: u32) -> u32 { @@ -871,9 +871,10 @@ impl Buffer { pub fn edits_since<'a>(&'a self, since: time::Global) -> impl 'a + Iterator { let since_2 = since.clone(); - 
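// A small sketch of the comparison strategy introduced in the `Anchor::cmp` hunk
// above: anchors created at the same buffer version store offsets in the same
// coordinate space and compare directly; otherwise both are first resolved to
// offsets in the current buffer, and `bias` only breaks ties. `SketchAnchor`,
// `Bias`, and the `resolve` closure are hypothetical stand-ins for Zed's
// `Anchor`, `AnchorBias`, and `to_offset`.

use std::cmp::Ordering;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Bias {
    Left,
    Right,
}

#[derive(Clone, Debug)]
struct SketchAnchor {
    offset: usize, // offset at the time the anchor was created
    version: u64,  // stand-in for a version vector
    bias: Bias,
}

/// `resolve` maps an anchor to its offset in the current buffer contents.
fn cmp_anchors(
    a: &SketchAnchor,
    b: &SketchAnchor,
    resolve: impl Fn(&SketchAnchor) -> usize,
) -> Ordering {
    let offset_ordering = if a.version == b.version {
        a.offset.cmp(&b.offset)
    } else {
        resolve(a).cmp(&resolve(b))
    };
    offset_ordering.then_with(|| a.bias.cmp(&b.bias))
}

fn main() {
    let a = SketchAnchor { offset: 3, version: 1, bias: Bias::Left };
    let b = SketchAnchor { offset: 3, version: 1, bias: Bias::Right };
    // Same version and same offset: the bias decides the ordering.
    assert_eq!(cmp_anchors(&a, &b, |anchor| anchor.offset), Ordering::Less);
}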
let cursor = self - .fragments - .filter(move |summary| summary.max_version.changed_since(&since_2)); + let cursor = self.fragments.filter( + move |summary| summary.max_version.changed_since(&since_2), + &(), + ); Edits { deleted_text: &self.deleted_text, @@ -1201,7 +1202,7 @@ impl Buffer { let fragment = fragments_cursor.item().unwrap().clone(); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &()); - fragments_cursor.next(); + fragments_cursor.next(&()); } while let Some(fragment) = fragments_cursor.item() { @@ -1291,7 +1292,7 @@ impl Buffer { new_fragments.push(fragment, &()); } - fragments_cursor.next(); + fragments_cursor.next(&()); } if let Some(new_text) = new_text { @@ -1420,7 +1421,7 @@ impl Buffer { new_ropes.push_fragment(&fragment, was_visible); new_fragments.push(fragment.clone(), &()); - fragments_cursor.next(); + fragments_cursor.next(&()); if let Some(split_id) = insertion_splits.next() { let slice = fragments_cursor.slice(&FragmentIdRef::new(split_id), SeekBias::Left, &()); @@ -1453,7 +1454,7 @@ impl Buffer { new_ropes.push_fragment(&fragment, fragment_was_visible); new_fragments.push(fragment, &()); - fragments_cursor.next(); + fragments_cursor.next(&()); } } } @@ -1704,9 +1705,9 @@ impl Buffer { }, &(), ); - splits_cursor.next(); + splits_cursor.next(&()); new_split_tree.push_tree( - splits_cursor.slice(&old_split_tree.extent::(), SeekBias::Right, &()), + splits_cursor.slice(&old_split_tree.extent::(&()), SeekBias::Right, &()), &(), ); self.insertion_splits @@ -1716,7 +1717,7 @@ impl Buffer { new_fragments.push(fragment, &()); // Scan forward until we find a fragment that is not fully contained by the current splice. - fragments_cursor.next(); + fragments_cursor.next(&()); if let Some(range) = cur_range.clone() { while let Some(fragment) = fragments_cursor.item() { let fragment_summary = fragments_cursor.item_summary().unwrap(); @@ -1733,7 +1734,7 @@ impl Buffer { new_ropes.push_fragment(&new_fragment, fragment_was_visible); new_fragments.push(new_fragment, &()); - fragments_cursor.next(); + fragments_cursor.next(&()); if range.end == fragment_end { end_id = Some(fragment.insertion.id); @@ -1912,9 +1913,9 @@ impl Buffer { ); } - cursor.next(); + cursor.next(&()); new_split_tree.push_tree( - cursor.slice(&old_split_tree.extent::(), SeekBias::Right, &()), + cursor.slice(&old_split_tree.extent::(&()), SeekBias::Right, &()), &(), ); @@ -2261,7 +2262,7 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { } } - self.cursor.next(); + self.cursor.next(&()); } change @@ -2445,7 +2446,7 @@ impl<'a> FragmentIdRef<'a> { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentIdRef<'a> { - fn add_summary(&mut self, summary: &'a FragmentSummary) { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &()) { self.0 = Some(&summary.max_fragment_id) } } @@ -2543,7 +2544,7 @@ impl Default for FragmentSummary { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { - fn add_summary(&mut self, summary: &FragmentSummary) { + fn add_summary(&mut self, summary: &FragmentSummary, _: &()) { *self += summary.text.visible; } } @@ -2573,7 +2574,7 @@ impl Default for InsertionSplitSummary { } impl<'a> sum_tree::Dimension<'a, InsertionSplitSummary> for usize { - fn add_summary(&mut self, summary: &InsertionSplitSummary) { + fn add_summary(&mut self, summary: &InsertionSplitSummary, _: &()) { *self += summary.extent; } } @@ -3661,7 +3662,7 @@ mod tests { let text = " mod x { mod y { - + } } " diff --git 
a/zed/src/editor/buffer/rope.rs b/zed/src/editor/buffer/rope.rs index 98b317c0ed29c8db0c143977faa513238083d28e..05b30e389604504f21172894d26cbc73782ec23c 100644 --- a/zed/src/editor/buffer/rope.rs +++ b/zed/src/editor/buffer/rope.rs @@ -25,13 +25,13 @@ impl Rope { pub fn append(&mut self, rope: Rope) { let mut chunks = rope.chunks.cursor::<(), ()>(); - chunks.next(); + chunks.next(&()); if let Some(chunk) = chunks.item() { if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE) || chunk.0.len() < CHUNK_BASE { self.push(&chunk.0); - chunks.next(); + chunks.next(&()); } } @@ -99,11 +99,11 @@ impl Rope { } pub fn len(&self) -> usize { - self.chunks.extent() + self.chunks.extent(&()) } pub fn max_point(&self) -> Point { - self.chunks.extent() + self.chunks.extent(&()) } pub fn cursor(&self, offset: usize) -> Cursor { @@ -218,12 +218,12 @@ impl<'a> Cursor<'a> { let mut slice = Rope::new(); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start(); + let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); slice.push(&start_chunk.0[start_ix..end_ix]); } - if end_offset > self.chunks.end() { - self.chunks.next(); + if end_offset > self.chunks.end(&()) { + self.chunks.next(&()); slice.append(Rope { chunks: self.chunks.slice(&end_offset, SeekBias::Right, &()), }); @@ -243,12 +243,12 @@ impl<'a> Cursor<'a> { let mut summary = TextSummary::default(); if let Some(start_chunk) = self.chunks.item() { let start_ix = self.offset - self.chunks.start(); - let end_ix = cmp::min(end_offset, self.chunks.end()) - self.chunks.start(); + let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start(); summary = TextSummary::from(&start_chunk.0[start_ix..end_ix]); } - if end_offset > self.chunks.end() { - self.chunks.next(); + if end_offset > self.chunks.end(&()) { + self.chunks.next(&()); summary += &self.chunks.summary(&end_offset, SeekBias::Right, &()); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); @@ -260,7 +260,7 @@ impl<'a> Cursor<'a> { } pub fn suffix(mut self) -> Rope { - self.slice(self.rope.chunks.extent()) + self.slice(self.rope.chunks.extent(&())) } pub fn offset(&self) -> usize { @@ -285,7 +285,7 @@ impl<'a> Chunks<'a> { } pub fn seek(&mut self, offset: usize) { - if offset >= self.chunks.end() { + if offset >= self.chunks.end(&()) { self.chunks.seek_forward(&offset, SeekBias::Right, &()); } else { self.chunks.seek(&offset, SeekBias::Right, &()); @@ -312,7 +312,7 @@ impl<'a> Iterator for Chunks<'a> { fn next(&mut self) -> Option { let result = self.peek(); if result.is_some() { - self.chunks.next(); + self.chunks.next(&()); } result } @@ -478,19 +478,19 @@ impl std::ops::AddAssign for TextSummary { } impl<'a> sum_tree::Dimension<'a, TextSummary> for TextSummary { - fn add_summary(&mut self, summary: &'a TextSummary) { + fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { *self += summary; } } impl<'a> sum_tree::Dimension<'a, TextSummary> for usize { - fn add_summary(&mut self, summary: &'a TextSummary) { + fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { *self += summary.bytes; } } impl<'a> sum_tree::Dimension<'a, TextSummary> for Point { - fn add_summary(&mut self, summary: &'a TextSummary) { + fn add_summary(&mut self, summary: &'a TextSummary, _: &()) { *self += &summary.lines; } } diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs 
index 1866e86608d32e7609d4b2a89b06ed9b92d87129..135ecb566f254be3d3a35536917aa5f2e49084a2 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -12,6 +12,7 @@ use gpui::{AppContext, ModelHandle}; use parking_lot::{Mutex, MutexGuard}; use std::{ cmp::{self, Ordering}, + iter, ops::Range, }; @@ -80,7 +81,13 @@ impl FoldMap { where T: ToOffset, { - self.intersecting_folds(range, cx).map(|f| &f.0) + let buffer = self.buffer.read(cx); + let mut folds = self.intersecting_folds(range, cx); + iter::from_fn(move || { + let item = folds.item().map(|f| &f.0); + folds.next(buffer); + item + }) } pub fn fold( @@ -149,7 +156,7 @@ impl FoldMap { ..Default::default() }); fold_ixs_to_delete.push(*folds_cursor.start()); - folds_cursor.next(); + folds_cursor.next(buffer); } } @@ -167,7 +174,7 @@ impl FoldMap { let mut folds = SumTree::new(); for fold_ix in fold_ixs_to_delete { folds.push_tree(cursor.slice(&fold_ix, SeekBias::Right, buffer), buffer); - cursor.next(); + cursor.next(buffer); } folds.push_tree(cursor.suffix(buffer), buffer); folds @@ -186,10 +193,13 @@ impl FoldMap { let buffer = self.buffer.read(cx); let start = buffer.anchor_before(range.start.to_offset(buffer)); let end = buffer.anchor_after(range.end.to_offset(buffer)); - self.folds.filter::<_, usize>(move |summary| { - start.cmp(&summary.max_end, buffer).unwrap() == Ordering::Less - && end.cmp(&summary.min_start, buffer).unwrap() == Ordering::Greater - }) + self.folds.filter::<_, usize>( + move |summary| { + start.cmp(&summary.max_end, buffer).unwrap() == Ordering::Less + && end.cmp(&summary.min_start, buffer).unwrap() == Ordering::Greater + }, + buffer, + ) } pub fn intersects_fold(&self, offset: T, cx: &AppContext) -> bool @@ -212,8 +222,8 @@ impl FoldMap { if transform.display_text.is_some() { return true; } - if cursor.end().row() == display_row { - cursor.next() + if cursor.end(&()).row() == display_row { + cursor.next(&()) } else { break; } @@ -244,7 +254,7 @@ impl FoldMap { let overshoot = point - cursor.start().buffer.lines; DisplayPoint(cmp::min( cursor.start().display.lines + overshoot, - cursor.end().display.lines, + cursor.end(&()).display.lines, )) } @@ -276,7 +286,7 @@ impl FoldMap { edit.old_range.start = *cursor.start(); cursor.seek(&edit.old_range.end, SeekBias::Right, &()); - cursor.next(); + cursor.next(&()); let mut delta = edit.delta(); loop { @@ -293,7 +303,7 @@ impl FoldMap { if next_edit.old_range.end >= edit.old_range.end { edit.old_range.end = next_edit.old_range.end; cursor.seek(&edit.old_range.end, SeekBias::Right, &()); - cursor.next(); + cursor.next(&()); } } else { break; @@ -306,9 +316,14 @@ impl FoldMap { let anchor = buffer.anchor_before(edit.new_range.start); let mut folds_cursor = self.folds.cursor::<_, ()>(); folds_cursor.seek(&Fold(anchor..Anchor::End), SeekBias::Left, buffer); - let mut folds = folds_cursor - .map(|f| f.0.start.to_offset(buffer)..f.0.end.to_offset(buffer)) - .peekable(); + let mut folds = iter::from_fn(move || { + let item = folds_cursor + .item() + .map(|f| f.0.start.to_offset(buffer)..f.0.end.to_offset(buffer)); + folds_cursor.next(buffer); + item + }) + .peekable(); while folds .peek() @@ -501,7 +516,7 @@ impl FoldMapSnapshot { if offset.0 == transform_start || matches!(bias, Bias::Left) { DisplayOffset(transform_start) } else { - DisplayOffset(cursor.end().display.bytes) + DisplayOffset(cursor.end(&()).display.bytes) } } else { let overshoot = offset.0 - transform_start; @@ -526,7 +541,7 @@ impl FoldMapSnapshot { if point.0 == 
transform_start || matches!(bias, Bias::Left) { DisplayPoint(transform_start) } else { - DisplayPoint(cursor.end().display.lines) + DisplayPoint(cursor.end(&()).display.lines) } } else { let overshoot = point.0 - transform_start; @@ -574,7 +589,7 @@ impl sum_tree::Summary for TransformSummary { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for TransformSummary { - fn add_summary(&mut self, summary: &'a TransformSummary) { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { sum_tree::Summary::add_summary(self, summary, &()); } } @@ -649,7 +664,7 @@ impl sum_tree::Summary for FoldSummary { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for Fold { - fn add_summary(&mut self, summary: &'a FoldSummary) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &Buffer) { self.0.start = summary.start.clone(); self.0.end = summary.end.clone(); } @@ -662,7 +677,7 @@ impl<'a> sum_tree::SeekDimension<'a, FoldSummary> for Fold { } impl<'a> sum_tree::Dimension<'a, FoldSummary> for usize { - fn add_summary(&mut self, summary: &'a FoldSummary) { + fn add_summary(&mut self, summary: &'a FoldSummary, _: &Buffer) { *self += summary.count; } } @@ -676,8 +691,8 @@ impl<'a> Iterator for BufferRows<'a> { type Item = u32; fn next(&mut self) -> Option { - while self.display_point > self.cursor.end().display.lines { - self.cursor.next(); + while self.display_point > self.cursor.end(&()).display.lines { + self.cursor.next(&()); if self.cursor.item().is_none() { // TODO: Return a bool from next? break; @@ -717,10 +732,10 @@ impl<'a> Iterator for Chunks<'a> { self.buffer_offset += transform.summary.buffer.bytes; self.buffer_chunks.seek(self.buffer_offset); - while self.buffer_offset >= self.transform_cursor.end().buffer.bytes + while self.buffer_offset >= self.transform_cursor.end(&()).buffer.bytes && self.transform_cursor.item().is_some() { - self.transform_cursor.next(); + self.transform_cursor.next(&()); } return Some(display_text); @@ -732,10 +747,10 @@ impl<'a> Iterator for Chunks<'a> { chunk = &chunk[offset_in_chunk..]; // Truncate the chunk so that it ends at the next fold. - let region_end = self.transform_cursor.end().buffer.bytes - self.buffer_offset; + let region_end = self.transform_cursor.end(&()).buffer.bytes - self.buffer_offset; if chunk.len() >= region_end { chunk = &chunk[0..region_end]; - self.transform_cursor.next(); + self.transform_cursor.next(&()); } else { self.buffer_chunks.next(); } @@ -772,10 +787,10 @@ impl<'a> Iterator for HighlightedChunks<'a> { self.buffer_offset += transform.summary.buffer.bytes; self.buffer_chunks.seek(self.buffer_offset); - while self.buffer_offset >= self.transform_cursor.end().buffer.bytes + while self.buffer_offset >= self.transform_cursor.end(&()).buffer.bytes && self.transform_cursor.item().is_some() { - self.transform_cursor.next(); + self.transform_cursor.next(&()); } return Some((display_text, StyleId::default())); @@ -796,10 +811,10 @@ impl<'a> Iterator for HighlightedChunks<'a> { chunk = &chunk[offset_in_chunk..]; // Truncate the chunk so that it ends at the next fold. 
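// A simplified, stand-alone sketch of what the chunk iterators around here do:
// walk the visible text and, wherever a fold begins, emit its display text ("…")
// in place of the folded characters. `display_chunks` and its assumptions (folds
// sorted, non-overlapping, within bounds, on char boundaries) are illustrative;
// the real iterator streams rope chunks through a transform cursor instead of
// slicing a single &str.

fn display_chunks<'a>(text: &'a str, folds: &[std::ops::Range<usize>]) -> Vec<&'a str> {
    let mut out = Vec::new();
    let mut offset = 0;
    for fold in folds {
        if fold.start > offset {
            out.push(&text[offset..fold.start]); // unfolded text before the fold
        }
        out.push("…"); // the fold's display text
        offset = fold.end;
    }
    if offset < text.len() {
        out.push(&text[offset..]);
    }
    out
}

fn main() {
    // Pretend a single fold covers byte offsets 10..20 of this chunk.
    let chunks = display_chunks("fn main() { body(); }", &[10..20]);
    assert_eq!(chunks, vec!["fn main() ", "…", "}"]);
}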
- let region_end = self.transform_cursor.end().buffer.bytes - self.buffer_offset; + let region_end = self.transform_cursor.end(&()).buffer.bytes - self.buffer_offset; if chunk.len() >= region_end { chunk = &chunk[0..region_end]; - self.transform_cursor.next(); + self.transform_cursor.next(&()); } else { self.buffer_chunk.take(); } @@ -813,7 +828,7 @@ impl<'a> Iterator for HighlightedChunks<'a> { } impl<'a> sum_tree::Dimension<'a, TransformSummary> for DisplayPoint { - fn add_summary(&mut self, summary: &'a TransformSummary) { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.display.lines; } } @@ -822,19 +837,19 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for DisplayPoint { pub struct DisplayOffset(usize); impl<'a> sum_tree::Dimension<'a, TransformSummary> for DisplayOffset { - fn add_summary(&mut self, summary: &'a TransformSummary) { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { self.0 += &summary.display.bytes; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for Point { - fn add_summary(&mut self, summary: &'a TransformSummary) { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.buffer.lines; } } impl<'a> sum_tree::Dimension<'a, TransformSummary> for usize { - fn add_summary(&mut self, summary: &'a TransformSummary) { + fn add_summary(&mut self, summary: &'a TransformSummary, _: &()) { *self += &summary.buffer.bytes; } } @@ -1027,29 +1042,6 @@ mod tests { for _ in 0..operations { log::info!("text: {:?}", buffer.read(cx).text()); - { - let buffer = buffer.read(cx); - let mut cursor = map.folds.cursor::<(), ()>(); - cursor.next(buffer); - let mut prev_fold: Option<&Fold> = None; - while let Some(fold) = cursor.item() { - if let Some(prev_fold) = prev_fold { - let prev_fold = prev_fold.0.start.to_offset(buffer) - ..prev_fold.0.end.to_offset(buffer); - let fold = fold.0.start.to_offset(buffer)..fold.0.end.to_offset(buffer); - assert!( - fold.start > prev_fold.start - || (fold.start == prev_fold.start && fold.end <= prev_fold.end), - "prev fold {:?}\ncurr fold {:?}", - prev_fold, - fold - ); - } - - prev_fold = Some(fold); - cursor.next(buffer); - } - } match rng.gen_range(0..=100) { 0..=34 => { let buffer = buffer.read(cx); @@ -1195,7 +1187,7 @@ mod tests { let start = buffer.clip_offset(rng.gen_range(0..=end), Left); let expected_folds = map .folds - .items() + .items(buffer) .into_iter() .filter(|fold| { let start = buffer.anchor_before(start); @@ -1250,7 +1242,7 @@ mod tests { fn merged_fold_ranges(&self, cx: &AppContext) -> Vec> { let buffer = self.buffer.read(cx); - let mut folds = self.folds.items(); + let mut folds = self.folds.items(buffer); // Ensure sorting doesn't change how folds get merged and displayed. 
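// A minimal sketch of the range-coalescing step that `merged_fold_ranges`
// performs on the sorted folds below: ranges that overlap or abut are merged so
// each folded region is rendered exactly once. `merge_ranges` is an illustrative
// helper under those assumptions, not the real implementation.

use std::ops::Range;

fn merge_ranges(mut ranges: Vec<Range<usize>>) -> Vec<Range<usize>> {
    // Sort by start, breaking ties so the widest range comes first.
    ranges.sort_by_key(|range| (range.start, std::cmp::Reverse(range.end)));
    let mut merged: Vec<Range<usize>> = Vec::new();
    for range in ranges {
        if let Some(last) = merged.last_mut() {
            if range.start <= last.end {
                // Overlapping or abutting: extend the previous range.
                last.end = last.end.max(range.end);
                continue;
            }
        }
        merged.push(range);
    }
    merged
}

fn main() {
    let merged = merge_ranges(vec![5..10, 0..3, 8..12, 3..4]);
    assert_eq!(merged, vec![0..4, 5..12]);
}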
folds.sort_by(|a, b| a.0.cmp(&b.0, buffer).unwrap()); let mut fold_ranges = folds diff --git a/zed/src/operation_queue.rs b/zed/src/operation_queue.rs index 2c0e234fe98cd30c6d3fa72b210a93c0441b8249..681d13587033abba5c6b6fb5da67f163d427c7af 100644 --- a/zed/src/operation_queue.rs +++ b/zed/src/operation_queue.rs @@ -89,7 +89,7 @@ impl<'a> Add<&'a Self> for OperationSummary { } impl<'a> Dimension<'a, OperationSummary> for OperationKey { - fn add_summary(&mut self, summary: &OperationSummary) { + fn add_summary(&mut self, summary: &OperationSummary, _: &()) { assert!(*self <= summary.key); *self = summary.key; } diff --git a/zed/src/sum_tree.rs b/zed/src/sum_tree.rs index 8a2f9eb47594a6fdb81392468cb0210f7fa58d34..35b91833761027d2edd71f089c7ad1e5057ad28f 100644 --- a/zed/src/sum_tree.rs +++ b/zed/src/sum_tree.rs @@ -29,11 +29,11 @@ pub trait Summary: Default + Clone + fmt::Debug { } pub trait Dimension<'a, S: Summary>: Clone + fmt::Debug + Default { - fn add_summary(&mut self, _summary: &'a S); + fn add_summary(&mut self, _summary: &'a S, _: &S::Context); } impl<'a, T: Summary> Dimension<'a, T> for () { - fn add_summary(&mut self, _: &'a T) {} + fn add_summary(&mut self, _: &'a T, _: &T::Context) {} } pub trait SeekDimension<'a, T: Summary>: Dimension<'a, T> { @@ -71,9 +71,15 @@ impl SumTree { } #[allow(unused)] - pub fn items(&self) -> Vec { + pub fn items(&self, cx: &::Context) -> Vec { + let mut items = Vec::new(); let mut cursor = self.cursor::<(), ()>(); - cursor.cloned().collect() + cursor.next(cx); + while let Some(item) = cursor.item() { + items.push(item.clone()); + cursor.next(cx); + } + items } pub fn cursor<'a, S, U>(&'a self) -> Cursor @@ -84,12 +90,16 @@ impl SumTree { Cursor::new(self) } - pub fn filter<'a, F, U>(&'a self, filter_node: F) -> FilterCursor + pub fn filter<'a, F, U>( + &'a self, + filter_node: F, + cx: &::Context, + ) -> FilterCursor where F: Fn(&T::Summary) -> bool, U: Dimension<'a, T::Summary>, { - FilterCursor::new(self, filter_node) + FilterCursor::new(self, filter_node, cx) } #[allow(dead_code)] @@ -141,11 +151,14 @@ impl SumTree { } } - pub fn extent<'a, D: Dimension<'a, T::Summary>>(&'a self) -> D { + pub fn extent<'a, D: Dimension<'a, T::Summary>>( + &'a self, + cx: &::Context, + ) -> D { let mut extent = D::default(); match self.0.as_ref() { Node::Internal { summary, .. } | Node::Leaf { summary, .. 
} => { - extent.add_summary(summary); + extent.add_summary(summary, cx); } } extent @@ -434,7 +447,7 @@ impl SumTree { if let Some(old_item) = old_item { if old_item.key() == new_key { removed.push(old_item.clone()); - cursor.next(); + cursor.next(cx); } } @@ -580,7 +593,10 @@ mod tests { tree2.extend(50..100, &()); tree1.push_tree(tree2, &()); - assert_eq!(tree1.items(), (0..20).chain(50..100).collect::>()); + assert_eq!( + tree1.items(&()), + (0..20).chain(50..100).collect::>() + ); } #[test] @@ -596,16 +612,16 @@ mod tests { tree.extend(rng.sample_iter(distributions::Standard).take(count), &()); for _ in 0..5 { - let splice_end = rng.gen_range(0..tree.extent::().0 + 1); + let splice_end = rng.gen_range(0..tree.extent::(&()).0 + 1); let splice_start = rng.gen_range(0..splice_end + 1); let count = rng.gen_range(0..3); - let tree_end = tree.extent::(); + let tree_end = tree.extent::(&()); let new_items = rng .sample_iter(distributions::Standard) .take(count) .collect::>(); - let mut reference_items = tree.items(); + let mut reference_items = tree.items(&()); reference_items.splice(splice_start..splice_end, new_items.clone()); tree = { @@ -617,11 +633,12 @@ mod tests { new_tree }; - assert_eq!(tree.items(), reference_items); + assert_eq!(tree.items(&()), reference_items); - let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even); + let mut filter_cursor = + tree.filter::<_, Count>(|summary| summary.contains_even, &()); let mut reference_filter = tree - .items() + .items(&()) .into_iter() .enumerate() .filter(|(_, item)| (item & 1) == 0); @@ -629,11 +646,11 @@ mod tests { let (reference_index, reference_item) = reference_filter.next().unwrap(); assert_eq!(actual_item, &reference_item); assert_eq!(filter_cursor.start().0, reference_index); - filter_cursor.next(); + filter_cursor.next(&()); } assert!(reference_filter.next().is_none()); - let mut pos = rng.gen_range(0..tree.extent::().0 + 1); + let mut pos = rng.gen_range(0..tree.extent::(&()).0 + 1); let mut before_start = false; let mut cursor = tree.cursor::(); cursor.seek(&Count(pos), SeekBias::Right, &()); @@ -654,13 +671,13 @@ mod tests { } if i < 5 { - cursor.next(); + cursor.next(&()); if pos < reference_items.len() { pos += 1; before_start = false; } } else { - cursor.prev(); + cursor.prev(&()); if pos == 0 { before_start = true; } @@ -670,7 +687,7 @@ mod tests { } for _ in 0..10 { - let end = rng.gen_range(0..tree.extent::().0 + 1); + let end = rng.gen_range(0..tree.extent::(&()).0 + 1); let start = rng.gen_range(0..end + 1); let start_bias = if rng.gen() { SeekBias::Left @@ -701,7 +718,7 @@ mod tests { let tree = SumTree::::new(); let mut cursor = tree.cursor::(); assert_eq!( - cursor.slice(&Count(0), SeekBias::Right, &()).items(), + cursor.slice(&Count(0), SeekBias::Right, &()).items(&()), Vec::::new() ); assert_eq!(cursor.item(), None); @@ -713,25 +730,28 @@ mod tests { tree.extend(vec![1], &()); let mut cursor = tree.cursor::(); assert_eq!( - cursor.slice(&Count(0), SeekBias::Right, &()).items(), + cursor.slice(&Count(0), SeekBias::Right, &()).items(&()), Vec::::new() ); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.start(), &Sum(0)); - cursor.next(); + cursor.next(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.start(), &Sum(1)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.start(), &Sum(0)); let mut cursor = tree.cursor::(); 
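// A condensed sketch of the shape of this patch's API change: `Summary` gains an
// associated `Context` type, and every `Dimension::add_summary` call now threads
// a `&Context` through, so summaries that can only be interpreted against
// external state (e.g. fold anchors that need a `Buffer`) can still be
// accumulated. The simplified traits and the `CountSummary`/`Count` types below
// are assumptions for illustration, not the full `sum_tree` API.

use std::fmt::Debug;

trait Summary: Default + Clone + Debug {
    type Context;
    fn add_summary(&mut self, other: &Self, cx: &Self::Context);
}

trait Dimension<'a, S: Summary>: Clone + Debug + Default {
    fn add_summary(&mut self, summary: &'a S, cx: &S::Context);
}

// A summary that needs no context uses `()` and ignores the argument.
#[derive(Default, Clone, Debug)]
struct CountSummary(usize);

impl Summary for CountSummary {
    type Context = ();
    fn add_summary(&mut self, other: &Self, _: &()) {
        self.0 += other.0;
    }
}

#[derive(Default, Clone, Debug)]
struct Count(usize);

impl<'a> Dimension<'a, CountSummary> for Count {
    fn add_summary(&mut self, summary: &'a CountSummary, _: &()) {
        self.0 += summary.0;
    }
}

fn main() {
    let mut count = Count::default();
    count.add_summary(&CountSummary(2), &());
    count.add_summary(&CountSummary(3), &());
    assert_eq!(count.0, 5);
}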
- assert_eq!(cursor.slice(&Count(1), SeekBias::Right, &()).items(), [1]); + assert_eq!( + cursor.slice(&Count(1), SeekBias::Right, &()).items(&()), + [1] + ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.start(), &Sum(1)); @@ -739,8 +759,8 @@ mod tests { cursor.seek(&Count(0), SeekBias::Right, &()); assert_eq!( cursor - .slice(&tree.extent::(), SeekBias::Right, &()) - .items(), + .slice(&tree.extent::(&()), SeekBias::Right, &()) + .items(&()), [1] ); assert_eq!(cursor.item(), None); @@ -753,70 +773,70 @@ mod tests { let mut cursor = tree.cursor::(); assert_eq!( - cursor.slice(&Count(2), SeekBias::Right, &()).items(), + cursor.slice(&Count(2), SeekBias::Right, &()).items(&()), [1, 2] ); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.start(), &Sum(3)); - cursor.next(); + cursor.next(&()); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.start(), &Sum(6)); - cursor.next(); + cursor.next(&()); assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.start(), &Sum(10)); - cursor.next(); + cursor.next(&()); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.start(), &Sum(15)); - cursor.next(); - cursor.next(); + cursor.next(&()); + cursor.next(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); assert_eq!(cursor.start(), &Sum(21)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&6)); assert_eq!(cursor.prev_item(), Some(&5)); assert_eq!(cursor.start(), &Sum(15)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&5)); assert_eq!(cursor.prev_item(), Some(&4)); assert_eq!(cursor.start(), &Sum(10)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&4)); assert_eq!(cursor.prev_item(), Some(&3)); assert_eq!(cursor.start(), &Sum(6)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.start(), &Sum(3)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&2)); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.start(), &Sum(1)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.start(), &Sum(0)); - cursor.prev(); + cursor.prev(&()); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.start(), &Sum(0)); - cursor.next(); + cursor.next(&()); assert_eq!(cursor.item(), Some(&1)); assert_eq!(cursor.prev_item(), None); assert_eq!(cursor.start(), &Sum(0)); @@ -824,9 +844,9 @@ mod tests { let mut cursor = tree.cursor::(); assert_eq!( cursor - .slice(&tree.extent::(), SeekBias::Right, &()) - .items(), - tree.items() + .slice(&tree.extent::(&()), SeekBias::Right, &()) + .items(&()), + tree.items(&()) ); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&6)); @@ -835,8 +855,8 @@ mod tests { cursor.seek(&Count(3), SeekBias::Right, &()); assert_eq!( cursor - .slice(&tree.extent::(), SeekBias::Right, &()) - .items(), + .slice(&tree.extent::(&()), SeekBias::Right, &()) + .items(&()), [4, 5, 6] ); assert_eq!(cursor.item(), None); @@ -852,15 +872,15 @@ mod tests { // Slicing without resetting starts from where the cursor is parked at. 
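// A toy, Vec-backed model of the slicing semantics exercised by the assertions
// below: a slice consumes items from the cursor's current position up to the
// seek target, and the bias decides whether an item landing exactly on the
// target is included (Right) or left for the next call (Left). `VecCursor` and
// its count-based `seek`/`slice` are illustrative stand-ins for the real
// B-tree cursor.

#[derive(Clone, Copy)]
enum SeekBias {
    Left,
    Right,
}

struct VecCursor<T> {
    items: Vec<T>,
    position: usize, // number of items already consumed
}

impl<T: Clone> VecCursor<T> {
    fn new(items: Vec<T>) -> Self {
        Self { items, position: 0 }
    }

    /// Reposition from the beginning, discarding the consumed items.
    fn seek(&mut self, count: usize, bias: SeekBias) {
        self.position = 0;
        self.slice(count, bias);
    }

    /// Return the items between the current position and `count`.
    fn slice(&mut self, count: usize, bias: SeekBias) -> Vec<T> {
        let mut out = Vec::new();
        while self.position < self.items.len() {
            let should_consume = match bias {
                SeekBias::Right => self.position < count,
                SeekBias::Left => self.position + 1 < count,
            };
            if !should_consume {
                break;
            }
            out.push(self.items[self.position].clone());
            self.position += 1;
        }
        out
    }
}

fn main() {
    let mut cursor = VecCursor::new(vec![1, 2, 3, 4, 5, 6]);
    cursor.seek(1, SeekBias::Right);
    // Mirrors the assertions in the test: successive slices pick up where the
    // previous one stopped.
    assert_eq!(cursor.slice(3, SeekBias::Right), vec![2, 3]);
    assert_eq!(cursor.slice(6, SeekBias::Left), vec![4, 5]);
    assert_eq!(cursor.slice(6, SeekBias::Right), vec![6]);
}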
cursor.seek(&Count(1), SeekBias::Right, &()); assert_eq!( - cursor.slice(&Count(3), SeekBias::Right, &()).items(), + cursor.slice(&Count(3), SeekBias::Right, &()).items(&()), vec![2, 3] ); assert_eq!( - cursor.slice(&Count(6), SeekBias::Left, &()).items(), + cursor.slice(&Count(6), SeekBias::Left, &()).items(&()), vec![4, 5] ); assert_eq!( - cursor.slice(&Count(6), SeekBias::Right, &()).items(), + cursor.slice(&Count(6), SeekBias::Right, &()).items(&()), vec![6] ); } @@ -870,7 +890,7 @@ mod tests { let mut tree = SumTree::::new(); let removed = tree.edit(vec![Edit::Insert(1), Edit::Insert(2), Edit::Insert(0)], &()); - assert_eq!(tree.items(), vec![0, 1, 2]); + assert_eq!(tree.items(&()), vec![0, 1, 2]); assert_eq!(removed, Vec::::new()); assert_eq!(tree.get(&0, &()), Some(&0)); assert_eq!(tree.get(&1, &()), Some(&1)); @@ -878,7 +898,7 @@ mod tests { assert_eq!(tree.get(&4, &()), None); let removed = tree.edit(vec![Edit::Insert(2), Edit::Insert(4), Edit::Remove(0)], &()); - assert_eq!(tree.items(), vec![1, 2, 4]); + assert_eq!(tree.items(&()), vec![1, 2, 4]); assert_eq!(removed, vec![0, 2]); assert_eq!(tree.get(&0, &()), None); assert_eq!(tree.get(&1, &()), Some(&1)); @@ -933,19 +953,19 @@ mod tests { } impl<'a> Dimension<'a, IntegersSummary> for u8 { - fn add_summary(&mut self, summary: &IntegersSummary) { + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { *self = summary.max; } } impl<'a> Dimension<'a, IntegersSummary> for Count { - fn add_summary(&mut self, summary: &IntegersSummary) { + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.count.0; } } impl<'a> Dimension<'a, IntegersSummary> for Sum { - fn add_summary(&mut self, summary: &IntegersSummary) { + fn add_summary(&mut self, summary: &IntegersSummary, _: &()) { self.0 += summary.sum.0; } } diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 146683ec2cbfdb2151af3d0d327ed196f2f77d6b..e5515e45c24d7286e530a0d18f0949a72309a961 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -49,10 +49,10 @@ where &self.sum_dimension } - pub fn end(&self) -> U { + pub fn end(&self, cx: &::Context) -> U { if let Some(item_summary) = self.item_summary() { let mut end = self.start().clone(); - end.add_summary(item_summary); + end.add_summary(item_summary, cx); end } else { self.start().clone() @@ -134,13 +134,13 @@ where } #[allow(unused)] - pub fn prev(&mut self) { + pub fn prev(&mut self, cx: &::Context) { assert!(self.did_seek, "Must seek before calling this method"); if self.at_end { self.seek_dimension = S::default(); self.sum_dimension = U::default(); - self.descend_to_last_item(self.tree); + self.descend_to_last_item(self.tree, cx); self.at_end = false; } else { while let Some(entry) = self.stack.pop() { @@ -167,8 +167,8 @@ where .. } => { for summary in &child_summaries[0..new_index] { - self.seek_dimension.add_summary(summary); - self.sum_dimension.add_summary(summary); + self.seek_dimension.add_summary(summary, cx); + self.sum_dimension.add_summary(summary, cx); } self.stack.push(StackEntry { tree: entry.tree, @@ -176,12 +176,12 @@ where seek_dimension: self.seek_dimension.clone(), sum_dimension: self.sum_dimension.clone(), }); - self.descend_to_last_item(&child_trees[new_index]); + self.descend_to_last_item(&child_trees[new_index], cx); } Node::Leaf { item_summaries, .. 
} => { for item_summary in &item_summaries[0..new_index] { - self.seek_dimension.add_summary(item_summary); - self.sum_dimension.add_summary(item_summary); + self.seek_dimension.add_summary(item_summary, cx); + self.sum_dimension.add_summary(item_summary, cx); } self.stack.push(StackEntry { tree: entry.tree, @@ -198,11 +198,11 @@ where } } - pub fn next(&mut self) { - self.next_internal(|_| true) + pub fn next(&mut self, cx: &::Context) { + self.next_internal(|_| true, cx) } - fn next_internal(&mut self, filter_node: F) + fn next_internal(&mut self, filter_node: F, cx: &::Context) where F: Fn(&T::Summary) -> bool, { @@ -230,8 +230,8 @@ where } => { if !descend { let summary = &child_summaries[entry.index]; - entry.seek_dimension.add_summary(summary); - entry.sum_dimension.add_summary(summary); + entry.seek_dimension.add_summary(summary, cx); + entry.sum_dimension.add_summary(summary, cx); entry.index += 1; } @@ -240,8 +240,8 @@ where if filter_node(next_summary) { break; } else { - self.seek_dimension.add_summary(next_summary); - self.sum_dimension.add_summary(next_summary); + self.seek_dimension.add_summary(next_summary, cx); + self.sum_dimension.add_summary(next_summary, cx); } entry.index += 1; } @@ -251,10 +251,10 @@ where Node::Leaf { item_summaries, .. } => { if !descend { let item_summary = &item_summaries[entry.index]; - self.seek_dimension.add_summary(item_summary); - entry.seek_dimension.add_summary(item_summary); - self.sum_dimension.add_summary(item_summary); - entry.sum_dimension.add_summary(item_summary); + self.seek_dimension.add_summary(item_summary, cx); + entry.seek_dimension.add_summary(item_summary, cx); + self.sum_dimension.add_summary(item_summary, cx); + entry.sum_dimension.add_summary(item_summary, cx); entry.index += 1; } @@ -263,10 +263,10 @@ where if filter_node(next_item_summary) { return; } else { - self.seek_dimension.add_summary(next_item_summary); - entry.seek_dimension.add_summary(next_item_summary); - self.sum_dimension.add_summary(next_item_summary); - entry.sum_dimension.add_summary(next_item_summary); + self.seek_dimension.add_summary(next_item_summary, cx); + entry.seek_dimension.add_summary(next_item_summary, cx); + self.sum_dimension.add_summary(next_item_summary, cx); + entry.sum_dimension.add_summary(next_item_summary, cx); entry.index += 1; } } else { @@ -295,7 +295,11 @@ where debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf()); } - fn descend_to_last_item(&mut self, mut subtree: &'a SumTree) { + fn descend_to_last_item( + &mut self, + mut subtree: &'a SumTree, + cx: &::Context, + ) { self.did_seek = true; loop { match subtree.0.as_ref() { @@ -305,8 +309,8 @@ where .. } => { for summary in &child_summaries[0..child_summaries.len() - 1] { - self.seek_dimension.add_summary(summary); - self.sum_dimension.add_summary(summary); + self.seek_dimension.add_summary(summary, cx); + self.sum_dimension.add_summary(summary, cx); } self.stack.push(StackEntry { @@ -320,8 +324,8 @@ where Node::Leaf { item_summaries, .. 
} => { let last_index = item_summaries.len().saturating_sub(1); for item_summary in &item_summaries[0..last_index] { - self.seek_dimension.add_summary(item_summary); - self.sum_dimension.add_summary(item_summary); + self.seek_dimension.add_summary(item_summary, cx); + self.sum_dimension.add_summary(item_summary, cx); } self.stack.push(StackEntry { tree: subtree, @@ -372,7 +376,7 @@ where } pub fn suffix(&mut self, cx: &::Context) -> SumTree { - let extent = self.tree.extent::(); + let extent = self.tree.extent::(cx); let mut slice = SeekAggregate::Slice(SumTree::new()); self.seek_internal::<()>(&extent, SeekBias::Right, &mut slice, cx); if let SeekAggregate::Slice(slice) = slice { @@ -428,21 +432,21 @@ where .zip(&child_summaries[entry.index..]) { let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(&child_summary); + child_end.add_summary(&child_summary, cx); let comparison = target.cmp(&child_end, cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { self.seek_dimension = child_end; - self.sum_dimension.add_summary(child_summary); + self.sum_dimension.add_summary(child_summary, cx); match aggregate { SeekAggregate::None => {} SeekAggregate::Slice(slice) => { slice.push_tree(child_tree.clone(), cx); } SeekAggregate::Summary(summary) => { - summary.add_summary(child_summary); + summary.add_summary(child_summary, cx); } } entry.index += 1; @@ -470,14 +474,14 @@ where .zip(&item_summaries[entry.index..]) { let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(item_summary); + child_end.add_summary(item_summary, cx); let comparison = target.cmp(&child_end, cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { self.seek_dimension = child_end; - self.sum_dimension.add_summary(item_summary); + self.sum_dimension.add_summary(item_summary, cx); match aggregate { SeekAggregate::None => {} SeekAggregate::Slice(_) => { @@ -489,7 +493,7 @@ where .add_summary(item_summary, cx); } SeekAggregate::Summary(summary) => { - summary.add_summary(item_summary); + summary.add_summary(item_summary, cx); } } entry.index += 1; @@ -544,21 +548,21 @@ where child_trees.iter().zip(child_summaries).enumerate() { let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(child_summary); + child_end.add_summary(child_summary, cx); let comparison = target.cmp(&child_end, cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { self.seek_dimension = child_end; - self.sum_dimension.add_summary(child_summary); + self.sum_dimension.add_summary(child_summary, cx); match aggregate { SeekAggregate::None => {} SeekAggregate::Slice(slice) => { slice.push_tree(child_trees[index].clone(), cx); } SeekAggregate::Summary(summary) => { - summary.add_summary(child_summary); + summary.add_summary(child_summary, cx); } } } else { @@ -590,14 +594,14 @@ where items.iter().zip(item_summaries).enumerate() { let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(item_summary); + child_end.add_summary(item_summary, cx); let comparison = target.cmp(&child_end, cx); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { self.seek_dimension = child_end; - self.sum_dimension.add_summary(item_summary); + self.sum_dimension.add_summary(item_summary, cx); match aggregate { SeekAggregate::None => {} SeekAggregate::Slice(_) => { @@ -609,7 +613,7 @@ where 
slice_item_summaries.push(item_summary.clone()); } SeekAggregate::Summary(summary) => { - summary.add_summary(item_summary); + summary.add_summary(item_summary, cx); } } } else { @@ -651,7 +655,7 @@ where if bias == SeekBias::Left { let mut end = self.seek_dimension.clone(); if let Some(summary) = self.item_summary() { - end.add_summary(summary); + end.add_summary(summary, cx); } target.cmp(&end, cx) == Ordering::Equal } else { @@ -660,21 +664,22 @@ where } } -impl<'a, T, S, U> Iterator for Cursor<'a, T, S, U> +impl<'a, T, S, Seek, Sum> Iterator for Cursor<'a, T, Seek, Sum> where - T: Item, - S: Dimension<'a, T::Summary>, - U: Dimension<'a, T::Summary>, + T: Item, + S: Summary, + Seek: Dimension<'a, T::Summary>, + Sum: Dimension<'a, T::Summary>, { type Item = &'a T; fn next(&mut self) -> Option { if !self.did_seek { - self.next(); + self.next(&()); } if let Some(item) = self.item() { - self.next(); + self.next(&()); Some(item) } else { None @@ -693,9 +698,13 @@ where T: Item, U: Dimension<'a, T::Summary>, { - pub fn new(tree: &'a SumTree, filter_node: F) -> Self { + pub fn new( + tree: &'a SumTree, + filter_node: F, + cx: &::Context, + ) -> Self { let mut cursor = tree.cursor::<(), U>(); - cursor.next_internal(&filter_node); + cursor.next_internal(&filter_node, cx); Self { cursor, filter_node, @@ -710,22 +719,23 @@ where self.cursor.item() } - pub fn next(&mut self) { - self.cursor.next_internal(&self.filter_node); + pub fn next(&mut self, cx: &::Context) { + self.cursor.next_internal(&self.filter_node, cx); } } -impl<'a, F, T, U> Iterator for FilterCursor<'a, F, T, U> +impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U> where F: Fn(&T::Summary) -> bool, - T: Item, + T: Item, + S: Summary, U: Dimension<'a, T::Summary>, { type Item = &'a T; fn next(&mut self) -> Option { if let Some(item) = self.item() { - self.cursor.next_internal(&self.filter_node); + self.cursor.next_internal(&self.filter_node, &()); Some(item) } else { None diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs index f8bf316d544b10abd26ad0891c80e7ee0cd04251..56d53c0c5cfbb33d844be88d1b02957d7dcd7301 100644 --- a/zed/src/worktree.rs +++ b/zed/src/worktree.rs @@ -603,7 +603,7 @@ impl Default for PathKey { } impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey { - fn add_summary(&mut self, summary: &'a EntrySummary) { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.0 = summary.max_path.clone(); } } @@ -643,7 +643,7 @@ impl<'a> Default for PathSearch<'a> { } impl<'a: 'b, 'b> sum_tree::Dimension<'a, EntrySummary> for PathSearch<'b> { - fn add_summary(&mut self, summary: &'a EntrySummary) { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { *self = Self::Exact(summary.max_path.as_ref()); } } @@ -652,7 +652,7 @@ impl<'a: 'b, 'b> sum_tree::Dimension<'a, EntrySummary> for PathSearch<'b> { pub struct FileCount(usize); impl<'a> sum_tree::Dimension<'a, EntrySummary> for FileCount { - fn add_summary(&mut self, summary: &'a EntrySummary) { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.0 += summary.file_count; } } @@ -661,7 +661,7 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for FileCount { pub struct VisibleFileCount(usize); impl<'a> sum_tree::Dimension<'a, EntrySummary> for VisibleFileCount { - fn add_summary(&mut self, summary: &'a EntrySummary) { + fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) { self.0 += summary.visible_file_count; } } From da7e3c8cd876e094472a5cf27465883268190f19 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: 
Tue, 1 Jun 2021 15:28:20 +0200 Subject: [PATCH 03/40] Implement anchors using an offset + a version vector --- zed/src/editor/buffer.rs | 222 ++++++++++++++--------- zed/src/editor/buffer/anchor.rs | 13 +- zed/src/sum_tree/cursor.rs | 311 +++++++++++--------------------- zed/src/time.rs | 16 +- 4 files changed, 271 insertions(+), 291 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 2241b87b2f0ca4a7ce2b16897fd165dbaf62da35..533ab0cfbe3047987016de1ef404eac7082eb6b1 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -351,6 +351,8 @@ pub struct FragmentSummary { text: FragmentTextSummary, max_fragment_id: FragmentId, max_version: time::Global, + min_insertion_version: time::Global, + max_insertion_version: time::Global, } #[derive(Default, Clone, Debug, PartialEq, Eq)] @@ -360,7 +362,7 @@ struct FragmentTextSummary { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &()) { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { self.visible += summary.text.visible; self.deleted += summary.text.deleted; } @@ -502,7 +504,7 @@ impl Buffer { base_insertion.clone(), 0..0, ), - &(), + &None, ); if base_text.len() > 0 { @@ -520,7 +522,7 @@ impl Buffer { ); fragments.push( Fragment::new(base_fragment_id, base_insertion, range_in_insertion.clone()), - &(), + &None, ); } @@ -825,7 +827,7 @@ impl Buffer { } pub fn len(&self) -> usize { - self.fragments.extent::(&()) + self.fragments.extent::(&None) } pub fn line_len(&self, row: u32) -> u32 { @@ -873,7 +875,7 @@ impl Buffer { let since_2 = since.clone(); let cursor = self.fragments.filter( move |summary| summary.max_version.changed_since(&since_2), - &(), + &None, ); Edits { @@ -1191,8 +1193,11 @@ impl Buffer { let mut fragments_cursor = old_fragments.cursor::(); - let mut new_fragments = - fragments_cursor.slice(&FragmentIdRef::new(&start_fragment_id), SeekBias::Left, &()); + let mut new_fragments = fragments_cursor.slice( + &FragmentIdRef::new(&start_fragment_id), + SeekBias::Left, + &None, + ); let mut new_ropes = RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); new_ropes.push_tree(new_fragments.summary().text); @@ -1201,8 +1206,8 @@ impl Buffer { if start_offset == start_fragment.range_in_insertion.end { let fragment = fragments_cursor.item().unwrap().clone(); new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &()); - fragments_cursor.next(&()); + new_fragments.push(fragment, &None); + fragments_cursor.next(&None); } while let Some(fragment) = fragments_cursor.item() { @@ -1242,11 +1247,11 @@ impl Buffer { }; if let Some(fragment) = before_range { new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } if let Some(fragment) = insertion { new_ropes.push_str(new_text.take().unwrap()); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } if let Some(mut fragment) = within_range { let fragment_was_visible = fragment.visible; @@ -1258,11 +1263,11 @@ impl Buffer { } new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } if let Some(fragment) = after_range { new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } } else { if new_text.is_some() && lamport_timestamp > 
fragment.insertion.lamport_timestamp { @@ -1275,7 +1280,7 @@ impl Buffer { lamport_timestamp, ); new_ropes.push_str(new_text); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } let fragment_was_visible = fragment.visible; @@ -1289,10 +1294,10 @@ impl Buffer { } new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } - fragments_cursor.next(&()); + fragments_cursor.next(&None); } if let Some(new_text) = new_text { @@ -1304,11 +1309,11 @@ impl Buffer { lamport_timestamp, ); new_ropes.push_str(new_text); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); } let (visible_text, deleted_text) = new_ropes.finish(); - new_fragments.push_tree(fragments_cursor.suffix(&()), &()); + new_fragments.push_tree(fragments_cursor.suffix(&None), &None); self.fragments = new_fragments; self.visible_text = visible_text; @@ -1409,7 +1414,7 @@ impl Buffer { let first_split_id = insertion_splits.next().unwrap(); new_fragments = - fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &()); + fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); new_ropes.push_tree(new_fragments.summary().text); loop { @@ -1419,14 +1424,17 @@ impl Buffer { fragment.max_undos.observe(undo.id); new_ropes.push_fragment(&fragment, was_visible); - new_fragments.push(fragment.clone(), &()); + new_fragments.push(fragment.clone(), &None); - fragments_cursor.next(&()); + fragments_cursor.next(&None); if let Some(split_id) = insertion_splits.next() { - let slice = - fragments_cursor.slice(&FragmentIdRef::new(split_id), SeekBias::Left, &()); + let slice = fragments_cursor.slice( + &FragmentIdRef::new(split_id), + SeekBias::Left, + &None, + ); new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &()); + new_fragments.push_tree(slice, &None); } else { break; } @@ -1435,7 +1443,7 @@ impl Buffer { new_fragments = fragments_cursor.slice( &FragmentIdRef::new(&start_fragment_id), SeekBias::Left, - &(), + &None, ); new_ropes.push_tree(new_fragments.summary().text); @@ -1453,13 +1461,13 @@ impl Buffer { } new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &()); - fragments_cursor.next(&()); + new_fragments.push(fragment, &None); + fragments_cursor.next(&None); } } } - new_fragments.push_tree(fragments_cursor.suffix(&()), &()); + new_fragments.push_tree(fragments_cursor.suffix(&None), &None); let (visible_text, deleted_text) = new_ropes.finish(); drop(fragments_cursor); @@ -1551,7 +1559,7 @@ impl Buffer { let mut fragments_cursor = old_fragments.cursor::(); let mut new_fragments = - fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &()); + fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &None); let mut new_ropes = RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); @@ -1595,7 +1603,7 @@ impl Buffer { fragment.range_in_insertion.start = prefix.range_in_insertion.end; new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix.clone(), &()); + new_fragments.push(prefix.clone(), &None); new_split_tree.push( InsertionSplit { extent: prefix.range_in_insertion.end - prefix.range_in_insertion.start, @@ -1628,7 +1636,7 @@ impl Buffer { ); new_ropes.push_str(&new_text); - new_fragments.push(new_fragment, &()); + new_fragments.push(new_fragment, &None); } } @@ -1639,14 +1647,14 @@ impl Buffer { prefix.range_in_insertion.start + 
(range.end - fragment_start); prefix.id = FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id); - version_in_range.observe_all(&fragment_summary.max_version); + version_in_range.join(&fragment_summary.max_version); if prefix.visible { prefix.deletions.insert(local_timestamp); prefix.visible = false; } fragment.range_in_insertion.start = prefix.range_in_insertion.end; new_ropes.push_fragment(&prefix, fragment_was_visible); - new_fragments.push(prefix.clone(), &()); + new_fragments.push(prefix.clone(), &None); new_split_tree.push( InsertionSplit { extent: prefix.range_in_insertion.end @@ -1660,7 +1668,7 @@ impl Buffer { end_offset = Some(fragment.range_in_insertion.start); } } else { - version_in_range.observe_all(&fragment_summary.max_version); + version_in_range.join(&fragment_summary.max_version); if fragment.visible { fragment.deletions.insert(local_timestamp); fragment.visible = false; @@ -1714,10 +1722,10 @@ impl Buffer { .insert(fragment.insertion.id, new_split_tree); new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &()); + new_fragments.push(fragment, &None); // Scan forward until we find a fragment that is not fully contained by the current splice. - fragments_cursor.next(&()); + fragments_cursor.next(&None); if let Some(range) = cur_range.clone() { while let Some(fragment) = fragments_cursor.item() { let fragment_summary = fragments_cursor.item_summary().unwrap(); @@ -1726,15 +1734,15 @@ impl Buffer { fragment_end = fragment_start + fragment.visible_len(); if range.start < fragment_start && range.end >= fragment_end { let mut new_fragment = fragment.clone(); - version_in_range.observe_all(&fragment_summary.max_version); + version_in_range.join(&fragment_summary.max_version); if new_fragment.visible { new_fragment.deletions.insert(local_timestamp); new_fragment.visible = false; } new_ropes.push_fragment(&new_fragment, fragment_was_visible); - new_fragments.push(new_fragment, &()); - fragments_cursor.next(&()); + new_fragments.push(new_fragment, &None); + fragments_cursor.next(&None); if range.end == fragment_end { end_id = Some(fragment.insertion.id); @@ -1777,10 +1785,10 @@ impl Buffer { let slice = fragments_cursor.slice( &cur_range.as_ref().unwrap().start, SeekBias::Right, - &(), + &None, ); new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &()); + new_fragments.push_tree(slice, &None); } } } @@ -1814,11 +1822,11 @@ impl Buffer { ); new_ropes.push_str(&new_text); - new_fragments.push(new_fragment, &()); + new_fragments.push(new_fragment, &None); } } - new_fragments.push_tree(fragments_cursor.suffix(&()), &()); + new_fragments.push_tree(fragments_cursor.suffix(&None), &None); let (visible_text, deleted_text) = new_ropes.finish(); self.fragments = new_fragments; @@ -1982,8 +1990,10 @@ impl Buffer { } else if offset == max_offset && bias == AnchorBias::Right { Anchor::End } else { + let mut cursor = self.fragments.cursor::(); + cursor.seek(&offset, bias.to_seek_bias(), &None); Anchor::Middle { - offset, + offset: offset + cursor.start().deleted, bias, version: self.version(), } @@ -1991,6 +2001,60 @@ impl Buffer { } fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { + #[derive(Copy, Clone, Debug, Eq, PartialEq)] + enum VersionedOffset { + Offset(usize), + InvalidVersion, + } + + impl VersionedOffset { + fn offset(&self) -> usize { + if let Self::Offset(offset) = self { + *offset + } else { + panic!("invalid version") + } + } + } + + impl Default for VersionedOffset { + fn default() -> Self 
{ + Self::Offset(0) + } + } + + impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { + if let Self::Offset(offset) = self { + let version = cx.as_ref().unwrap(); + if *version >= summary.max_insertion_version { + *offset += summary.text.visible + summary.text.deleted; + } else if *version < summary.min_insertion_version { + // Every insertion in this subtree is causally after the context's version. + } else { + *self = VersionedOffset::InvalidVersion; + } + } + } + } + + impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset { + fn cmp(&self, other: &Self, _: &Option) -> cmp::Ordering { + match (self, other) { + (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b), + (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less, + (Self::InvalidVersion, _) => unreachable!(), + } + } + } + + impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { + self.0.add_summary(summary, cx); + self.1 += summary.text.visible; + } + } + match anchor { Anchor::Start => TextSummary::default(), Anchor::End => self.text_summary(), @@ -2001,39 +2065,20 @@ impl Buffer { } => { let mut cursor = self .fragments - .filter::<_, usize>(|summary| !(*version >= summary.max_version), &()); - - let mut old_offset = 0; - let mut new_offset = 0; - while let Some(fragment) = cursor.item() { - let bytes_since_last_fragment = *cursor.start() - new_offset; - let comparison = offset.cmp(&(old_offset + bytes_since_last_fragment)); - if comparison == cmp::Ordering::Greater - || (comparison == cmp::Ordering::Equal && *bias == AnchorBias::Right) - { - old_offset += bytes_since_last_fragment; - new_offset += bytes_since_last_fragment; - - if fragment.was_visible(version, &self.undo_map) { - let comparison = offset.cmp(&(old_offset + fragment.visible_len())); - if comparison == cmp::Ordering::Greater - || (comparison == cmp::Ordering::Equal - && *bias == AnchorBias::Right) - { - old_offset += fragment.len(); - } else { - break; - } - } - new_offset += fragment.visible_len(); - cursor.next(&()); - } else { - break; - } - } + .cursor::(); + cursor.seek( + &VersionedOffset::Offset(*offset), + bias.to_seek_bias(), + &Some(version.clone()), + ); + let fragment = cursor.item().unwrap(); + let overshoot = if fragment.visible { + offset - cursor.start().0.offset() + } else { + 0 + }; - let ix = new_offset + offset.saturating_sub(old_offset); - self.text_summary_for_range(0..ix) + self.text_summary_for_range(0..cursor.start().1 + overshoot) } } } @@ -2262,7 +2307,7 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { } } - self.cursor.next(&()); + self.cursor.next(&None); } change @@ -2446,7 +2491,7 @@ impl<'a> FragmentIdRef<'a> { } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentIdRef<'a> { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &()) { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { self.0 = Some(&summary.max_fragment_id) } } @@ -2497,8 +2542,11 @@ impl sum_tree::Item for Fragment { for deletion in &self.deletions { max_version.observe(*deletion); } - max_version.observe_all(&self.max_undos); + max_version.join(&self.max_undos); + let mut min_insertion_version = time::Global::new(); + min_insertion_version.observe(self.insertion.id); + let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { text: FragmentTextSummary 
{ @@ -2507,6 +2555,8 @@ impl sum_tree::Item for Fragment { }, max_fragment_id: self.id.clone(), max_version, + min_insertion_version, + max_insertion_version, } } else { FragmentSummary { @@ -2516,20 +2566,26 @@ impl sum_tree::Item for Fragment { }, max_fragment_id: self.id.clone(), max_version, + min_insertion_version, + max_insertion_version, } } } } impl sum_tree::Summary for FragmentSummary { - type Context = (); + type Context = Option; fn add_summary(&mut self, other: &Self, _: &Self::Context) { self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; debug_assert!(self.max_fragment_id <= other.max_fragment_id); self.max_fragment_id = other.max_fragment_id.clone(); - self.max_version.observe_all(&other.max_version); + self.max_version.join(&other.max_version); + self.min_insertion_version + .meet(&other.min_insertion_version); + self.max_insertion_version + .join(&other.max_insertion_version); } } @@ -2539,12 +2595,14 @@ impl Default for FragmentSummary { text: FragmentTextSummary::default(), max_fragment_id: FragmentId::min_value().clone(), max_version: time::Global::new(), + min_insertion_version: time::Global::new(), + max_insertion_version: time::Global::new(), } } } impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { - fn add_summary(&mut self, summary: &FragmentSummary, _: &()) { + fn add_summary(&mut self, summary: &FragmentSummary, _: &Option) { *self += summary.text.visible; } } diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 607c5d4aa884e8676fc1a8fe11dda553e4b63146..eeacaf112464164f61f107f8067b9ac8330f6cb2 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -1,5 +1,5 @@ use super::{Buffer, ToOffset}; -use crate::time; +use crate::{sum_tree, time}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; @@ -14,12 +14,21 @@ pub enum Anchor { }, } -#[derive(Clone, Eq, PartialEq, Debug, Hash)] +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum AnchorBias { Left, Right, } +impl AnchorBias { + pub fn to_seek_bias(self) -> sum_tree::SeekBias { + match self { + AnchorBias::Left => sum_tree::SeekBias::Left, + AnchorBias::Right => sum_tree::SeekBias::Right, + } + } +} + impl PartialOrd for AnchorBias { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index e5515e45c24d7286e530a0d18f0949a72309a961..7016a79c9cb285f415ce8b52b564868be7e46671 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -415,220 +415,108 @@ where D: Dimension<'a, T::Summary>, { debug_assert!(target.cmp(&self.seek_dimension, cx) >= Ordering::Equal); - let mut containing_subtree = None; - if self.did_seek { - 'outer: while let Some(entry) = self.stack.last_mut() { - { - match *entry.tree.0 { - Node::Internal { - ref child_summaries, - ref child_trees, - .. - } => { - entry.index += 1; - for (child_tree, child_summary) in child_trees[entry.index..] 
- .iter() - .zip(&child_summaries[entry.index..]) - { - let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(&child_summary, cx); - - let comparison = target.cmp(&child_end, cx); - if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) - { - self.seek_dimension = child_end; - self.sum_dimension.add_summary(child_summary, cx); - match aggregate { - SeekAggregate::None => {} - SeekAggregate::Slice(slice) => { - slice.push_tree(child_tree.clone(), cx); - } - SeekAggregate::Summary(summary) => { - summary.add_summary(child_summary, cx); - } - } - entry.index += 1; - } else { - containing_subtree = Some(child_tree); - break 'outer; - } - } - } - Node::Leaf { - ref items, - ref item_summaries, - .. - } => { - let mut slice_items = ArrayVec::<[T; 2 * TREE_BASE]>::new(); - let mut slice_item_summaries = - ArrayVec::<[T::Summary; 2 * TREE_BASE]>::new(); - let mut slice_items_summary = match aggregate { - SeekAggregate::Slice(_) => Some(T::Summary::default()), - _ => None, - }; - - for (item, item_summary) in items[entry.index..] - .iter() - .zip(&item_summaries[entry.index..]) - { - let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(item_summary, cx); - - let comparison = target.cmp(&child_end, cx); - if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) - { - self.seek_dimension = child_end; - self.sum_dimension.add_summary(item_summary, cx); - match aggregate { - SeekAggregate::None => {} - SeekAggregate::Slice(_) => { - slice_items.push(item.clone()); - slice_item_summaries.push(item_summary.clone()); - slice_items_summary - .as_mut() - .unwrap() - .add_summary(item_summary, cx); - } - SeekAggregate::Summary(summary) => { - summary.add_summary(item_summary, cx); - } - } - entry.index += 1; - } else { - if let SeekAggregate::Slice(slice) = aggregate { - slice.push_tree( - SumTree(Arc::new(Node::Leaf { - summary: slice_items_summary.unwrap(), - items: slice_items, - item_summaries: slice_item_summaries, - })), - cx, - ); - } - break 'outer; - } - } + if !self.did_seek { + self.did_seek = true; + self.stack.push(StackEntry { + tree: self.tree, + index: 0, + seek_dimension: Default::default(), + sum_dimension: Default::default(), + }); + } - if let SeekAggregate::Slice(slice) = aggregate { - if !slice_items.is_empty() { - slice.push_tree( - SumTree(Arc::new(Node::Leaf { - summary: slice_items_summary.unwrap(), - items: slice_items, - item_summaries: slice_item_summaries, - })), - cx, - ); - } - } - } + let mut ascending = false; + 'outer: while let Some(entry) = self.stack.last_mut() { + match *entry.tree.0 { + Node::Internal { + ref child_summaries, + ref child_trees, + .. + } => { + if ascending { + entry.index += 1; } - } - self.stack.pop(); - } - } else { - self.did_seek = true; - containing_subtree = Some(self.tree); - } + for (child_tree, child_summary) in child_trees[entry.index..] + .iter() + .zip(&child_summaries[entry.index..]) + { + let mut child_end = self.seek_dimension.clone(); + child_end.add_summary(&child_summary, cx); - if let Some(mut subtree) = containing_subtree { - loop { - let mut next_subtree = None; - match *subtree.0 { - Node::Internal { - ref child_summaries, - ref child_trees, - .. 
- } => { - for (index, (child_tree, child_summary)) in - child_trees.iter().zip(child_summaries).enumerate() + let comparison = target.cmp(&child_end, cx); + if comparison == Ordering::Greater + || (comparison == Ordering::Equal && bias == SeekBias::Right) { - let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(child_summary, cx); - - let comparison = target.cmp(&child_end, cx); - if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) - { - self.seek_dimension = child_end; - self.sum_dimension.add_summary(child_summary, cx); - match aggregate { - SeekAggregate::None => {} - SeekAggregate::Slice(slice) => { - slice.push_tree(child_trees[index].clone(), cx); - } - SeekAggregate::Summary(summary) => { - summary.add_summary(child_summary, cx); - } + self.seek_dimension = child_end; + self.sum_dimension.add_summary(child_summary, cx); + match aggregate { + SeekAggregate::None => {} + SeekAggregate::Slice(slice) => { + slice.push_tree(child_tree.clone(), cx); + } + SeekAggregate::Summary(summary) => { + summary.add_summary(child_summary, cx); } - } else { - self.stack.push(StackEntry { - tree: subtree, - index, - seek_dimension: self.seek_dimension.clone(), - sum_dimension: self.sum_dimension.clone(), - }); - next_subtree = Some(child_tree); - break; } + entry.index += 1; + entry.seek_dimension = self.seek_dimension.clone(); + entry.sum_dimension = self.sum_dimension.clone(); + } else { + self.stack.push(StackEntry { + tree: child_tree, + index: 0, + seek_dimension: self.seek_dimension.clone(), + sum_dimension: self.sum_dimension.clone(), + }); + ascending = false; + continue 'outer; } } - Node::Leaf { - ref items, - ref item_summaries, - .. - } => { - let mut slice_items = ArrayVec::<[T; 2 * TREE_BASE]>::new(); - let mut slice_item_summaries = - ArrayVec::<[T::Summary; 2 * TREE_BASE]>::new(); - let mut slice_items_summary = match aggregate { - SeekAggregate::Slice(_) => Some(T::Summary::default()), - _ => None, - }; - - for (index, (item, item_summary)) in - items.iter().zip(item_summaries).enumerate() + } + Node::Leaf { + ref items, + ref item_summaries, + .. + } => { + let mut slice_items = ArrayVec::<[T; 2 * TREE_BASE]>::new(); + let mut slice_item_summaries = ArrayVec::<[T::Summary; 2 * TREE_BASE]>::new(); + let mut slice_items_summary = match aggregate { + SeekAggregate::Slice(_) => Some(T::Summary::default()), + _ => None, + }; + + for (item, item_summary) in items[entry.index..] 
+ .iter() + .zip(&item_summaries[entry.index..]) + { + let mut child_end = self.seek_dimension.clone(); + child_end.add_summary(item_summary, cx); + + let comparison = target.cmp(&child_end, cx); + if comparison == Ordering::Greater + || (comparison == Ordering::Equal && bias == SeekBias::Right) { - let mut child_end = self.seek_dimension.clone(); - child_end.add_summary(item_summary, cx); - - let comparison = target.cmp(&child_end, cx); - if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) - { - self.seek_dimension = child_end; - self.sum_dimension.add_summary(item_summary, cx); - match aggregate { - SeekAggregate::None => {} - SeekAggregate::Slice(_) => { - slice_items.push(item.clone()); - slice_items_summary - .as_mut() - .unwrap() - .add_summary(item_summary, cx); - slice_item_summaries.push(item_summary.clone()); - } - SeekAggregate::Summary(summary) => { - summary.add_summary(item_summary, cx); - } + self.seek_dimension = child_end; + self.sum_dimension.add_summary(item_summary, cx); + match aggregate { + SeekAggregate::None => {} + SeekAggregate::Slice(_) => { + slice_items.push(item.clone()); + slice_item_summaries.push(item_summary.clone()); + slice_items_summary + .as_mut() + .unwrap() + .add_summary(item_summary, cx); + } + SeekAggregate::Summary(summary) => { + summary.add_summary(item_summary, cx); } - } else { - self.stack.push(StackEntry { - tree: subtree, - index, - seek_dimension: self.seek_dimension.clone(), - sum_dimension: self.sum_dimension.clone(), - }); - break; } - } - - if let SeekAggregate::Slice(slice) = aggregate { - if !slice_items.is_empty() { + entry.index += 1; + } else { + if let SeekAggregate::Slice(slice) = aggregate { slice.push_tree( SumTree(Arc::new(Node::Leaf { summary: slice_items_summary.unwrap(), @@ -638,16 +526,27 @@ where cx, ); } + break 'outer; } } - }; - if let Some(next_subtree) = next_subtree { - subtree = next_subtree; - } else { - break; + if let SeekAggregate::Slice(slice) = aggregate { + if !slice_items.is_empty() { + slice.push_tree( + SumTree(Arc::new(Node::Leaf { + summary: slice_items_summary.unwrap(), + items: slice_items, + item_summaries: slice_item_summaries, + })), + cx, + ); + } + } } } + + self.stack.pop(); + ascending = true; } self.at_end = self.stack.is_empty(); diff --git a/zed/src/time.rs b/zed/src/time.rs index 8668ebfe9cfaed30a66dca3a22aec8ba18e01891..fdcc58b1fb66a72eb4b928e46f75ed5046ab83c3 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -81,12 +81,26 @@ impl Global { } } - pub fn observe_all(&mut self, other: &Self) { + pub fn join(&mut self, other: &Self) { for timestamp in other.0.iter() { self.observe(*timestamp); } } + pub fn meet(&mut self, other: &Self) { + for timestamp in other.0.iter() { + if let Some(entry) = self + .0 + .iter_mut() + .find(|t| t.replica_id == timestamp.replica_id) + { + entry.value = cmp::min(entry.value, timestamp.value); + } else { + self.0.push(*timestamp); + } + } + } + pub fn observed(&self, timestamp: Local) -> bool { self.get(timestamp.replica_id) >= timestamp.value } From 56efe305584f6a7db87eb11bef43532feef8b7cf Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Jun 2021 16:54:02 +0200 Subject: [PATCH 04/40] Fix randomized tests for concurrent edits --- zed/src/editor/buffer.rs | 41 +++++++++++++++++++++++++++++----------- zed/src/time.rs | 5 +++++ 2 files changed, 35 insertions(+), 11 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 
533ab0cfbe3047987016de1ef404eac7082eb6b1..fefeb40d5bc9b87ee5fa53f726183a2961961fe7 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2029,11 +2029,15 @@ impl Buffer { let version = cx.as_ref().unwrap(); if *version >= summary.max_insertion_version { *offset += summary.text.visible + summary.text.deleted; - } else if *version < summary.min_insertion_version { - // Every insertion in this subtree is causally after the context's version. - } else { - *self = VersionedOffset::InvalidVersion; + } else if !summary + .min_insertion_version + .iter() + .all(|t| !version.observed(*t)) + { + *self = Self::InvalidVersion; } + } else { + unreachable!(); } } } @@ -2728,7 +2732,7 @@ mod tests { use std::{ cell::RefCell, cmp::Ordering, - fs, + env, fs, rc::Rc, sync::atomic::{self, AtomicUsize}, }; @@ -3503,10 +3507,24 @@ mod tests { fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext) { use crate::test::Network; - const PEERS: usize = 5; + let peers = env::var("PEERS") + .map(|i| i.parse().expect("invalid `PEERS` variable")) + .unwrap_or(5); + let iterations = env::var("ITERATIONS") + .map(|i| i.parse().expect("invalid `ITERATIONS` variable")) + .unwrap_or(100); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + let seed_range = if let Ok(seed) = env::var("SEED") { + let seed = seed.parse().expect("invalid `SEED` variable"); + seed..seed + 1 + } else { + 0..iterations + }; - for seed in 0..100 { - println!("{:?}", seed); + for seed in seed_range { + dbg!(seed); let mut rng = &mut StdRng::seed_from_u64(seed); let base_text_len = rng.gen_range(0..10); @@ -3516,16 +3534,16 @@ mod tests { let mut replica_ids = Vec::new(); let mut buffers = Vec::new(); let mut network = Network::new(); - for i in 0..PEERS { + for i in 0..peers { let buffer = cx.add_model(|cx| Buffer::new(i as ReplicaId, base_text.as_str(), cx)); buffers.push(buffer); replica_ids.push(i as u16); network.add_peer(i as u16); } - let mut mutation_count = 10; + let mut mutation_count = operations; loop { - let replica_index = rng.gen_range(0..PEERS); + let replica_index = rng.gen_range(0..peers); let replica_id = replica_ids[replica_index]; buffers[replica_index].update(cx, |buffer, _| match rng.gen_range(0..=100) { 0..=50 if mutation_count != 0 => { @@ -3801,6 +3819,7 @@ mod tests { // Randomly edit let (old_ranges, new_text, mut operations) = self.randomly_edit(rng, 5, cx.as_deref_mut()); + log::info!("Mutating buffer at {:?}: {:?}", old_ranges, new_text); // Randomly add, remove or mutate selection sets. 
let replica_selection_sets = &self diff --git a/zed/src/time.rs b/zed/src/time.rs index fdcc58b1fb66a72eb4b928e46f75ed5046ab83c3..bcc55336535407fdddbdef0dca69a2727a0ec3de 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -1,6 +1,7 @@ use smallvec::SmallVec; use std::cmp::{self, Ordering}; use std::ops::{Add, AddAssign}; +use std::slice; pub type ReplicaId = u16; pub type Seq = u32; @@ -108,6 +109,10 @@ impl Global { pub fn changed_since(&self, other: &Self) -> bool { self.0.iter().any(|t| t.value > other.get(t.replica_id)) } + + pub fn iter(&self) -> slice::Iter { + self.0.iter() + } } impl PartialOrd for Global { From b3f13ce325a6022f5681503d47caf40cd24b8e85 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Jun 2021 16:57:03 +0200 Subject: [PATCH 05/40] Pull up `VersionedOffset` --- zed/src/editor/buffer.rs | 116 +++++++++++++++++++-------------------- 1 file changed, 58 insertions(+), 58 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index fefeb40d5bc9b87ee5fa53f726183a2961961fe7..8d4ada41c00a521ac5839abe9489f831acdf514b 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2001,64 +2001,6 @@ impl Buffer { } fn summary_for_anchor(&self, anchor: &Anchor) -> TextSummary { - #[derive(Copy, Clone, Debug, Eq, PartialEq)] - enum VersionedOffset { - Offset(usize), - InvalidVersion, - } - - impl VersionedOffset { - fn offset(&self) -> usize { - if let Self::Offset(offset) = self { - *offset - } else { - panic!("invalid version") - } - } - } - - impl Default for VersionedOffset { - fn default() -> Self { - Self::Offset(0) - } - } - - impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { - fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { - if let Self::Offset(offset) = self { - let version = cx.as_ref().unwrap(); - if *version >= summary.max_insertion_version { - *offset += summary.text.visible + summary.text.deleted; - } else if !summary - .min_insertion_version - .iter() - .all(|t| !version.observed(*t)) - { - *self = Self::InvalidVersion; - } - } else { - unreachable!(); - } - } - } - - impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset { - fn cmp(&self, other: &Self, _: &Option) -> cmp::Ordering { - match (self, other) { - (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b), - (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less, - (Self::InvalidVersion, _) => unreachable!(), - } - } - } - - impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { - fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { - self.0.add_summary(summary, cx); - self.1 += summary.text.visible; - } - } - match anchor { Anchor::Start => TextSummary::default(), Anchor::End => self.text_summary(), @@ -2641,6 +2583,64 @@ impl<'a> sum_tree::Dimension<'a, InsertionSplitSummary> for usize { } } +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +enum VersionedOffset { + Offset(usize), + InvalidVersion, +} + +impl VersionedOffset { + fn offset(&self) -> usize { + if let Self::Offset(offset) = self { + *offset + } else { + panic!("invalid version") + } + } +} + +impl Default for VersionedOffset { + fn default() -> Self { + Self::Offset(0) + } +} + +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { + if let Self::Offset(offset) = self { + let version = cx.as_ref().unwrap(); + if *version >= summary.max_insertion_version { + *offset += summary.text.visible + 
summary.text.deleted; + } else if !summary + .min_insertion_version + .iter() + .all(|t| !version.observed(*t)) + { + *self = Self::InvalidVersion; + } + } else { + unreachable!(); + } + } +} + +impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset { + fn cmp(&self, other: &Self, _: &Option) -> cmp::Ordering { + match (self, other) { + (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b), + (Self::Offset(_), Self::InvalidVersion) => cmp::Ordering::Less, + (Self::InvalidVersion, _) => unreachable!(), + } + } +} + +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { + self.0.add_summary(summary, cx); + self.1 += summary.text.visible; + } +} + impl Operation { fn replica_id(&self) -> ReplicaId { self.lamport_timestamp().replica_id From df13cf0a90a923d793c2c06ab5468882096fbb76 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Tue, 1 Jun 2021 17:54:55 +0200 Subject: [PATCH 06/40] WIP: Compare anchors without using FragmentId --- zed/src/editor/buffer.rs | 34 +++++++++++++++++++++++++++++++++ zed/src/editor/buffer/anchor.rs | 24 ++++++++++++++--------- 2 files changed, 49 insertions(+), 9 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 8d4ada41c00a521ac5839abe9489f831acdf514b..c0a1df49e72c112d84f581e9b950a536f9602ff8 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -353,6 +353,7 @@ pub struct FragmentSummary { max_version: time::Global, min_insertion_version: time::Global, max_insertion_version: time::Global, + count: usize, } #[derive(Default, Clone, Debug, PartialEq, Eq)] @@ -2029,6 +2030,26 @@ impl Buffer { } } + fn fragment_ix_for_anchor(&self, anchor: &Anchor) -> usize { + match anchor { + Anchor::Start => 0, + Anchor::End => self.fragments.extent::(&None).0, + Anchor::Middle { + offset, + bias, + version, + } => { + let mut cursor = self.fragments.cursor::(); + cursor.seek( + &VersionedOffset::Offset(*offset), + bias.to_seek_bias(), + &Some(version.clone()), + ); + cursor.start().0 + } + } + } + pub fn point_for_offset(&self, offset: usize) -> Result { if offset <= self.len() { Ok(self.text_summary_for_range(0..offset).lines) @@ -2503,6 +2524,7 @@ impl sum_tree::Item for Fragment { max_version, min_insertion_version, max_insertion_version, + count: 1, } } else { FragmentSummary { @@ -2514,6 +2536,7 @@ impl sum_tree::Item for Fragment { max_version, min_insertion_version, max_insertion_version, + count: 1, } } } @@ -2532,6 +2555,7 @@ impl sum_tree::Summary for FragmentSummary { .meet(&other.min_insertion_version); self.max_insertion_version .join(&other.max_insertion_version); + self.count += other.count; } } @@ -2543,6 +2567,7 @@ impl Default for FragmentSummary { max_version: time::Global::new(), min_insertion_version: time::Global::new(), max_insertion_version: time::Global::new(), + count: 0, } } } @@ -2641,6 +2666,15 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { } } +#[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Ord, PartialOrd)] +struct FragmentCount(usize); + +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentCount { + fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { + self.0 += summary.count; + } +} + impl Operation { fn replica_id(&self) -> ReplicaId { self.lamport_timestamp().replica_id diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 
eeacaf112464164f61f107f8067b9ac8330f6cb2..55aee9a82167e28047039e742b22bb69530396fb 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -63,21 +63,27 @@ impl Anchor { Anchor::Middle { offset: self_offset, bias: self_bias, - version: self_version, + .. }, Anchor::Middle { offset: other_offset, bias: other_bias, - version: other_version, + .. }, ) => { - let offset_comparison = if self_version == other_version { - self_offset.cmp(other_offset) - } else { - self.to_offset(buffer).cmp(&other.to_offset(buffer)) - }; - - offset_comparison.then_with(|| self_bias.cmp(other_bias)) + dbg!( + self, + other, + self_offset, + other_offset, + buffer.fragment_ix_for_anchor(self), + buffer.fragment_ix_for_anchor(other) + ); + buffer + .fragment_ix_for_anchor(self) + .cmp(&buffer.fragment_ix_for_anchor(other)) + .then_with(|| self_offset.cmp(&other_offset)) + .then_with(|| self_bias.cmp(other_bias)) } }) } From e3c07942d5b7cb729ccb475f9d88b1690bccf47b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Jun 2021 11:52:42 -0700 Subject: [PATCH 07/40] Compare anchors via their fragment and their offset within it Co-Authored-By: Nathan Sobo Co-Authored-By: Antonio Scandurra --- zed/src/editor/buffer.rs | 23 ++++++++++++++++++----- zed/src/editor/buffer/anchor.rs | 29 +++++++---------------------- 2 files changed, 25 insertions(+), 27 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index c0a1df49e72c112d84f581e9b950a536f9602ff8..cffe2a5f9ba9e697d2446e073ff60977a339dc3c 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2030,22 +2030,28 @@ impl Buffer { } } - fn fragment_ix_for_anchor(&self, anchor: &Anchor) -> usize { + fn fragment_ix_for_anchor(&self, anchor: &Anchor) -> (usize, usize) { match anchor { - Anchor::Start => 0, - Anchor::End => self.fragments.extent::(&None).0, + Anchor::Start => (0, 0), + Anchor::End => ( + self.fragments.extent::(&None).0, + self.fragments.last().map_or(0, |f| f.visible_len()), + ), Anchor::Middle { offset, bias, version, } => { - let mut cursor = self.fragments.cursor::(); + let mut cursor = self + .fragments + .cursor::(); cursor.seek( &VersionedOffset::Offset(*offset), bias.to_seek_bias(), &Some(version.clone()), ); - cursor.start().0 + let count = cursor.start().1; + (count.0, offset - cursor.start().0.offset()) } } } @@ -2666,6 +2672,13 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { } } +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, FragmentCount) { + fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { + self.0.add_summary(summary, cx); + self.1 .0 += summary.count; + } +} + #[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Ord, PartialOrd)] struct FragmentCount(usize); diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 55aee9a82167e28047039e742b22bb69530396fb..518d4afe8f3b8bcb1c69e06bcd3984b827e8c703 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -1,4 +1,4 @@ -use super::{Buffer, ToOffset}; +use super::Buffer; use crate::{sum_tree, time}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; @@ -61,30 +61,15 @@ impl Anchor { (Anchor::End, _) | (_, Anchor::Start) => Ordering::Greater, ( Anchor::Middle { - offset: self_offset, - bias: self_bias, - .. + bias: self_bias, .. }, Anchor::Middle { - offset: other_offset, - bias: other_bias, - .. + bias: other_bias, .. 
}, - ) => { - dbg!( - self, - other, - self_offset, - other_offset, - buffer.fragment_ix_for_anchor(self), - buffer.fragment_ix_for_anchor(other) - ); - buffer - .fragment_ix_for_anchor(self) - .cmp(&buffer.fragment_ix_for_anchor(other)) - .then_with(|| self_offset.cmp(&other_offset)) - .then_with(|| self_bias.cmp(other_bias)) - } + ) => buffer + .fragment_ix_for_anchor(self) + .cmp(&buffer.fragment_ix_for_anchor(other)) + .then_with(|| self_bias.cmp(&other_bias)), }) } From dda9c6898b2c4c1e41e47e3ff5637c12c2b835dc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Jun 2021 12:28:04 -0700 Subject: [PATCH 08/40] Remove count field from FragmentSummary Sort anchors according to their 'full offset' (deleted + visible) --- zed/src/editor/buffer.rs | 33 +++++++++++++-------------------- zed/src/editor/buffer/anchor.rs | 4 ++-- 2 files changed, 15 insertions(+), 22 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index cffe2a5f9ba9e697d2446e073ff60977a339dc3c..7bf58d27a74cb9d096b987c25cef51486b266a29 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -353,7 +353,6 @@ pub struct FragmentSummary { max_version: time::Global, min_insertion_version: time::Global, max_insertion_version: time::Global, - count: usize, } #[derive(Default, Clone, Debug, PartialEq, Eq)] @@ -2030,13 +2029,10 @@ impl Buffer { } } - fn fragment_ix_for_anchor(&self, anchor: &Anchor) -> (usize, usize) { + fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { match anchor { - Anchor::Start => (0, 0), - Anchor::End => ( - self.fragments.extent::(&None).0, - self.fragments.last().map_or(0, |f| f.visible_len()), - ), + Anchor::Start => 0, + Anchor::End => self.fragments.extent::(&None).0, Anchor::Middle { offset, bias, @@ -2044,14 +2040,15 @@ impl Buffer { } => { let mut cursor = self .fragments - .cursor::(); + .cursor::(); cursor.seek( &VersionedOffset::Offset(*offset), bias.to_seek_bias(), &Some(version.clone()), ); - let count = cursor.start().1; - (count.0, offset - cursor.start().0.offset()) + let full_offset = cursor.start().1; + let visible_offset = cursor.start().0.offset(); + full_offset.0 + offset - visible_offset } } } @@ -2530,7 +2527,6 @@ impl sum_tree::Item for Fragment { max_version, min_insertion_version, max_insertion_version, - count: 1, } } else { FragmentSummary { @@ -2542,7 +2538,6 @@ impl sum_tree::Item for Fragment { max_version, min_insertion_version, max_insertion_version, - count: 1, } } } @@ -2561,7 +2556,6 @@ impl sum_tree::Summary for FragmentSummary { .meet(&other.min_insertion_version); self.max_insertion_version .join(&other.max_insertion_version); - self.count += other.count; } } @@ -2573,7 +2567,6 @@ impl Default for FragmentSummary { max_version: time::Global::new(), min_insertion_version: time::Global::new(), max_insertion_version: time::Global::new(), - count: 0, } } } @@ -2672,19 +2665,19 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { } } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, FragmentCount) { +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, FullOffset) { fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { self.0.add_summary(summary, cx); - self.1 .0 += summary.count; + self.1 .0 += summary.text.visible + summary.text.deleted; } } -#[derive(Copy, Clone, Default, Debug, Eq, PartialEq, Ord, PartialOrd)] -struct FragmentCount(usize); +#[derive(Clone, Copy, Debug, Default)] +struct FullOffset(usize); -impl<'a> 
sum_tree::Dimension<'a, FragmentSummary> for FragmentCount { +impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { - self.0 += summary.count; + self.0 += summary.text.visible + summary.text.deleted; } } diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 518d4afe8f3b8bcb1c69e06bcd3984b827e8c703..1f44eb65f0fc81d17f6a5e08614660a768ffa269 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -67,8 +67,8 @@ impl Anchor { bias: other_bias, .. }, ) => buffer - .fragment_ix_for_anchor(self) - .cmp(&buffer.fragment_ix_for_anchor(other)) + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) .then_with(|| self_bias.cmp(&other_bias)), }) } From 14b41279abe38c3541846e96b5b4b60809b840cf Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Jun 2021 14:28:17 -0700 Subject: [PATCH 09/40] wip --- zed/src/editor/buffer.rs | 327 ++++++++++++++++++++++----------------- 1 file changed, 186 insertions(+), 141 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 7bf58d27a74cb9d096b987c25cef51486b266a29..4ea0a84518ae34141b724fd6ea82bcd0d86f17ef 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -341,6 +341,7 @@ struct Fragment { id: FragmentId, insertion: Arc, range_in_insertion: Range, + len: usize, deletions: HashSet, max_undos: time::Global, visible: bool, @@ -399,11 +400,8 @@ pub enum Operation { #[derive(Clone, Debug, Eq, PartialEq)] pub struct EditOperation { id: time::Local, - start_id: time::Local, - start_offset: usize, - end_id: time::Local, - end_offset: usize, - version_in_range: time::Global, + version: time::Global, + ranges: Vec>, new_text: Option, } @@ -1130,12 +1128,9 @@ impl Buffer { } => { if !self.version.observed(edit.id) { self.apply_edit( - edit.start_id, - edit.start_offset, - edit.end_id, - edit.end_offset, + edit.version, + edit.ranges, edit.new_text.as_deref(), - &edit.version_in_range, edit.id, lamport_timestamp, )?; @@ -1172,148 +1167,73 @@ impl Buffer { fn apply_edit( &mut self, - start_id: time::Local, - start_offset: usize, - end_id: time::Local, - end_offset: usize, + version: time::Global, + ranges: Vec>, mut new_text: Option<&str>, - version_in_range: &time::Global, local_timestamp: time::Local, lamport_timestamp: time::Lamport, ) -> Result<()> { - let start_fragment_id = self.resolve_fragment_id(start_id, start_offset)?; - let end_fragment_id = self.resolve_fragment_id(end_id, end_offset)?; - - let mut old_visible_text = Rope::new(); - let mut old_deleted_text = Rope::new(); - let mut old_fragments = SumTree::new(); - mem::swap(&mut old_visible_text, &mut self.visible_text); - mem::swap(&mut old_deleted_text, &mut self.deleted_text); - mem::swap(&mut old_fragments, &mut self.fragments); + let mut old_visible_text = mem::take(&mut self.visible_text); + let mut old_deleted_text = mem::take(&mut self.deleted_text); + let mut old_fragments = mem::take(&mut self.fragments); + let mut old_fragments = old_fragments.cursor::(); - let mut fragments_cursor = old_fragments.cursor::(); + let version = Some(version); - let mut new_fragments = fragments_cursor.slice( - &FragmentIdRef::new(&start_fragment_id), - SeekBias::Left, - &None, - ); + let mut new_fragments = SumTree::new(); let mut new_ropes = RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); - new_ropes.push_tree(new_fragments.summary().text); - - let start_fragment = 
fragments_cursor.item().unwrap(); - if start_offset == start_fragment.range_in_insertion.end { - let fragment = fragments_cursor.item().unwrap().clone(); - new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &None); - fragments_cursor.next(&None); - } - - while let Some(fragment) = fragments_cursor.item() { - if new_text.is_none() && fragment.id > end_fragment_id { - break; - } - - let mut fragment = fragment.clone(); - - if fragment.id == start_fragment_id || fragment.id == end_fragment_id { - let split_start = if start_fragment_id == fragment.id { - start_offset - } else { - fragment.range_in_insertion.start - }; - let split_end = if end_fragment_id == fragment.id { - end_offset - } else { - fragment.range_in_insertion.end - }; - let (before_range, within_range, after_range) = self.split_fragment( - fragments_cursor.prev_item().as_ref().unwrap(), - &fragment, - split_start..split_end, - ); - let insertion = if let Some(new_text) = new_text { - let prev_fragment = fragments_cursor.prev_item(); - Some(self.build_fragment_to_insert( - before_range.as_ref().or(prev_fragment).unwrap(), - within_range.as_ref().or(after_range.as_ref()), - new_text, - local_timestamp, - lamport_timestamp, - )) - } else { - None - }; - if let Some(fragment) = before_range { - new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &None); - } - if let Some(fragment) = insertion { - new_ropes.push_str(new_text.take().unwrap()); - new_fragments.push(fragment, &None); - } - if let Some(mut fragment) = within_range { - let fragment_was_visible = fragment.visible; - if fragment.was_visible(&version_in_range, &self.undo_map) { - fragment.deletions.insert(local_timestamp); - if fragment.visible { - fragment.visible = false; - } - } + let mut pending_fragment = None; + let mut pending_fragment_start_offset = 0; + + for range in ranges { + let preceding_fragments = old_fragments.slice( + &VersionedOffset::Offset(range.start), + SeekBias::Right, + &version, + ); + new_fragments.push_tree(preceding_fragments, &None); + new_ropes.push_tree(new_fragments.summary().text); - new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &None); - } - if let Some(fragment) = after_range { - new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &None); - } + let mut fragment_start_offset = old_fragments.start().offset(); + let mut fragment_end_offset = old_fragments.end(&version).offset(); + let mut fragment = if let Some(fragment) = old_fragments.item() { + fragment.clone() } else { - if new_text.is_some() && lamport_timestamp > fragment.insertion.lamport_timestamp { - let new_text = new_text.take().unwrap(); - let fragment = self.build_fragment_to_insert( - fragments_cursor.prev_item().as_ref().unwrap(), - Some(&fragment), - new_text, - local_timestamp, - lamport_timestamp, - ); - new_ropes.push_str(new_text); - new_fragments.push(fragment, &None); - } + todo!() + }; - let fragment_was_visible = fragment.visible; - if fragment.id < end_fragment_id - && fragment.was_visible(&version_in_range, &self.undo_map) - { - fragment.deletions.insert(local_timestamp); - if fragment.visible { - fragment.visible = false; - } - } + if fragment_start_offset < range.start { + let prefix_fragment = Fragment { + len: range.start - fragment_start_offset, + visible: fragment.visible, + deletions: fragment.deletions.clone(), + max_undos: fragment.max_undos.clone(), + + // TODO - remove + id: fragment.id.clone(), + insertion: 
fragment.insertion.clone(), + range_in_insertion: Default::default(), + }; - new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &None); + new_ropes.push_fragment(&prefix_fragment, prefix_fragment.visible); + new_fragments.push(prefix_fragment, &None); + fragment.len -= prefix_fragment.len; } - fragments_cursor.next(&None); - } + let suffix_fragment = if fragment_end_offset > range.end { + fragment.visible = false; - if let Some(new_text) = new_text { - let fragment = self.build_fragment_to_insert( - fragments_cursor.prev_item().as_ref().unwrap(), - None, - new_text, - local_timestamp, - lamport_timestamp, - ); - new_ropes.push_str(new_text); - new_fragments.push(fragment, &None); + // + + Some(Fragment {}); + } else { + None + }; } let (visible_text, deleted_text) = new_ropes.finish(); - new_fragments.push_tree(fragments_cursor.suffix(&None), &None); + new_fragments.push_tree(old_fragments.suffix(&None), &None); self.fragments = new_fragments; self.visible_text = visible_text; @@ -1323,6 +1243,134 @@ impl Buffer { Ok(()) } + // let mut new_fragments = fragments_cursor.slice( + // &VersionedOffset::Offset(range.start), + // SeekBias::Left, + // &version_cx, + // ); + // new_ropes.push_tree(new_fragments.summary().text); + + // if range.start == fragments_cursor.end(&version_cx).offset() { + // let fragment = fragments_cursor.item().unwrap().clone(); + // new_ropes.push_fragment(&fragment, fragment.visible); + // new_fragments.push(fragment, &None); + // fragments_cursor.next(&None); + // } + + // while let Some(fragment) = fragments_cursor.item() { + // let fragment_start_offset = fragments_cursor.start().offset(); + // let fragment_end_offset = fragments_cursor.end(&version_cx).offset(); + + // if new_text.is_none() && fragment_start_offset > range.end { + // break; + // } + + // if fragment_start_offset + + // if cursor_start_offset < range.start || cursor_end_offset > range.end { + // let split_start = if start_fragment_id == fragment.id { + // start_offset + // } else { + // fragment.range_in_insertion.start + // }; + // let split_end = if end_fragment_id == fragment.id { + // end_offset + // } else { + // fragment.range_in_insertion.end + // }; + // let (before_range, within_range, after_range) = self.split_fragment( + // fragments_cursor.prev_item().as_ref().unwrap(), + // &fragment, + // split_start..split_end, + // ); + // let insertion = if let Some(new_text) = new_text { + // let prev_fragment = fragments_cursor.prev_item(); + // Some(self.build_fragment_to_insert( + // before_range.as_ref().or(prev_fragment).unwrap(), + // within_range.as_ref().or(after_range.as_ref()), + // new_text, + // local_timestamp, + // lamport_timestamp, + // )) + // } else { + // None + // }; + // if let Some(fragment) = before_range { + // new_ropes.push_fragment(&fragment, fragment.visible); + // new_fragments.push(fragment, &None); + // } + // if let Some(fragment) = insertion { + // new_ropes.push_str(new_text.take().unwrap()); + // new_fragments.push(fragment, &None); + // } + // if let Some(mut fragment) = within_range { + // let fragment_was_visible = fragment.visible; + // if fragment.was_visible(&version_in_range, &self.undo_map) { + // fragment.deletions.insert(local_timestamp); + // if fragment.visible { + // fragment.visible = false; + // } + // } + // new_ropes.push_fragment(&fragment, fragment_was_visible); + // new_fragments.push(fragment, &None); + // } + // if let Some(fragment) = after_range { + // new_ropes.push_fragment(&fragment, 
fragment.visible); + // new_fragments.push(fragment, &None); + // } + // } else { + // if new_text.is_some() && lamport_timestamp > fragment.insertion.lamport_timestamp { + // let new_text = new_text.take().unwrap(); + // let fragment = self.build_fragment_to_insert( + // fragments_cursor.prev_item().as_ref().unwrap(), + // Some(&fragment), + // new_text, + // local_timestamp, + // lamport_timestamp, + // ); + // new_ropes.push_str(new_text); + // new_fragments.push(fragment, &None); + // } + + // let fragment_was_visible = fragment.visible; + // if fragment.id < end_fragment_id + // && fragment.was_visible(&version_in_range, &self.undo_map) + // { + // fragment.deletions.insert(local_timestamp); + // if fragment.visible { + // fragment.visible = false; + // } + // } + + // new_ropes.push_fragment(&fragment, fragment_was_visible); + // new_fragments.push(fragment, &None); + // } + // fragments_cursor.next(&None); + // } + + // if let Some(new_text) = new_text { + // let fragment = self.build_fragment_to_insert( + // fragments_cursor.prev_item().as_ref().unwrap(), + // None, + // new_text, + // local_timestamp, + // lamport_timestamp, + // ); + // new_ropes.push_str(new_text); + // new_fragments.push(fragment, &None); + // } + + // let (visible_text, deleted_text) = new_ropes.finish(); + // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); + + // self.fragments = new_fragments; + // self.visible_text = visible_text; + // self.deleted_text = deleted_text; + // self.local_clock.observe(local_timestamp); + // self.lamport_clock.observe(lamport_timestamp); + // Ok(()) + // } + pub fn undo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); @@ -1548,6 +1596,7 @@ impl Buffer { return Vec::new(); } + let version = &self.version; let mut ops = Vec::with_capacity(old_ranges.size_hint().0); let mut old_fragments = SumTree::new(); @@ -1682,11 +1731,8 @@ impl Buffer { ops.push(Operation::Edit { edit: EditOperation { id: local_timestamp, - start_id: start_id.unwrap(), - start_offset: start_offset.unwrap(), - end_id: end_id.unwrap(), - end_offset: end_offset.unwrap(), - version_in_range, + version, + range, new_text: new_text.clone(), }, lamport_timestamp, @@ -2047,8 +2093,7 @@ impl Buffer { &Some(version.clone()), ); let full_offset = cursor.start().1; - let visible_offset = cursor.start().0.offset(); - full_offset.0 + offset - visible_offset + full_offset.0 + offset - cursor.start().0.offset() } } } From 74b07fed18fd3a01ef555c3f704a033fe631851f Mon Sep 17 00:00:00 2001 From: Nathan Sobo Date: Tue, 1 Jun 2021 17:25:23 -0600 Subject: [PATCH 10/40] WIP: Start representing edit operations with versions and multiple ranges Compiling, long way to go though. 
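
The rough shape (still in flux while this is WIP) is that one operation carries
the version it was produced against plus every range it touched, along the
lines of (field values below are purely illustrative):

    EditOperation {
        id: edit_id,                    // this edit's time::Local timestamp
        version: version_at_edit_time,  // the buffer's vector clock at edit time
        ranges: vec![1..3, 7..9],       // all ranges touched by the edit
        new_text: Some("ab".into()),
    }

so a remote replica can resolve all of the ranges against that single version
instead of a per-edit (start_id, start_offset)..(end_id, end_offset) pair.
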
Co-Authored-By: Max Brunsfeld --- zed/src/editor/buffer.rs | 910 +++++++++++++++------------------------ 1 file changed, 339 insertions(+), 571 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 4ea0a84518ae34141b724fd6ea82bcd0d86f17ef..de8ecea4e4a21a754278572ac6f33f85c9926128 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -781,12 +781,11 @@ impl Buffer { match tag { ChangeTag::Equal => offset += len, ChangeTag::Delete => { - operations.extend_from_slice(&self.edit(Some(range), "", Some(cx)).unwrap()) + operations.push(self.edit(Some(range), "", Some(cx)).unwrap()) } ChangeTag::Insert => { - operations.extend_from_slice( - &self - .edit(Some(offset..offset), &diff.new_text[range], Some(cx)) + operations.push( + self.edit(Some(offset..offset), &diff.new_text[range], Some(cx)) .unwrap(), ); offset += len; @@ -952,10 +951,10 @@ impl Buffer { pub fn edit( &mut self, - old_ranges: I, + ranges: I, new_text: T, cx: Option<&mut ModelContext>, - ) -> Result> + ) -> Result where I: IntoIterator>, S: ToOffset, @@ -964,44 +963,45 @@ impl Buffer { self.start_transaction_at(None, Instant::now())?; let new_text = new_text.into(); - let old_ranges = old_ranges - .into_iter() - .map(|range| range.start.to_offset(self)..range.end.to_offset(self)) - .collect::>>(); - let new_text = if new_text.len() > 0 { Some(new_text) } else { None }; - let has_new_text = new_text.is_some(); - let ops = self.splice_fragments( - old_ranges - .into_iter() - .filter(|old_range| has_new_text || old_range.end > old_range.start), - new_text.into(), - ); + let ranges = ranges + .into_iter() + .filter_map(|range| { + let range = range.start.to_offset(self)..range.end.to_offset(self); + if has_new_text || !range.is_empty() { + Some(range) + } else { + None + } + }) + .collect::>>(); - for op in &ops { - if let Operation::Edit { edit, .. } = op { - self.history.push(edit.clone()); - self.history.push_undo(edit.id); - } - } + let edit_id = self.local_clock.tick(); + let lamport_timestamp = self.lamport_clock.tick(); + self.splice_fragments(&ranges, new_text.clone(), edit_id, lamport_timestamp); - if let Some(op) = ops.last() { - if let Operation::Edit { edit, .. 
} = op { - self.last_edit = edit.id; - self.version.observe(edit.id); - } else { - unreachable!() - } - } + let edit = EditOperation { + id: edit_id, + version: self.version.clone(), + ranges, + new_text, + }; + self.history.push(edit.clone()); + self.history.push_undo(edit.id); + self.last_edit = edit.id; + self.version.observe(edit.id); self.end_transaction_at(None, Instant::now(), cx)?; - Ok(ops) + Ok(Operation::Edit { + edit, + lamport_timestamp, + }) } fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { @@ -1128,8 +1128,8 @@ impl Buffer { } => { if !self.version.observed(edit.id) { self.apply_edit( - edit.version, - edit.ranges, + &edit.version, + &edit.ranges, edit.new_text.as_deref(), edit.id, lamport_timestamp, @@ -1167,73 +1167,180 @@ impl Buffer { fn apply_edit( &mut self, - version: time::Global, - ranges: Vec>, - mut new_text: Option<&str>, + version: &time::Global, + ranges: &[Range], + new_text: Option<&str>, local_timestamp: time::Local, lamport_timestamp: time::Lamport, ) -> Result<()> { - let mut old_visible_text = mem::take(&mut self.visible_text); - let mut old_deleted_text = mem::take(&mut self.deleted_text); - let mut old_fragments = mem::take(&mut self.fragments); + let old_visible_text = mem::take(&mut self.visible_text); + let old_deleted_text = mem::take(&mut self.deleted_text); + let old_fragments = mem::take(&mut self.fragments); let mut old_fragments = old_fragments.cursor::(); - - let version = Some(version); + let old_fragments_cx = Some(version.clone()); let mut new_fragments = SumTree::new(); let mut new_ropes = RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); - let mut pending_fragment = None; - let mut pending_fragment_start_offset = 0; - - for range in ranges { - let preceding_fragments = old_fragments.slice( - &VersionedOffset::Offset(range.start), + let mut ranges = ranges.iter().peekable(); + let mut fragment_start_offset = 0; + + // Push the fragments that precede the first edit and park the cursor over the fragment + // containing the start of the first edit. + if let Some(first_range) = ranges.peek() { + let prefix_fragments = old_fragments.slice( + &VersionedOffset::Offset(first_range.start), SeekBias::Right, - &version, + &old_fragments_cx, ); - new_fragments.push_tree(preceding_fragments, &None); - new_ropes.push_tree(new_fragments.summary().text); + new_ropes.push_tree(prefix_fragments.summary().text); + new_fragments.push_tree(prefix_fragments, &None); + fragment_start_offset = old_fragments.start().offset(); + } + + while let Some(range) = ranges.peek() { + let fragment = old_fragments.item(); + let fragment_end_offset = old_fragments.end(&old_fragments_cx).offset(); + + if let Some(fragment) = fragment { + // Was this fragment visible in the edit's base version? If not, push it into + // the new fragments, skip it, and continue the loop. + if !version.observed(fragment.insertion.id) { + new_ropes.push_fragment(fragment, fragment.visible); + new_fragments.push(fragment.clone(), &None); + old_fragments.next(&old_fragments_cx); + continue; + } - let mut fragment_start_offset = old_fragments.start().offset(); - let mut fragment_end_offset = old_fragments.end(&version).offset(); - let mut fragment = if let Some(fragment) = old_fragments.item() { - fragment.clone() + // If the current fragment doesn't intersect the current range, push the remainder + // of the fragment and then slice to the fragment containing the start of the + // current range. 
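+                // For example, if a previous range left the cursor parked on a fragment
+                // spanning (base-version) offsets 0..5 and the next range is 7..9, the
+                // rest of the 0..5 fragment is copied over unchanged and the slice below
+                // re-parks the cursor on the fragment that contains offset 7.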
+ if range.start > fragment_end_offset { + if fragment_end_offset > fragment_start_offset { + let suffix = Fragment { + len: fragment_end_offset - fragment_start_offset, + deletions: fragment.deletions.clone(), + max_undos: fragment.max_undos.clone(), + visible: fragment.visible, + // DELETE + id: Default::default(), + insertion: fragment.insertion.clone(), + range_in_insertion: Default::default(), + }; + new_ropes.push_fragment(&suffix, fragment.visible); + new_fragments.push(suffix, &None); + } + + let prefix_fragments = old_fragments.slice( + &VersionedOffset::Offset(range.start), + SeekBias::Right, + &old_fragments_cx, + ); + new_ropes.push_tree(prefix_fragments.summary().text); + new_fragments.push_tree(prefix_fragments, &None); + fragment_start_offset = old_fragments.start().offset(); + } + + // Now the current range intersects the current fragment. + // If there is a piece of the fragment preceding the current range, consume it. + if range.start > fragment_start_offset { + let prefix = Fragment { + len: range.start - fragment_start_offset, + deletions: fragment.deletions.clone(), + max_undos: fragment.max_undos.clone(), + visible: fragment.visible, + + // TODO: Delete these + id: Default::default(), + insertion: fragment.insertion.clone(), + range_in_insertion: Default::default(), + }; + fragment_start_offset += prefix.len; + new_ropes.push_fragment(&prefix, fragment.visible); + new_fragments.push(prefix, &None); + } + + // Push the portion of the current fragment that intersects the current range, + // marking it as deleted. + if range.end > range.start { + let deleted_end = cmp::min(range.end, fragment_end_offset); + + let mut deletions = fragment.deletions.clone(); + deletions.insert(local_timestamp); + + let deleted = Fragment { + len: deleted_end - fragment_start_offset, + deletions, + max_undos: fragment.max_undos.clone(), + visible: false, + + // TODO: Delete these + id: Default::default(), + insertion: fragment.insertion.clone(), + range_in_insertion: Default::default(), + }; + fragment_start_offset += deleted.len; + new_ropes.push_fragment(&deleted, fragment.visible); + new_fragments.push(deleted, &None); + } + + // Push any new text + if let Some(new_next) = new_text { + new_ropes.push_str(new_next); + new_fragments.push( + Fragment { + len: new_next.len(), + deletions: Default::default(), + max_undos: Default::default(), // TODO: Is this right? + visible: true, + + // TODO: Delete these + id: Default::default(), + insertion: fragment.insertion.clone(), + range_in_insertion: Default::default(), + }, + &None, + ); + } + + // Which ends first? The current fragment or the current range? If the current range + // ends before the current fragment, advance to the next range and preserve the + // current fragment. Otherwise, advance to next fragment and preserve the current + // range. 
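+                // E.g. with the cursor on a fragment spanning 10..20: a range ending at 15
+                // keeps the cursor parked here so a later range can split this fragment
+                // again, while a range ending at 25 advances to the next fragment and keeps
+                // applying the same range there.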
+ if range.end < fragment_end_offset { + ranges.next(); + } else { + old_fragments.next(&old_fragments_cx); + fragment_start_offset = fragment_end_offset; + } } else { - todo!() - }; + // Push a fragment containing the new text + } + } - if fragment_start_offset < range.start { - let prefix_fragment = Fragment { - len: range.start - fragment_start_offset, - visible: fragment.visible, + if let Some(fragment) = old_fragments.item() { + let fragment_end_offset = old_fragments.end(&old_fragments_cx).offset(); + if fragment_end_offset > fragment_start_offset { + let suffix = Fragment { + len: fragment_end_offset - fragment_start_offset, deletions: fragment.deletions.clone(), max_undos: fragment.max_undos.clone(), - - // TODO - remove - id: fragment.id.clone(), + visible: fragment.visible, + // DELETE + id: Default::default(), insertion: fragment.insertion.clone(), range_in_insertion: Default::default(), }; - - new_ropes.push_fragment(&prefix_fragment, prefix_fragment.visible); - new_fragments.push(prefix_fragment, &None); - fragment.len -= prefix_fragment.len; + new_ropes.push_fragment(&suffix, fragment.visible); + new_fragments.push(suffix, &None); } - let suffix_fragment = if fragment_end_offset > range.end { - fragment.visible = false; - - // - - Some(Fragment {}); - } else { - None - }; + let suffix_fragments = old_fragments.suffix(&None); + new_ropes.push_tree(suffix_fragments.summary().text); + new_fragments.push_tree(suffix_fragments, &None); } let (visible_text, deleted_text) = new_ropes.finish(); - new_fragments.push_tree(old_fragments.suffix(&None), &None); self.fragments = new_fragments; self.visible_text = visible_text; @@ -1243,134 +1350,6 @@ impl Buffer { Ok(()) } - // let mut new_fragments = fragments_cursor.slice( - // &VersionedOffset::Offset(range.start), - // SeekBias::Left, - // &version_cx, - // ); - // new_ropes.push_tree(new_fragments.summary().text); - - // if range.start == fragments_cursor.end(&version_cx).offset() { - // let fragment = fragments_cursor.item().unwrap().clone(); - // new_ropes.push_fragment(&fragment, fragment.visible); - // new_fragments.push(fragment, &None); - // fragments_cursor.next(&None); - // } - - // while let Some(fragment) = fragments_cursor.item() { - // let fragment_start_offset = fragments_cursor.start().offset(); - // let fragment_end_offset = fragments_cursor.end(&version_cx).offset(); - - // if new_text.is_none() && fragment_start_offset > range.end { - // break; - // } - - // if fragment_start_offset - - // if cursor_start_offset < range.start || cursor_end_offset > range.end { - // let split_start = if start_fragment_id == fragment.id { - // start_offset - // } else { - // fragment.range_in_insertion.start - // }; - // let split_end = if end_fragment_id == fragment.id { - // end_offset - // } else { - // fragment.range_in_insertion.end - // }; - // let (before_range, within_range, after_range) = self.split_fragment( - // fragments_cursor.prev_item().as_ref().unwrap(), - // &fragment, - // split_start..split_end, - // ); - // let insertion = if let Some(new_text) = new_text { - // let prev_fragment = fragments_cursor.prev_item(); - // Some(self.build_fragment_to_insert( - // before_range.as_ref().or(prev_fragment).unwrap(), - // within_range.as_ref().or(after_range.as_ref()), - // new_text, - // local_timestamp, - // lamport_timestamp, - // )) - // } else { - // None - // }; - // if let Some(fragment) = before_range { - // new_ropes.push_fragment(&fragment, fragment.visible); - // new_fragments.push(fragment, &None); - // } - // if let 
Some(fragment) = insertion { - // new_ropes.push_str(new_text.take().unwrap()); - // new_fragments.push(fragment, &None); - // } - // if let Some(mut fragment) = within_range { - // let fragment_was_visible = fragment.visible; - // if fragment.was_visible(&version_in_range, &self.undo_map) { - // fragment.deletions.insert(local_timestamp); - // if fragment.visible { - // fragment.visible = false; - // } - // } - // new_ropes.push_fragment(&fragment, fragment_was_visible); - // new_fragments.push(fragment, &None); - // } - // if let Some(fragment) = after_range { - // new_ropes.push_fragment(&fragment, fragment.visible); - // new_fragments.push(fragment, &None); - // } - // } else { - // if new_text.is_some() && lamport_timestamp > fragment.insertion.lamport_timestamp { - // let new_text = new_text.take().unwrap(); - // let fragment = self.build_fragment_to_insert( - // fragments_cursor.prev_item().as_ref().unwrap(), - // Some(&fragment), - // new_text, - // local_timestamp, - // lamport_timestamp, - // ); - // new_ropes.push_str(new_text); - // new_fragments.push(fragment, &None); - // } - - // let fragment_was_visible = fragment.visible; - // if fragment.id < end_fragment_id - // && fragment.was_visible(&version_in_range, &self.undo_map) - // { - // fragment.deletions.insert(local_timestamp); - // if fragment.visible { - // fragment.visible = false; - // } - // } - - // new_ropes.push_fragment(&fragment, fragment_was_visible); - // new_fragments.push(fragment, &None); - // } - // fragments_cursor.next(&None); - // } - - // if let Some(new_text) = new_text { - // let fragment = self.build_fragment_to_insert( - // fragments_cursor.prev_item().as_ref().unwrap(), - // None, - // new_text, - // local_timestamp, - // lamport_timestamp, - // ); - // new_ropes.push_str(new_text); - // new_fragments.push(fragment, &None); - // } - - // let (visible_text, deleted_text) = new_ropes.finish(); - // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); - - // self.fragments = new_fragments; - // self.visible_text = visible_text; - // self.deleted_text = deleted_text; - // self.local_clock.observe(local_timestamp); - // self.lamport_clock.observe(lamport_timestamp); - // Ok(()) - // } - pub fn undo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); @@ -1441,90 +1420,94 @@ impl Buffer { } fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> { - let mut new_fragments; - let mut old_visible_text = Rope::new(); - let mut old_deleted_text = Rope::new(); - mem::swap(&mut old_visible_text, &mut self.visible_text); - mem::swap(&mut old_deleted_text, &mut self.deleted_text); - let mut new_ropes = - RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); - - self.undo_map.insert(undo); - let edit = &self.history.ops[&undo.edit_id]; - let start_fragment_id = self.resolve_fragment_id(edit.start_id, edit.start_offset)?; - let end_fragment_id = self.resolve_fragment_id(edit.end_id, edit.end_offset)?; - - let mut fragments_cursor = self.fragments.cursor::(); - - if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset { - let splits = &self.insertion_splits[&undo.edit_id]; - let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable(); - - let first_split_id = insertion_splits.next().unwrap(); - new_fragments = - fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); - new_ropes.push_tree(new_fragments.summary().text); - - loop { - let 
mut fragment = fragments_cursor.item().unwrap().clone(); - let was_visible = fragment.visible; - fragment.visible = fragment.is_visible(&self.undo_map); - fragment.max_undos.observe(undo.id); - - new_ropes.push_fragment(&fragment, was_visible); - new_fragments.push(fragment.clone(), &None); - - fragments_cursor.next(&None); - if let Some(split_id) = insertion_splits.next() { - let slice = fragments_cursor.slice( - &FragmentIdRef::new(split_id), - SeekBias::Left, - &None, - ); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); - } else { - break; - } - } - } else { - new_fragments = fragments_cursor.slice( - &FragmentIdRef::new(&start_fragment_id), - SeekBias::Left, - &None, - ); - new_ropes.push_tree(new_fragments.summary().text); - - while let Some(fragment) = fragments_cursor.item() { - if fragment.id > end_fragment_id { - break; - } else { - let mut fragment = fragment.clone(); - let fragment_was_visible = fragment.visible; - if edit.version_in_range.observed(fragment.insertion.id) - || fragment.insertion.id == undo.edit_id - { - fragment.visible = fragment.is_visible(&self.undo_map); - fragment.max_undos.observe(undo.id); - } + Ok(()) + } - new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &None); - fragments_cursor.next(&None); - } - } - } + // { + // let mut new_fragments; + // let mut old_visible_text = Rope::new(); + // let mut old_deleted_text = Rope::new(); + // mem::swap(&mut old_visible_text, &mut self.visible_text); + // mem::swap(&mut old_deleted_text, &mut self.deleted_text); + // let mut new_ropes = + // RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); + + // self.undo_map.insert(undo); + // let edit = &self.history.ops[&undo.edit_id]; + // let start_fragment_id = self.resolve_fragment_id(edit.start_id, edit.start_offset)?; + // let end_fragment_id = self.resolve_fragment_id(edit.end_id, edit.end_offset)?; + + // let mut fragments_cursor = self.fragments.cursor::(); + + // if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset { + // let splits = &self.insertion_splits[&undo.edit_id]; + // let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable(); + + // let first_split_id = insertion_splits.next().unwrap(); + // new_fragments = + // fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); + // new_ropes.push_tree(new_fragments.summary().text); + + // loop { + // let mut fragment = fragments_cursor.item().unwrap().clone(); + // let was_visible = fragment.visible; + // fragment.visible = fragment.is_visible(&self.undo_map); + // fragment.max_undos.observe(undo.id); + + // new_ropes.push_fragment(&fragment, was_visible); + // new_fragments.push(fragment.clone(), &None); + + // fragments_cursor.next(&None); + // if let Some(split_id) = insertion_splits.next() { + // let slice = fragments_cursor.slice( + // &FragmentIdRef::new(split_id), + // SeekBias::Left, + // &None, + // ); + // new_ropes.push_tree(slice.summary().text); + // new_fragments.push_tree(slice, &None); + // } else { + // break; + // } + // } + // } else { + // new_fragments = fragments_cursor.slice( + // &FragmentIdRef::new(&start_fragment_id), + // SeekBias::Left, + // &None, + // ); + // new_ropes.push_tree(new_fragments.summary().text); + + // while let Some(fragment) = fragments_cursor.item() { + // if fragment.id > end_fragment_id { + // break; + // } else { + // let mut fragment = fragment.clone(); + // let fragment_was_visible = 
fragment.visible; + // if edit.version_in_range.observed(fragment.insertion.id) + // || fragment.insertion.id == undo.edit_id + // { + // fragment.visible = fragment.is_visible(&self.undo_map); + // fragment.max_undos.observe(undo.id); + // } + + // new_ropes.push_fragment(&fragment, fragment_was_visible); + // new_fragments.push(fragment, &None); + // fragments_cursor.next(&None); + // } + // } + // } - new_fragments.push_tree(fragments_cursor.suffix(&None), &None); - let (visible_text, deleted_text) = new_ropes.finish(); - drop(fragments_cursor); + // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); + // let (visible_text, deleted_text) = new_ropes.finish(); + // drop(fragments_cursor); - self.fragments = new_fragments; - self.visible_text = visible_text; - self.deleted_text = deleted_text; + // self.fragments = new_fragments; + // self.visible_text = visible_text; + // self.deleted_text = deleted_text; - Ok(()) - } + // Ok(()) + // } fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); @@ -1542,69 +1525,54 @@ impl Buffer { } fn can_apply_op(&self, op: &Operation) -> bool { - if self.deferred_replicas.contains(&op.replica_id()) { - false - } else { - match op { - Operation::Edit { edit, .. } => { - self.version.observed(edit.start_id) - && self.version.observed(edit.end_id) - && edit.version_in_range <= self.version - } - Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), - Operation::UpdateSelections { selections, .. } => { - if let Some(selections) = selections { - selections.iter().all(|selection| { - let contains_start = match &selection.start { - Anchor::Middle { version, .. } => self.version >= *version, - _ => true, - }; - let contains_end = match &selection.end { - Anchor::Middle { version, .. } => self.version >= *version, - _ => true, - }; - contains_start && contains_end - }) - } else { - true - } - } - } - } - } - - fn resolve_fragment_id(&self, edit_id: time::Local, offset: usize) -> Result { - let split_tree = self - .insertion_splits - .get(&edit_id) - .ok_or_else(|| anyhow!("invalid operation"))?; - let mut cursor = split_tree.cursor::(); - cursor.seek(&offset, SeekBias::Left, &()); - Ok(cursor - .item() - .ok_or_else(|| anyhow!("invalid operation"))? - .fragment_id - .clone()) - } - - fn splice_fragments(&mut self, mut old_ranges: I, new_text: Option) -> Vec - where - I: Iterator>, - { + true + // if self.deferred_replicas.contains(&op.replica_id()) { + // false + // } else { + // match op { + // Operation::Edit { edit, .. } => { + // self.version.observed(edit.start_id) + // && self.version.observed(edit.end_id) + // && edit.version_in_range <= self.version + // } + // Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), + // Operation::UpdateSelections { selections, .. } => { + // if let Some(selections) = selections { + // selections.iter().all(|selection| { + // let contains_start = match &selection.start { + // Anchor::Middle { version, .. } => self.version >= *version, + // _ => true, + // }; + // let contains_end = match &selection.end { + // Anchor::Middle { version, .. 
} => self.version >= *version, + // _ => true, + // }; + // contains_start && contains_end + // }) + // } else { + // true + // } + // } + // } + // } + } + + fn splice_fragments( + &mut self, + old_ranges: &[Range], + new_text: Option, + edit_id: time::Local, + lamport_timestamp: time::Lamport, + ) { + let mut old_ranges = old_ranges.iter(); let mut cur_range = old_ranges.next(); if cur_range.is_none() { - return Vec::new(); + return; } - let version = &self.version; - let mut ops = Vec::with_capacity(old_ranges.size_hint().0); - - let mut old_fragments = SumTree::new(); - let mut old_visible_text = Rope::new(); - let mut old_deleted_text = Rope::new(); - mem::swap(&mut old_visible_text, &mut self.visible_text); - mem::swap(&mut old_deleted_text, &mut self.deleted_text); - mem::swap(&mut old_fragments, &mut self.fragments); + let old_fragments = mem::take(&mut self.fragments); + let old_visible_text = mem::take(&mut self.visible_text); + let old_deleted_text = mem::take(&mut self.deleted_text); let mut fragments_cursor = old_fragments.cursor::(); let mut new_fragments = @@ -1614,18 +1582,8 @@ impl Buffer { RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); new_ropes.push_tree(new_fragments.summary().text); - let mut start_id = None; - let mut start_offset = None; - let mut end_id = None; - let mut end_offset = None; - let mut version_in_range = time::Global::new(); - - let mut local_timestamp = self.local_clock.tick(); - let mut lamport_timestamp = self.lamport_clock.tick(); - while cur_range.is_some() && fragments_cursor.item().is_some() { let mut fragment = fragments_cursor.item().unwrap().clone(); - let fragment_summary = fragments_cursor.item_summary().unwrap(); let mut fragment_start = *fragments_cursor.start(); let mut fragment_end = fragment_start + fragment.visible_len(); let fragment_was_visible = fragment.visible; @@ -1663,24 +1621,13 @@ impl Buffer { fragment_start = range.start; } - if range.end == fragment_start { - end_id = Some(new_fragments.last().unwrap().insertion.id); - end_offset = Some(new_fragments.last().unwrap().range_in_insertion.end); - } else if range.end == fragment_end { - end_id = Some(fragment.insertion.id); - end_offset = Some(fragment.range_in_insertion.end); - } - if range.start == fragment_start { - start_id = Some(new_fragments.last().unwrap().insertion.id); - start_offset = Some(new_fragments.last().unwrap().range_in_insertion.end); - if let Some(new_text) = new_text.clone() { let new_fragment = self.build_fragment_to_insert( &new_fragments.last().unwrap(), Some(&fragment), &new_text, - local_timestamp, + edit_id, lamport_timestamp, ); @@ -1696,9 +1643,8 @@ impl Buffer { prefix.range_in_insertion.start + (range.end - fragment_start); prefix.id = FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id); - version_in_range.join(&fragment_summary.max_version); if prefix.visible { - prefix.deletions.insert(local_timestamp); + prefix.deletions.insert(edit_id); prefix.visible = false; } fragment.range_in_insertion.start = prefix.range_in_insertion.end; @@ -1713,13 +1659,10 @@ impl Buffer { &(), ); fragment_start = range.end; - end_id = Some(fragment.insertion.id); - end_offset = Some(fragment.range_in_insertion.start); } } else { - version_in_range.join(&fragment_summary.max_version); if fragment.visible { - fragment.deletions.insert(local_timestamp); + fragment.deletions.insert(edit_id); fragment.visible = false; } } @@ -1728,26 +1671,7 @@ impl Buffer { // check if it also intersects the current fragment. 
Otherwise we break out of the // loop and find the first fragment that the splice does not contain fully. if range.end <= fragment_end { - ops.push(Operation::Edit { - edit: EditOperation { - id: local_timestamp, - version, - range, - new_text: new_text.clone(), - }, - lamport_timestamp, - }); - - start_id = None; - start_offset = None; - end_id = None; - end_offset = None; - version_in_range = time::Global::new(); cur_range = old_ranges.next(); - if cur_range.is_some() { - local_timestamp = self.local_clock.tick(); - lamport_timestamp = self.lamport_clock.tick(); - } } else { break; } @@ -1774,15 +1698,13 @@ impl Buffer { fragments_cursor.next(&None); if let Some(range) = cur_range.clone() { while let Some(fragment) = fragments_cursor.item() { - let fragment_summary = fragments_cursor.item_summary().unwrap(); let fragment_was_visible = fragment.visible; fragment_start = *fragments_cursor.start(); fragment_end = fragment_start + fragment.visible_len(); if range.start < fragment_start && range.end >= fragment_end { let mut new_fragment = fragment.clone(); - version_in_range.join(&fragment_summary.max_version); if new_fragment.visible { - new_fragment.deletions.insert(local_timestamp); + new_fragment.deletions.insert(edit_id); new_fragment.visible = false; } @@ -1791,32 +1713,7 @@ impl Buffer { fragments_cursor.next(&None); if range.end == fragment_end { - end_id = Some(fragment.insertion.id); - end_offset = Some(fragment.range_in_insertion.end); - ops.push(Operation::Edit { - edit: EditOperation { - id: local_timestamp, - start_id: start_id.unwrap(), - start_offset: start_offset.unwrap(), - end_id: end_id.unwrap(), - end_offset: end_offset.unwrap(), - version_in_range, - new_text: new_text.clone(), - }, - lamport_timestamp, - }); - - start_id = None; - start_offset = None; - end_id = None; - end_offset = None; - version_in_range = time::Global::new(); - cur_range = old_ranges.next(); - if cur_range.is_some() { - local_timestamp = self.local_clock.tick(); - lamport_timestamp = self.lamport_clock.tick(); - } break; } } else { @@ -1844,26 +1741,13 @@ impl Buffer { if cur_range.is_some() { debug_assert_eq!(old_ranges.next(), None); let last_fragment = new_fragments.last().unwrap(); - ops.push(Operation::Edit { - edit: EditOperation { - id: local_timestamp, - start_id: last_fragment.insertion.id, - start_offset: last_fragment.range_in_insertion.end, - end_id: last_fragment.insertion.id, - end_offset: last_fragment.range_in_insertion.end, - version_in_range: time::Global::new(), - // TODO: avoid cloning the String. 
- new_text: new_text.clone(), - }, - lamport_timestamp, - }); if let Some(new_text) = new_text { let new_fragment = self.build_fragment_to_insert( &last_fragment, None, &new_text, - local_timestamp, + edit_id, lamport_timestamp, ); @@ -1878,106 +1762,6 @@ impl Buffer { self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; - ops - } - - fn split_fragment( - &mut self, - prev_fragment: &Fragment, - fragment: &Fragment, - range: Range, - ) -> (Option, Option, Option) { - debug_assert!(range.start >= fragment.range_in_insertion.start); - debug_assert!(range.start <= fragment.range_in_insertion.end); - debug_assert!(range.end <= fragment.range_in_insertion.end); - debug_assert!(range.end >= fragment.range_in_insertion.start); - - if range.end == fragment.range_in_insertion.start { - (None, None, Some(fragment.clone())) - } else if range.start == fragment.range_in_insertion.end { - (Some(fragment.clone()), None, None) - } else if range.start == fragment.range_in_insertion.start - && range.end == fragment.range_in_insertion.end - { - (None, Some(fragment.clone()), None) - } else { - let mut prefix = fragment.clone(); - - let after_range = if range.end < fragment.range_in_insertion.end { - let mut suffix = prefix.clone(); - suffix.range_in_insertion.start = range.end; - prefix.range_in_insertion.end = range.end; - prefix.id = FragmentId::between(&prev_fragment.id, &suffix.id); - Some(suffix) - } else { - None - }; - - let within_range = if range.start != range.end { - let mut suffix = prefix.clone(); - suffix.range_in_insertion.start = range.start; - prefix.range_in_insertion.end = range.start; - prefix.id = FragmentId::between(&prev_fragment.id, &suffix.id); - Some(suffix) - } else { - None - }; - - let before_range = if range.start > fragment.range_in_insertion.start { - Some(prefix) - } else { - None - }; - - let old_split_tree = self - .insertion_splits - .remove(&fragment.insertion.id) - .unwrap(); - let mut cursor = old_split_tree.cursor::(); - let mut new_split_tree = - cursor.slice(&fragment.range_in_insertion.start, SeekBias::Right, &()); - - if let Some(ref fragment) = before_range { - new_split_tree.push( - InsertionSplit { - extent: range.start - fragment.range_in_insertion.start, - fragment_id: fragment.id.clone(), - }, - &(), - ); - } - - if let Some(ref fragment) = within_range { - new_split_tree.push( - InsertionSplit { - extent: range.end - range.start, - fragment_id: fragment.id.clone(), - }, - &(), - ); - } - - if let Some(ref fragment) = after_range { - new_split_tree.push( - InsertionSplit { - extent: fragment.range_in_insertion.end - range.end, - fragment_id: fragment.id.clone(), - }, - &(), - ); - } - - cursor.next(&()); - new_split_tree.push_tree( - cursor.slice(&old_split_tree.extent::(&()), SeekBias::Right, &()), - &(), - ); - - self.insertion_splits - .insert(fragment.insertion.id, new_split_tree); - - (before_range, within_range, after_range) - } } fn build_fragment_to_insert( @@ -2496,26 +2280,12 @@ impl FragmentId { } } -#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug, Default)] -struct FragmentIdRef<'a>(Option<&'a FragmentId>); - -impl<'a> FragmentIdRef<'a> { - fn new(id: &'a FragmentId) -> Self { - Self(Some(id)) - } -} - -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentIdRef<'a> { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { - self.0 = Some(&summary.max_fragment_id) - } -} - impl Fragment { fn new(id: FragmentId, insertion: Arc, range_in_insertion: Range) -> Self { Self { 
id, insertion, + len: range_in_insertion.len(), range_in_insertion, deletions: Default::default(), max_undos: Default::default(), @@ -2862,8 +2632,8 @@ mod tests { // An edit emits an edited event, followed by a dirtied event, // since the buffer was previously in a clean state. - let ops = buffer.edit(Some(2..4), "XYZ", Some(cx)).unwrap(); - buffer_ops.extend_from_slice(&ops); + let op = buffer.edit(Some(2..4), "XYZ", Some(cx)).unwrap(); + buffer_ops.push(op); // An empty transaction does not emit any events. buffer.start_transaction(None).unwrap(); @@ -2872,10 +2642,8 @@ mod tests { // A transaction containing two edits emits one edited event. now += Duration::from_secs(1); buffer.start_transaction_at(None, now).unwrap(); - let ops = buffer.edit(Some(5..5), "u", Some(cx)).unwrap(); - buffer_ops.extend_from_slice(&ops); - let ops = buffer.edit(Some(6..6), "w", Some(cx)).unwrap(); - buffer_ops.extend_from_slice(&ops); + buffer_ops.push(buffer.edit(Some(5..5), "u", Some(cx)).unwrap()); + buffer_ops.push(buffer.edit(Some(6..6), "w", Some(cx)).unwrap()); buffer.end_transaction_at(None, now, Some(cx)).unwrap(); // Undoing a transaction emits one edited event. @@ -3495,25 +3263,25 @@ mod tests { let edit3 = buffer.edit(vec![3..5], "cd", None).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit1.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit1.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit2.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit3.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit2.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit3.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(edit3[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit3.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(edit1[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit1.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(edit2[0].edit_id().unwrap()).unwrap(); + buffer.undo_or_redo(edit2.edit_id().unwrap()).unwrap(); assert_eq!(buffer.text(), "1234"); buffer @@ -3871,7 +3639,7 @@ mod tests { rng: &mut T, old_range_count: usize, cx: Option<&mut ModelContext>, - ) -> (Vec>, String, Vec) + ) -> (Vec>, String, Operation) where T: Rng, { @@ -3886,11 +3654,11 @@ mod tests { let new_text_len = rng.gen_range(0..10); let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); - let operations = self + let operation = self .edit(old_ranges.iter().cloned(), new_text.as_str(), cx) .unwrap(); - (old_ranges, new_text, operations) + (old_ranges, new_text, operation) } pub fn randomly_mutate( @@ -3901,9 +3669,9 @@ mod tests { where T: Rng, { - // Randomly edit - let (old_ranges, new_text, mut operations) = - self.randomly_edit(rng, 5, cx.as_deref_mut()); + let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, cx.as_deref_mut()); + let 
mut operations = vec![operation]; + log::info!("Mutating buffer at {:?}: {:?}", old_ranges, new_text); // Randomly add, remove or mutate selection sets. From 5f28fdb8f7905f426a35374242148e5b83cbdae8 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 1 Jun 2021 18:22:08 -0700 Subject: [PATCH 11/40] WIP --- zed/src/editor/buffer.rs | 583 ++++++++++++--------------------------- 1 file changed, 175 insertions(+), 408 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index de8ecea4e4a21a754278572ac6f33f85c9926128..cb0b68c2a7e50c71349ef6b7bed78a9451c2edea 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -28,7 +28,7 @@ use std::{ cell::RefCell, cmp, hash::BuildHasher, - iter::{self, Iterator}, + iter::Iterator, mem, ops::{Deref, DerefMut, Range}, str, @@ -109,7 +109,6 @@ pub struct Buffer { fragments: SumTree, visible_text: Rope, deleted_text: Rope, - insertion_splits: HashMap>, pub version: time::Global, saved_version: time::Global, saved_mtime: SystemTime, @@ -338,9 +337,7 @@ pub struct Insertion { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { - id: FragmentId, - insertion: Arc, - range_in_insertion: Range, + insertion_id: time::Local, len: usize, deletions: HashSet, max_undos: time::Global, @@ -350,7 +347,6 @@ struct Fragment { #[derive(Eq, PartialEq, Clone, Debug)] pub struct FragmentSummary { text: FragmentTextSummary, - max_fragment_id: FragmentId, max_version: time::Global, min_insertion_version: time::Global, max_insertion_version: time::Global, @@ -369,17 +365,6 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { } } -#[derive(Eq, PartialEq, Clone, Debug)] -struct InsertionSplit { - extent: usize, - fragment_id: FragmentId, -} - -#[derive(Eq, PartialEq, Clone, Debug)] -struct InsertionSplitSummary { - extent: usize, -} - #[derive(Clone, Debug, Eq, PartialEq)] pub enum Operation { Edit { @@ -474,52 +459,20 @@ impl Buffer { saved_mtime = UNIX_EPOCH; } - let mut visible_text = Rope::new(); - let mut insertion_splits = HashMap::default(); let mut fragments = SumTree::new(); - let base_text = Rope::from(history.base_text.as_ref()); - let base_insertion = Arc::new(Insertion { - id: time::Local::default(), - parent_id: time::Local::default(), - offset_in_parent: 0, - lamport_timestamp: time::Lamport::default(), - }); + let visible_text = Rope::from(history.base_text.as_ref()); + let mut local_clock = time::Local::new(replica_id); - insertion_splits.insert( - base_insertion.id, - SumTree::from_item( - InsertionSplit { - fragment_id: FragmentId::min_value().clone(), - extent: 0, - }, - &(), - ), - ); - fragments.push( - Fragment::new( - FragmentId::min_value().clone(), - base_insertion.clone(), - 0..0, - ), - &None, - ); - - if base_text.len() > 0 { - let base_fragment_id = - FragmentId::between(&FragmentId::min_value(), &FragmentId::max_value()); - let range_in_insertion = 0..base_text.len(); - - visible_text = base_text.clone(); - insertion_splits.get_mut(&base_insertion.id).unwrap().push( - InsertionSplit { - fragment_id: base_fragment_id.clone(), - extent: range_in_insertion.end, - }, - &(), - ); + if visible_text.len() > 0 { fragments.push( - Fragment::new(base_fragment_id, base_insertion, range_in_insertion.clone()), + Fragment { + insertion_id: local_clock.tick(), + len: visible_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, &None, ); } @@ -528,7 +481,6 @@ impl Buffer { visible_text, deleted_text: Rope::new(), fragments, - insertion_splits, 
version: time::Global::new(), saved_version: time::Global::new(), last_edit: time::Local::default(), @@ -544,7 +496,7 @@ impl Buffer { deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), replica_id, - local_clock: time::Local::new(replica_id), + local_clock, lamport_clock: time::Lamport::new(replica_id), }; result.reparse(cx); @@ -1205,7 +1157,7 @@ impl Buffer { if let Some(fragment) = fragment { // Was this fragment visible in the edit's base version? If not, push it into // the new fragments, skip it, and continue the loop. - if !version.observed(fragment.insertion.id) { + if !version.observed(fragment.insertion_id) { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); old_fragments.next(&old_fragments_cx); @@ -1222,10 +1174,7 @@ impl Buffer { deletions: fragment.deletions.clone(), max_undos: fragment.max_undos.clone(), visible: fragment.visible, - // DELETE - id: Default::default(), - insertion: fragment.insertion.clone(), - range_in_insertion: Default::default(), + insertion_id: fragment.insertion_id, }; new_ropes.push_fragment(&suffix, fragment.visible); new_fragments.push(suffix, &None); @@ -1249,11 +1198,7 @@ impl Buffer { deletions: fragment.deletions.clone(), max_undos: fragment.max_undos.clone(), visible: fragment.visible, - - // TODO: Delete these - id: Default::default(), - insertion: fragment.insertion.clone(), - range_in_insertion: Default::default(), + insertion_id: fragment.insertion_id, }; fragment_start_offset += prefix.len; new_ropes.push_fragment(&prefix, fragment.visible); @@ -1273,11 +1218,7 @@ impl Buffer { deletions, max_undos: fragment.max_undos.clone(), visible: false, - - // TODO: Delete these - id: Default::default(), - insertion: fragment.insertion.clone(), - range_in_insertion: Default::default(), + insertion_id: fragment.insertion_id, }; fragment_start_offset += deleted.len; new_ropes.push_fragment(&deleted, fragment.visible); @@ -1293,11 +1234,7 @@ impl Buffer { deletions: Default::default(), max_undos: Default::default(), // TODO: Is this right? 
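+                        // The inserted fragment starts with empty `deletions` and
+                        // `max_undos`, i.e. nothing has hidden it yet, which is why
+                        // it can begin life visible under `is_visible`.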
visible: true, - - // TODO: Delete these - id: Default::default(), - insertion: fragment.insertion.clone(), - range_in_insertion: Default::default(), + insertion_id: local_timestamp, }, &None, ); @@ -1326,16 +1263,13 @@ impl Buffer { deletions: fragment.deletions.clone(), max_undos: fragment.max_undos.clone(), visible: fragment.visible, - // DELETE - id: Default::default(), - insertion: fragment.insertion.clone(), - range_in_insertion: Default::default(), + insertion_id: fragment.insertion_id, }; new_ropes.push_fragment(&suffix, fragment.visible); new_fragments.push(suffix, &None); } - let suffix_fragments = old_fragments.suffix(&None); + let suffix_fragments = old_fragments.suffix(&old_fragments_cx); new_ropes.push_tree(suffix_fragments.summary().text); new_fragments.push_tree(suffix_fragments, &None); } @@ -1420,95 +1354,129 @@ impl Buffer { } fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> { + let mut old_fragments = self.fragments.cursor::(); + + let mut new_fragments = SumTree::new(); + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + + self.undo_map.insert(undo); + let edit = &self.history.ops[&undo.edit_id]; + + let version = Some(edit.version.clone()); + for range in &edit.ranges { + let preceding_fragments = old_fragments.slice( + &VersionedOffset::Offset(range.start), + SeekBias::Right, + &version, + ); + new_ropes.push_tree(preceding_fragments.summary().text); + new_fragments.push_tree(preceding_fragments, &None); + + while old_fragments.end(&version).offset() < range.end { + if let Some(fragment) = old_fragments.item() { + let mut fragment = fragment.clone(); + let fragment_was_visible = fragment.visible; + if edit.version.observed(fragment.insertion_id) { + fragment.visible = fragment.is_visible(&self.undo_map); + fragment.max_undos.observe(undo.id); + } + new_ropes.push_fragment(&fragment, fragment_was_visible); + new_fragments.push(fragment, &None); + + // Skip over any fragments that were not present when the edit occurred. 
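+                    // Fragments whose insertions the edit never observed cannot be
+                    // affected by undoing it, so they are carried over unchanged.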
+ let newer_fragments = old_fragments.slice( + &old_fragments.end(&version), + SeekBias::Right, + &version, + ); + new_ropes.push_tree(newer_fragments.summary().text); + new_fragments.push_tree(newer_fragments, &None); + } else { + break; + } + } + } + + let suffix = old_fragments.suffix(&version); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); + + drop(old_fragments); + let (visible_text, deleted_text) = new_ropes.finish(); + self.fragments = new_fragments; + self.visible_text = visible_text; + self.deleted_text = deleted_text; + + // if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset { + // let splits = &self.insertion_splits[&undo.edit_id]; + // let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable(); + + // let first_split_id = insertion_splits.next().unwrap(); + // new_fragments = + // fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); + // new_ropes.push_tree(new_fragments.summary().text); + + // loop { + // let mut fragment = fragments_cursor.item().unwrap().clone(); + // let was_visible = fragment.visible; + // fragment.visible = fragment.is_visible(&self.undo_map); + // fragment.max_undos.observe(undo.id); + + // new_ropes.push_fragment(&fragment, was_visible); + // new_fragments.push(fragment.clone(), &None); + + // fragments_cursor.next(&None); + // if let Some(split_id) = insertion_splits.next() { + // let slice = fragments_cursor.slice( + // &FragmentIdRef::new(split_id), + // SeekBias::Left, + // &None, + // ); + // new_ropes.push_tree(slice.summary().text); + // new_fragments.push_tree(slice, &None); + // } else { + // break; + // } + // } + // } else { + // new_fragments = fragments_cursor.slice( + // &FragmentIdRef::new(&edit.), + // SeekBias::Left, + // &None, + // ); + // new_ropes.push_tree(new_fragments.summary().text); + + // while let Some(fragment) = fragments_cursor.item() { + // if fragment.id > end_fragment_id { + // break; + // } else { + // let mut fragment = fragment.clone(); + // let fragment_was_visible = fragment.visible; + // if edit.version_in_range.observed(fragment.insertion.id) + // || fragment.insertion.id == undo.edit_id + // { + // fragment.visible = fragment.is_visible(&self.undo_map); + // fragment.max_undos.observe(undo.id); + // } + + // new_ropes.push_fragment(&fragment, fragment_was_visible); + // new_fragments.push(fragment, &None); + // fragments_cursor.next(&None); + // } + // } + // } + + // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); + // let (visible_text, deleted_text) = new_ropes.finish(); + // drop(fragments_cursor); + // self.fragments = new_fragments; + // self.visible_text = visible_text; + // self.deleted_text = deleted_text; + Ok(()) } - // { - // let mut new_fragments; - // let mut old_visible_text = Rope::new(); - // let mut old_deleted_text = Rope::new(); - // mem::swap(&mut old_visible_text, &mut self.visible_text); - // mem::swap(&mut old_deleted_text, &mut self.deleted_text); - // let mut new_ropes = - // RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); - - // self.undo_map.insert(undo); - // let edit = &self.history.ops[&undo.edit_id]; - // let start_fragment_id = self.resolve_fragment_id(edit.start_id, edit.start_offset)?; - // let end_fragment_id = self.resolve_fragment_id(edit.end_id, edit.end_offset)?; - - // let mut fragments_cursor = self.fragments.cursor::(); - - // if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset { - // let 
splits = &self.insertion_splits[&undo.edit_id]; - // let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable(); - - // let first_split_id = insertion_splits.next().unwrap(); - // new_fragments = - // fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); - // new_ropes.push_tree(new_fragments.summary().text); - - // loop { - // let mut fragment = fragments_cursor.item().unwrap().clone(); - // let was_visible = fragment.visible; - // fragment.visible = fragment.is_visible(&self.undo_map); - // fragment.max_undos.observe(undo.id); - - // new_ropes.push_fragment(&fragment, was_visible); - // new_fragments.push(fragment.clone(), &None); - - // fragments_cursor.next(&None); - // if let Some(split_id) = insertion_splits.next() { - // let slice = fragments_cursor.slice( - // &FragmentIdRef::new(split_id), - // SeekBias::Left, - // &None, - // ); - // new_ropes.push_tree(slice.summary().text); - // new_fragments.push_tree(slice, &None); - // } else { - // break; - // } - // } - // } else { - // new_fragments = fragments_cursor.slice( - // &FragmentIdRef::new(&start_fragment_id), - // SeekBias::Left, - // &None, - // ); - // new_ropes.push_tree(new_fragments.summary().text); - - // while let Some(fragment) = fragments_cursor.item() { - // if fragment.id > end_fragment_id { - // break; - // } else { - // let mut fragment = fragment.clone(); - // let fragment_was_visible = fragment.visible; - // if edit.version_in_range.observed(fragment.insertion.id) - // || fragment.insertion.id == undo.edit_id - // { - // fragment.visible = fragment.is_visible(&self.undo_map); - // fragment.max_undos.observe(undo.id); - // } - - // new_ropes.push_fragment(&fragment, fragment_was_visible); - // new_fragments.push(fragment, &None); - // fragments_cursor.next(&None); - // } - // } - // } - - // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); - // let (visible_text, deleted_text) = new_ropes.finish(); - // drop(fragments_cursor); - - // self.fragments = new_fragments; - // self.visible_text = visible_text; - // self.deleted_text = deleted_text; - - // Ok(()) - // } - fn flush_deferred_ops(&mut self) -> Result<()> { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); @@ -1588,14 +1556,6 @@ impl Buffer { let mut fragment_end = fragment_start + fragment.visible_len(); let fragment_was_visible = fragment.visible; - let old_split_tree = self - .insertion_splits - .remove(&fragment.insertion.id) - .unwrap(); - let mut splits_cursor = old_split_tree.cursor::(); - let mut new_split_tree = - splits_cursor.slice(&fragment.range_in_insertion.start, SeekBias::Right, &()); - // Find all splices that start or end within the current fragment. Then, split the // fragment and reassemble it in both trees accounting for the deleted and the newly // inserted text. 
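+            // e.g. replacing 1..2 of "abc" with "XY" yields the fragments
+            // "a" (visible), "XY" (the new insertion), "b" (kept but marked
+            // deleted) and "c" (untouched suffix), so the visible text is "aXYc".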
@@ -1603,33 +1563,23 @@ impl Buffer { let range = cur_range.clone().unwrap(); if range.start > fragment_start { let mut prefix = fragment.clone(); - prefix.range_in_insertion.end = - prefix.range_in_insertion.start + (range.start - fragment_start); - prefix.id = - FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id); - fragment.range_in_insertion.start = prefix.range_in_insertion.end; + prefix.len = range.start - fragment_start; + fragment.len -= prefix.len; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix.clone(), &None); - new_split_tree.push( - InsertionSplit { - extent: prefix.range_in_insertion.end - prefix.range_in_insertion.start, - fragment_id: prefix.id, - }, - &(), - ); fragment_start = range.start; } if range.start == fragment_start { if let Some(new_text) = new_text.clone() { - let new_fragment = self.build_fragment_to_insert( - &new_fragments.last().unwrap(), - Some(&fragment), - &new_text, - edit_id, - lamport_timestamp, - ); + let new_fragment = Fragment { + len: new_text.len(), + insertion_id: edit_id, + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; new_ropes.push_str(&new_text); new_fragments.push(new_fragment, &None); @@ -1639,25 +1589,14 @@ impl Buffer { if range.end < fragment_end { if range.end > fragment_start { let mut prefix = fragment.clone(); - prefix.range_in_insertion.end = - prefix.range_in_insertion.start + (range.end - fragment_start); - prefix.id = - FragmentId::between(&new_fragments.last().unwrap().id, &fragment.id); + prefix.len = range.end - fragment_start; if prefix.visible { prefix.deletions.insert(edit_id); prefix.visible = false; } - fragment.range_in_insertion.start = prefix.range_in_insertion.end; + fragment.len -= prefix.len; new_ropes.push_fragment(&prefix, fragment_was_visible); new_fragments.push(prefix.clone(), &None); - new_split_tree.push( - InsertionSplit { - extent: prefix.range_in_insertion.end - - prefix.range_in_insertion.start, - fragment_id: prefix.id, - }, - &(), - ); fragment_start = range.end; } } else { @@ -1676,20 +1615,6 @@ impl Buffer { break; } } - new_split_tree.push( - InsertionSplit { - extent: fragment.range_in_insertion.end - fragment.range_in_insertion.start, - fragment_id: fragment.id.clone(), - }, - &(), - ); - splits_cursor.next(&()); - new_split_tree.push_tree( - splits_cursor.slice(&old_split_tree.extent::(&()), SeekBias::Right, &()), - &(), - ); - self.insertion_splits - .insert(fragment.insertion.id, new_split_tree); new_ropes.push_fragment(&fragment, fragment_was_visible); new_fragments.push(fragment, &None); @@ -1740,16 +1665,15 @@ impl Buffer { // multiple because ranges must be disjoint. 
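+        // A range that starts beyond the last fragment has nothing left to
+        // delete; if the splice carries new text, it is appended as a trailing
+        // insertion.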
if cur_range.is_some() { debug_assert_eq!(old_ranges.next(), None); - let last_fragment = new_fragments.last().unwrap(); if let Some(new_text) = new_text { - let new_fragment = self.build_fragment_to_insert( - &last_fragment, - None, - &new_text, - edit_id, - lamport_timestamp, - ); + let new_fragment = Fragment { + len: new_text.len(), + insertion_id: edit_id, + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; new_ropes.push_str(&new_text); new_fragments.push(new_fragment, &None); @@ -1764,44 +1688,6 @@ impl Buffer { self.deleted_text = deleted_text; } - fn build_fragment_to_insert( - &mut self, - prev_fragment: &Fragment, - next_fragment: Option<&Fragment>, - text: &str, - insertion_id: time::Local, - lamport_timestamp: time::Lamport, - ) -> Fragment { - let new_fragment_id = FragmentId::between( - &prev_fragment.id, - next_fragment - .map(|f| &f.id) - .unwrap_or(&FragmentId::max_value()), - ); - - let range_in_insertion = 0..text.len(); - let mut split_tree = SumTree::new(); - split_tree.push( - InsertionSplit { - extent: range_in_insertion.len(), - fragment_id: new_fragment_id.clone(), - }, - &(), - ); - self.insertion_splits.insert(insertion_id, split_tree); - - Fragment::new( - new_fragment_id, - Arc::new(Insertion { - id: insertion_id, - parent_id: prev_fragment.insertion.id, - offset_in_parent: prev_fragment.range_in_insertion.end, - lamport_timestamp, - }), - range_in_insertion, - ) - } - pub fn anchor_before(&self, position: T) -> Anchor { self.anchor_at(position, AnchorBias::Left) } @@ -1905,7 +1791,6 @@ impl Clone for Buffer { fragments: self.fragments.clone(), visible_text: self.visible_text.clone(), deleted_text: self.deleted_text.clone(), - insertion_splits: self.insertion_splits.clone(), version: self.version.clone(), saved_version: self.saved_version.clone(), saved_mtime: self.saved_mtime, @@ -2015,7 +1900,7 @@ impl<'a> RopeBuilder<'a> { } fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) { - self.push(fragment.len(), was_visible, fragment.visible) + self.push(fragment.len, was_visible, fragment.visible) } fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) { @@ -2071,38 +1956,38 @@ impl<'a, F: Fn(&FragmentSummary) -> bool> Iterator for Edits<'a, F> { if !fragment.was_visible(&self.since, &self.undos) && fragment.visible { if let Some(ref mut change) = change { if change.new_range.end == new_offset { - change.new_range.end += fragment.len(); - self.delta += fragment.len() as isize; + change.new_range.end += fragment.len; + self.delta += fragment.len as isize; } else { break; } } else { change = Some(Edit { old_range: old_offset..old_offset, - new_range: new_offset..new_offset + fragment.len(), + new_range: new_offset..new_offset + fragment.len, old_lines: Point::zero(), }); - self.delta += fragment.len() as isize; + self.delta += fragment.len as isize; } } else if fragment.was_visible(&self.since, &self.undos) && !fragment.visible { let deleted_start = self.cursor.start().deleted; - let old_lines = self.deleted_text.to_point(deleted_start + fragment.len()) + let old_lines = self.deleted_text.to_point(deleted_start + fragment.len) - self.deleted_text.to_point(deleted_start); if let Some(ref mut change) = change { if change.new_range.end == new_offset { - change.old_range.end += fragment.len(); + change.old_range.end += fragment.len; change.old_lines += &old_lines; - self.delta -= fragment.len() as isize; + self.delta -= fragment.len as isize; } else { break; } } else { change = Some(Edit { - old_range: 
old_offset..old_offset + fragment.len(), + old_range: old_offset..old_offset + fragment.len, new_range: new_offset..new_offset, old_lines, }); - self.delta -= fragment.len() as isize; + self.delta -= fragment.len as isize; } } @@ -2233,85 +2118,22 @@ impl<'a> Iterator for HighlightedChunks<'a> { } } -#[derive(Ord, PartialOrd, Eq, PartialEq, Clone, Debug)] -struct FragmentId(Arc<[u16]>); - -lazy_static! { - static ref FRAGMENT_ID_EMPTY: FragmentId = FragmentId(Arc::from([])); - static ref FRAGMENT_ID_MIN_VALUE: FragmentId = FragmentId(Arc::from([0 as u16])); - static ref FRAGMENT_ID_MAX_VALUE: FragmentId = FragmentId(Arc::from([u16::max_value()])); -} - -impl Default for FragmentId { - fn default() -> Self { - FRAGMENT_ID_EMPTY.clone() - } -} - -impl FragmentId { - fn min_value() -> &'static Self { - &FRAGMENT_ID_MIN_VALUE - } - - fn max_value() -> &'static Self { - &FRAGMENT_ID_MAX_VALUE - } - - fn between(left: &Self, right: &Self) -> Self { - Self::between_with_max(left, right, u16::max_value()) - } - - fn between_with_max(left: &Self, right: &Self, max_value: u16) -> Self { - let mut new_entries = Vec::new(); - - let left_entries = left.0.iter().cloned().chain(iter::repeat(0)); - let right_entries = right.0.iter().cloned().chain(iter::repeat(max_value)); - for (l, r) in left_entries.zip(right_entries) { - let interval = r - l; - if interval > 1 { - new_entries.push(l + cmp::max(1, cmp::min(8, interval / 2))); - break; - } else { - new_entries.push(l); - } - } - - FragmentId(Arc::from(new_entries)) - } -} - impl Fragment { - fn new(id: FragmentId, insertion: Arc, range_in_insertion: Range) -> Self { - Self { - id, - insertion, - len: range_in_insertion.len(), - range_in_insertion, - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - } - } - fn is_visible(&self, undos: &UndoMap) -> bool { - !undos.is_undone(self.insertion.id) && self.deletions.iter().all(|d| undos.is_undone(*d)) + !undos.is_undone(self.insertion_id) && self.deletions.iter().all(|d| undos.is_undone(*d)) } fn was_visible(&self, version: &time::Global, undos: &UndoMap) -> bool { - (version.observed(self.insertion.id) && !undos.was_undone(self.insertion.id, version)) + (version.observed(self.insertion_id) && !undos.was_undone(self.insertion_id, version)) && self .deletions .iter() .all(|d| !version.observed(*d) || undos.was_undone(*d, version)) } - fn len(&self) -> usize { - self.range_in_insertion.len() - } - fn visible_len(&self) -> usize { if self.visible { - self.range_in_insertion.len() + self.len } else { 0 } @@ -2323,22 +2145,21 @@ impl sum_tree::Item for Fragment { fn summary(&self) -> Self::Summary { let mut max_version = time::Global::new(); - max_version.observe(self.insertion.id); + max_version.observe(self.insertion_id); for deletion in &self.deletions { max_version.observe(*deletion); } max_version.join(&self.max_undos); let mut min_insertion_version = time::Global::new(); - min_insertion_version.observe(self.insertion.id); + min_insertion_version.observe(self.insertion_id); let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { text: FragmentTextSummary { - visible: self.len(), + visible: self.len, deleted: 0, }, - max_fragment_id: self.id.clone(), max_version, min_insertion_version, max_insertion_version, @@ -2347,9 +2168,8 @@ impl sum_tree::Item for Fragment { FragmentSummary { text: FragmentTextSummary { visible: 0, - deleted: self.len(), + deleted: self.len, }, - max_fragment_id: self.id.clone(), max_version, min_insertion_version, 
max_insertion_version, @@ -2364,8 +2184,6 @@ impl sum_tree::Summary for FragmentSummary { fn add_summary(&mut self, other: &Self, _: &Self::Context) { self.text.visible += &other.text.visible; self.text.deleted += &other.text.deleted; - debug_assert!(self.max_fragment_id <= other.max_fragment_id); - self.max_fragment_id = other.max_fragment_id.clone(); self.max_version.join(&other.max_version); self.min_insertion_version .meet(&other.min_insertion_version); @@ -2378,7 +2196,6 @@ impl Default for FragmentSummary { fn default() -> Self { FragmentSummary { text: FragmentTextSummary::default(), - max_fragment_id: FragmentId::min_value().clone(), max_version: time::Global::new(), min_insertion_version: time::Global::new(), max_insertion_version: time::Global::new(), @@ -2392,36 +2209,6 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize { } } -impl sum_tree::Item for InsertionSplit { - type Summary = InsertionSplitSummary; - - fn summary(&self) -> Self::Summary { - InsertionSplitSummary { - extent: self.extent, - } - } -} - -impl sum_tree::Summary for InsertionSplitSummary { - type Context = (); - - fn add_summary(&mut self, other: &Self, _: &()) { - self.extent += other.extent; - } -} - -impl Default for InsertionSplitSummary { - fn default() -> Self { - InsertionSplitSummary { extent: 0 } - } -} - -impl<'a> sum_tree::Dimension<'a, InsertionSplitSummary> for usize { - fn add_summary(&mut self, summary: &InsertionSplitSummary, _: &()) { - *self += summary.extent; - } -} - #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum VersionedOffset { Offset(usize), @@ -2847,26 +2634,6 @@ mod tests { }); } - #[test] - fn test_fragment_ids() { - for seed in 0..10 { - let rng = &mut StdRng::seed_from_u64(seed); - - let mut ids = vec![FragmentId(Arc::from([0])), FragmentId(Arc::from([4]))]; - for _i in 0..100 { - let index = rng.gen_range(1..ids.len()); - - let left = ids[index - 1].clone(); - let right = ids[index].clone(); - ids.insert(index, FragmentId::between_with_max(&left, &right, 4)); - - let mut sorted_ids = ids.clone(); - sorted_ids.sort(); - assert_eq!(ids, sorted_ids); - } - } - } - #[gpui::test] fn test_anchors(cx: &mut gpui::MutableAppContext) { cx.add_model(|cx| { From 11a3b8c5ce517c3a22ff9f5eecd079c7072e34dd Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Jun 2021 12:13:08 +0200 Subject: [PATCH 12/40] Don't increment clock for initial insertion --- zed/src/editor/buffer.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index cb0b68c2a7e50c71349ef6b7bed78a9451c2edea..f9e061030f3b93b74b1a9f56b0747fa89dc38485 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -462,12 +462,10 @@ impl Buffer { let mut fragments = SumTree::new(); let visible_text = Rope::from(history.base_text.as_ref()); - let mut local_clock = time::Local::new(replica_id); - if visible_text.len() > 0 { fragments.push( Fragment { - insertion_id: local_clock.tick(), + insertion_id: Default::default(), len: visible_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -496,7 +494,7 @@ impl Buffer { deferred_ops: OperationQueue::new(), deferred_replicas: HashSet::default(), replica_id, - local_clock, + local_clock: time::Local::new(replica_id), lamport_clock: time::Lamport::new(replica_id), }; result.reparse(cx); From 84fe7f50ac3bbb2ccf6e7e800fe829de06bab266 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Wed, 2 Jun 2021 12:24:00 +0200 Subject: [PATCH 13/40] Speed up anchor comparison 
when the version is the same --- zed/src/editor/buffer/anchor.rs | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 1f44eb65f0fc81d17f6a5e08614660a768ffa269..145330416c183d1141871fe916f7388b5ad4f1fb 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -61,15 +61,26 @@ impl Anchor { (Anchor::End, _) | (_, Anchor::Start) => Ordering::Greater, ( Anchor::Middle { - bias: self_bias, .. + offset: self_offset, + bias: self_bias, + version: self_version, }, Anchor::Middle { - bias: other_bias, .. + offset: other_offset, + bias: other_bias, + version: other_version, }, - ) => buffer - .full_offset_for_anchor(self) - .cmp(&buffer.full_offset_for_anchor(other)) - .then_with(|| self_bias.cmp(&other_bias)), + ) => { + let offset_comparison = if self_version == other_version { + self_offset.cmp(other_offset) + } else { + buffer + .full_offset_for_anchor(self) + .cmp(&buffer.full_offset_for_anchor(other)) + }; + + offset_comparison.then_with(|| self_bias.cmp(&other_bias)) + } }) } From 60a1d47c96c893a4e2c56fd5bdb72bd707d145fb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 12:09:16 -0700 Subject: [PATCH 14/40] Generalize Dimension for tuples --- zed/src/editor/buffer.rs | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index f9e061030f3b93b74b1a9f56b0747fa89dc38485..5657b54a3f8c3368fd3fbe911718760740b5130d 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2258,17 +2258,14 @@ impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset { } } -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, usize) { +impl<'a, T, U> sum_tree::Dimension<'a, FragmentSummary> for (T, U) +where + T: sum_tree::Dimension<'a, FragmentSummary>, + U: sum_tree::Dimension<'a, FragmentSummary>, +{ fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { self.0.add_summary(summary, cx); - self.1 += summary.text.visible; - } -} - -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for (VersionedOffset, FullOffset) { - fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { - self.0.add_summary(summary, cx); - self.1 .0 += summary.text.visible + summary.text.deleted; + self.1.add_summary(summary, cx); } } From d83a046911ff4d8d3a05044b2834728da993c774 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 13:51:52 -0700 Subject: [PATCH 15/40] Avoid calling SumTree::extent in Cursor::suffix --- zed/src/sum_tree/cursor.rs | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 7016a79c9cb285f415ce8b52b564868be7e46671..18c86b03387c56a971fe826de727ad44769261ac 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -348,7 +348,7 @@ where { pub fn seek(&mut self, pos: &S, bias: SeekBias, cx: &::Context) -> bool { self.reset(); - self.seek_internal::<()>(pos, bias, &mut SeekAggregate::None, cx) + self.seek_internal::<()>(Some(pos), bias, &mut SeekAggregate::None, cx) } pub fn seek_forward( @@ -357,7 +357,7 @@ where bias: SeekBias, cx: &::Context, ) -> bool { - self.seek_internal::<()>(pos, bias, &mut SeekAggregate::None, cx) + self.seek_internal::<()>(Some(pos), bias, &mut SeekAggregate::None, cx) } pub fn slice( @@ -367,7 +367,7 @@ where cx: &::Context, ) -> SumTree { let mut 
slice = SeekAggregate::Slice(SumTree::new()); - self.seek_internal::<()>(end, bias, &mut slice, cx); + self.seek_internal::<()>(Some(end), bias, &mut slice, cx); if let SeekAggregate::Slice(slice) = slice { slice } else { @@ -376,9 +376,8 @@ where } pub fn suffix(&mut self, cx: &::Context) -> SumTree { - let extent = self.tree.extent::(cx); let mut slice = SeekAggregate::Slice(SumTree::new()); - self.seek_internal::<()>(&extent, SeekBias::Right, &mut slice, cx); + self.seek_internal::<()>(None, SeekBias::Right, &mut slice, cx); if let SeekAggregate::Slice(slice) = slice { slice } else { @@ -396,7 +395,7 @@ where D: Dimension<'a, T::Summary>, { let mut summary = SeekAggregate::Summary(D::default()); - self.seek_internal(end, bias, &mut summary, cx); + self.seek_internal(Some(end), bias, &mut summary, cx); if let SeekAggregate::Summary(summary) = summary { summary } else { @@ -406,7 +405,7 @@ where fn seek_internal( &mut self, - target: &S, + target: Option<&S>, bias: SeekBias, aggregate: &mut SeekAggregate, cx: &::Context, @@ -414,7 +413,9 @@ where where D: Dimension<'a, T::Summary>, { - debug_assert!(target.cmp(&self.seek_dimension, cx) >= Ordering::Equal); + if let Some(target) = target { + debug_assert!(target.cmp(&self.seek_dimension, cx) >= Ordering::Equal); + } if !self.did_seek { self.did_seek = true; @@ -445,7 +446,8 @@ where let mut child_end = self.seek_dimension.clone(); child_end.add_summary(&child_summary, cx); - let comparison = target.cmp(&child_end, cx); + let comparison = + target.map_or(Ordering::Greater, |t| t.cmp(&child_end, cx)); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { @@ -494,7 +496,8 @@ where let mut child_end = self.seek_dimension.clone(); child_end.add_summary(item_summary, cx); - let comparison = target.cmp(&child_end, cx); + let comparison = + target.map_or(Ordering::Greater, |t| t.cmp(&child_end, cx)); if comparison == Ordering::Greater || (comparison == Ordering::Equal && bias == SeekBias::Right) { @@ -551,15 +554,15 @@ where self.at_end = self.stack.is_empty(); debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf()); + + let mut end = self.seek_dimension.clone(); if bias == SeekBias::Left { - let mut end = self.seek_dimension.clone(); if let Some(summary) = self.item_summary() { end.add_summary(summary, cx); } - target.cmp(&end, cx) == Ordering::Equal - } else { - target.cmp(&self.seek_dimension, cx) == Ordering::Equal } + + target.map_or(false, |t| t.cmp(&end, cx) == Ordering::Equal) } } From 657b0affd504c7b528b72c775f8ff50a69bba85c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 14:43:17 -0700 Subject: [PATCH 16/40] Avoid unnecessarily adding internal summaries in Cursor::next --- zed/src/sum_tree/cursor.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 18c86b03387c56a971fe826de727ad44769261ac..5e1624aa877c718c172c520b1c2108db5683a5ce 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -229,9 +229,8 @@ where .. 
} => { if !descend { - let summary = &child_summaries[entry.index]; - entry.seek_dimension.add_summary(summary, cx); - entry.sum_dimension.add_summary(summary, cx); + entry.seek_dimension = self.seek_dimension.clone(); + entry.sum_dimension = self.sum_dimension.clone(); entry.index += 1; } From 01cfba0f8ea300e8edd21a6eac2c2d656c52161b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 14:47:22 -0700 Subject: [PATCH 17/40] Compute full ranges for edit operation inside of Buffer::splice_fragments --- zed/src/editor/buffer.rs | 61 +++++++++++++++++++++++++--------------- 1 file changed, 39 insertions(+), 22 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 5657b54a3f8c3368fd3fbe911718760740b5130d..0ab475b009b244d6d53776e3b5cea81b51255608 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -933,14 +933,8 @@ impl Buffer { let edit_id = self.local_clock.tick(); let lamport_timestamp = self.lamport_clock.tick(); - self.splice_fragments(&ranges, new_text.clone(), edit_id, lamport_timestamp); + let edit = self.splice_fragments(&ranges, new_text, edit_id, lamport_timestamp); - let edit = EditOperation { - id: edit_id, - version: self.version.clone(), - ranges, - new_text, - }; self.history.push(edit.clone()); self.history.push_undo(edit.id); self.last_edit = edit.id; @@ -1529,18 +1523,25 @@ impl Buffer { new_text: Option, edit_id: time::Local, lamport_timestamp: time::Lamport, - ) { + ) -> EditOperation { + let mut edit = EditOperation { + id: edit_id, + version: self.version.clone(), + ranges: Vec::with_capacity(old_ranges.len()), + new_text: new_text.clone(), + }; + let mut old_ranges = old_ranges.iter(); let mut cur_range = old_ranges.next(); if cur_range.is_none() { - return; + return edit; } let old_fragments = mem::take(&mut self.fragments); let old_visible_text = mem::take(&mut self.visible_text); let old_deleted_text = mem::take(&mut self.deleted_text); - let mut fragments_cursor = old_fragments.cursor::(); + let mut fragments_cursor = old_fragments.cursor::(); let mut new_fragments = fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &None); @@ -1550,8 +1551,9 @@ impl Buffer { while cur_range.is_some() && fragments_cursor.item().is_some() { let mut fragment = fragments_cursor.item().unwrap().clone(); - let mut fragment_start = *fragments_cursor.start(); + let mut fragment_start = fragments_cursor.start().0; let mut fragment_end = fragment_start + fragment.visible_len(); + let mut full_range = 0..0; let fragment_was_visible = fragment.visible; // Find all splices that start or end within the current fragment. Then, split the @@ -1559,17 +1561,21 @@ impl Buffer { // inserted text. 
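The switch to a `(usize, FullOffset)` cursor above is what the generic tuple `Dimension` impl from PATCH 14 enables: the tree is still sought by visible offset, while the full offset (visible plus deleted bytes) is accumulated alongside it at no extra cost. A compiling toy model of that idea, with the trait and summary types simplified (these names are assumptions, not the real zed sum_tree API):

// Toy model: any pair of dimensions over the same summary is itself a dimension.
trait Dimension<S>: Default {
    fn add_summary(&mut self, summary: &S);
}

struct TextSummary {
    visible: usize,
    deleted: usize,
}

#[derive(Default)]
struct Visible(usize);
#[derive(Default)]
struct Full(usize);

impl Dimension<TextSummary> for Visible {
    fn add_summary(&mut self, s: &TextSummary) {
        self.0 += s.visible;
    }
}

impl Dimension<TextSummary> for Full {
    fn add_summary(&mut self, s: &TextSummary) {
        self.0 += s.visible + s.deleted;
    }
}

impl<S, T: Dimension<S>, U: Dimension<S>> Dimension<S> for (T, U) {
    fn add_summary(&mut self, s: &S) {
        self.0.add_summary(s);
        self.1.add_summary(s);
    }
}

fn main() {
    // Three fragments: "ab" visible, "cd" deleted, "ef" visible.
    let summaries = [
        TextSummary { visible: 2, deleted: 0 },
        TextSummary { visible: 0, deleted: 2 },
        TextSummary { visible: 2, deleted: 0 },
    ];
    let mut dims = <(Visible, Full)>::default();
    for s in &summaries {
        dims.add_summary(s);
    }
    assert_eq!((dims.0 .0, dims.1 .0), (4, 6)); // 4 visible bytes, 6 including tombstones
}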
while cur_range.as_ref().map_or(false, |r| r.start < fragment_end) { let range = cur_range.clone().unwrap(); - if range.start > fragment_start { - let mut prefix = fragment.clone(); - prefix.len = range.start - fragment_start; - fragment.len -= prefix.len; - - new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix.clone(), &None); - fragment_start = range.start; - } - if range.start == fragment_start { + if range.start >= fragment_start { + full_range.start = + fragments_cursor.start().1 .0 + (range.start - fragments_cursor.start().0); + + if range.start > fragment_start { + let mut prefix = fragment.clone(); + prefix.len = range.start - fragment_start; + fragment.len -= prefix.len; + + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix.clone(), &None); + fragment_start = range.start; + } + if let Some(new_text) = new_text.clone() { let new_fragment = Fragment { len: new_text.len(), @@ -1608,6 +1614,9 @@ impl Buffer { // check if it also intersects the current fragment. Otherwise we break out of the // loop and find the first fragment that the splice does not contain fully. if range.end <= fragment_end { + full_range.end = + fragments_cursor.start().1 .0 + (range.end - fragments_cursor.start().0); + edit.ranges.push(full_range.clone()); cur_range = old_ranges.next(); } else { break; @@ -1622,7 +1631,10 @@ impl Buffer { if let Some(range) = cur_range.clone() { while let Some(fragment) = fragments_cursor.item() { let fragment_was_visible = fragment.visible; - fragment_start = *fragments_cursor.start(); + fragment_start = fragments_cursor.start().0; + full_range.end = + fragments_cursor.start().1 .0 + (range.end - fragments_cursor.start().0); + fragment_end = fragment_start + fragment.visible_len(); if range.start < fragment_start && range.end >= fragment_end { let mut new_fragment = fragment.clone(); @@ -1636,6 +1648,7 @@ impl Buffer { fragments_cursor.next(&None); if range.end == fragment_end { + edit.ranges.push(full_range.clone()); cur_range = old_ranges.next(); break; } @@ -1664,6 +1677,9 @@ impl Buffer { if cur_range.is_some() { debug_assert_eq!(old_ranges.next(), None); + let full_offset = fragments_cursor.end(&None).1 .0; + edit.ranges.push(full_offset..full_offset); + if let Some(new_text) = new_text { let new_fragment = Fragment { len: new_text.len(), @@ -1684,6 +1700,7 @@ impl Buffer { self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; + edit } pub fn anchor_before(&self, position: T) -> Anchor { From 2c8d5973f39c64a08b81483992928933a4f8fa02 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 15:08:29 -0700 Subject: [PATCH 18/40] Add a hand-written error message for the `seek_internal` precondition --- zed/src/sum_tree/cursor.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 5e1624aa877c718c172c520b1c2108db5683a5ce..649b3ad28a8b0f77ba94f4f90d8d47ec13821f0a 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -413,7 +413,12 @@ where D: Dimension<'a, T::Summary>, { if let Some(target) = target { - debug_assert!(target.cmp(&self.seek_dimension, cx) >= Ordering::Equal); + debug_assert!( + target.cmp(&self.seek_dimension, cx) >= Ordering::Equal, + "cannot seek backward from {:?} to {:?}", + self.seek_dimension, + target + ); } if !self.did_seek { From bef93b319c3f0e8c07d061699d429d82e62d5fb6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 15:15:37 
-0700 Subject: [PATCH 19/40] Get undo/redo tests passing --- zed/src/editor/buffer.rs | 118 ++++++++++----------------------------- 1 file changed, 30 insertions(+), 88 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 0ab475b009b244d6d53776e3b5cea81b51255608..bda48535d780bf95c5aaf55008a48a2f9af32df7 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1346,44 +1346,55 @@ impl Buffer { } fn apply_undo(&mut self, undo: UndoOperation) -> Result<()> { + self.undo_map.insert(undo); + let edit = &self.history.ops[&undo.edit_id]; + let version = Some(edit.version.clone()); + let mut old_fragments = self.fragments.cursor::(); + old_fragments.seek(&VersionedOffset::Offset(0), SeekBias::Left, &version); let mut new_fragments = SumTree::new(); let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - self.undo_map.insert(undo); - let edit = &self.history.ops[&undo.edit_id]; + let mut version_after_edit = edit.version.clone(); + version_after_edit.observe(edit.id); - let version = Some(edit.version.clone()); for range in &edit.ranges { - let preceding_fragments = old_fragments.slice( - &VersionedOffset::Offset(range.start), - SeekBias::Right, - &version, - ); - new_ropes.push_tree(preceding_fragments.summary().text); - new_fragments.push_tree(preceding_fragments, &None); + let mut end_offset = old_fragments.end(&version).offset(); - while old_fragments.end(&version).offset() < range.end { + if end_offset < range.start { + let preceding_fragments = old_fragments.slice( + &VersionedOffset::Offset(range.start), + SeekBias::Left, + &version, + ); + new_ropes.push_tree(preceding_fragments.summary().text); + new_fragments.push_tree(preceding_fragments, &None); + } + + while end_offset <= range.end { if let Some(fragment) = old_fragments.item() { let mut fragment = fragment.clone(); let fragment_was_visible = fragment.visible; - if edit.version.observed(fragment.insertion_id) { + if version_after_edit.observed(fragment.insertion_id) { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); } new_ropes.push_fragment(&fragment, fragment_was_visible); new_fragments.push(fragment, &None); + old_fragments.next(&version); + end_offset = old_fragments.end(&version).offset(); + // Skip over any fragments that were not present when the edit occurred. 
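The undo path above leans on `time::Global` acting as a version vector: `observed` answers whether a particular insertion had already happened at a given point in history, and the `>=` partial order compares whole histories. A compiling toy sketch of that idea (a HashMap stands in for the real SmallVec-based type in zed/src/time.rs; the names here are assumptions):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct Local {
    replica_id: u16,
    value: u32,
}

// Toy version vector: the highest sequence number seen from each replica.
#[derive(Clone, Default, PartialEq)]
struct Global(HashMap<u16, u32>);

impl Global {
    fn observe(&mut self, t: Local) {
        let entry = self.0.entry(t.replica_id).or_insert(0);
        *entry = (*entry).max(t.value);
    }
    fn observed(&self, t: Local) -> bool {
        self.0.get(&t.replica_id).copied().unwrap_or(0) >= t.value
    }
    // Partial order: `self` dominates `other` iff it has observed everything `other` has.
    fn dominates(&self, other: &Self) -> bool {
        other.0.iter().all(|(r, v)| self.observed(Local { replica_id: *r, value: *v }))
    }
}

fn main() {
    let mut seen = Global::default();
    seen.observe(Local { replica_id: 1, value: 3 });
    assert!(seen.observed(Local { replica_id: 1, value: 2 }));
    assert!(!seen.observed(Local { replica_id: 2, value: 1 }));
    assert!(seen.dominates(&Global::default()));
}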
- let newer_fragments = old_fragments.slice( - &old_fragments.end(&version), - SeekBias::Right, - &version, - ); - new_ropes.push_tree(newer_fragments.summary().text); - new_fragments.push_tree(newer_fragments, &None); + // let newer_fragments = old_fragments.slice( + // &old_fragments.end(&version), + // SeekBias::Right, + // &version, + // ); + // new_ropes.push_tree(newer_fragments.summary().text); + // new_fragments.push_tree(newer_fragments, &None); } else { break; } @@ -1399,73 +1410,6 @@ impl Buffer { self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; - - // if edit.start_id == edit.end_id && edit.start_offset == edit.end_offset { - // let splits = &self.insertion_splits[&undo.edit_id]; - // let mut insertion_splits = splits.cursor::<(), ()>().map(|s| &s.fragment_id).peekable(); - - // let first_split_id = insertion_splits.next().unwrap(); - // new_fragments = - // fragments_cursor.slice(&FragmentIdRef::new(first_split_id), SeekBias::Left, &None); - // new_ropes.push_tree(new_fragments.summary().text); - - // loop { - // let mut fragment = fragments_cursor.item().unwrap().clone(); - // let was_visible = fragment.visible; - // fragment.visible = fragment.is_visible(&self.undo_map); - // fragment.max_undos.observe(undo.id); - - // new_ropes.push_fragment(&fragment, was_visible); - // new_fragments.push(fragment.clone(), &None); - - // fragments_cursor.next(&None); - // if let Some(split_id) = insertion_splits.next() { - // let slice = fragments_cursor.slice( - // &FragmentIdRef::new(split_id), - // SeekBias::Left, - // &None, - // ); - // new_ropes.push_tree(slice.summary().text); - // new_fragments.push_tree(slice, &None); - // } else { - // break; - // } - // } - // } else { - // new_fragments = fragments_cursor.slice( - // &FragmentIdRef::new(&edit.), - // SeekBias::Left, - // &None, - // ); - // new_ropes.push_tree(new_fragments.summary().text); - - // while let Some(fragment) = fragments_cursor.item() { - // if fragment.id > end_fragment_id { - // break; - // } else { - // let mut fragment = fragment.clone(); - // let fragment_was_visible = fragment.visible; - // if edit.version_in_range.observed(fragment.insertion.id) - // || fragment.insertion.id == undo.edit_id - // { - // fragment.visible = fragment.is_visible(&self.undo_map); - // fragment.max_undos.observe(undo.id); - // } - - // new_ropes.push_fragment(&fragment, fragment_was_visible); - // new_fragments.push(fragment, &None); - // fragments_cursor.next(&None); - // } - // } - // } - - // new_fragments.push_tree(fragments_cursor.suffix(&None), &None); - // let (visible_text, deleted_text) = new_ropes.finish(); - // drop(fragments_cursor); - // self.fragments = new_fragments; - // self.visible_text = visible_text; - // self.deleted_text = deleted_text; - Ok(()) } @@ -2259,8 +2203,6 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedOffset { { *self = Self::InvalidVersion; } - } else { - unreachable!(); } } } From e021154852104848794b0734367a50e5bd1baeab Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 17:47:02 -0700 Subject: [PATCH 20/40] Add logging in random concurrent edits test --- zed/src/editor/buffer.rs | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index bda48535d780bf95c5aaf55008a48a2f9af32df7..c8735e9b09478a5fac39091ef5321d0290d8a357 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -3131,9 +3131,13 @@ mod tests { 
mutation_count -= 1; } 71..=100 if network.has_unreceived(replica_id) => { - buffer - .apply_ops(network.receive(replica_id, &mut rng), None) - .unwrap(); + let ops = network.receive(replica_id, &mut rng); + log::info!( + "Peer {} applying {} ops from the network.", + replica_id, + ops.len() + ); + buffer.apply_ops(ops, None).unwrap(); } _ => {} }); @@ -3393,7 +3397,12 @@ mod tests { let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, cx.as_deref_mut()); let mut operations = vec![operation]; - log::info!("Mutating buffer at {:?}: {:?}", old_ranges, new_text); + log::info!( + "Mutating buffer {} at {:?}: {:?}", + self.replica_id, + old_ranges, + new_text + ); // Randomly add, remove or mutate selection sets. let replica_selection_sets = &self @@ -3428,6 +3437,7 @@ mod tests { let mut ops = Vec::new(); for _ in 0..rng.gen_range(1..5) { if let Some(edit_id) = self.history.ops.keys().choose(rng).copied() { + log::info!("Undoing buffer {} operation {:?}", self.replica_id, edit_id); ops.push(self.undo_or_redo(edit_id).unwrap()); } } From 68994248ee31adb11a0d5844c92620b89ae5bc09 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 17:47:25 -0700 Subject: [PATCH 21/40] Add custom error message for rope cursor slice precondition --- zed/src/editor/buffer/rope.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/zed/src/editor/buffer/rope.rs b/zed/src/editor/buffer/rope.rs index 05b30e389604504f21172894d26cbc73782ec23c..1b5d8fb8965266fb8fa330234e118463307a383d 100644 --- a/zed/src/editor/buffer/rope.rs +++ b/zed/src/editor/buffer/rope.rs @@ -213,7 +213,12 @@ impl<'a> Cursor<'a> { } pub fn slice(&mut self, end_offset: usize) -> Rope { - debug_assert!(end_offset >= self.offset); + debug_assert!( + end_offset >= self.offset, + "cannot slice backwards from {} to {}", + self.offset, + end_offset + ); let mut slice = Rope::new(); if let Some(start_chunk) = self.chunks.item() { From 2ea89695072f5de86880f0611fb2b449a6ea8143 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 2 Jun 2021 20:09:53 -0700 Subject: [PATCH 22/40] Add a simple unit test for applying remote edit operations --- zed/src/editor/buffer.rs | 42 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index c8735e9b09478a5fac39091ef5321d0290d8a357..88f1f7fef3ea47edaf211fe4ce8c1473d4552857 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -3077,6 +3077,48 @@ mod tests { }); } + #[gpui::test] + fn test_concurrent_edits(cx: &mut gpui::MutableAppContext) { + let text = "abcdef"; + + let buffer1 = cx.add_model(|cx| Buffer::new(1, text, cx)); + let buffer2 = cx.add_model(|cx| Buffer::new(2, text, cx)); + let buffer3 = cx.add_model(|cx| Buffer::new(3, text, cx)); + + let buf1_op = buffer1.update(cx, |buffer, cx| { + let op = buffer.edit(vec![1..2], "12", Some(cx)).unwrap(); + assert_eq!(buffer.text(), "a12cdef"); + op + }); + let buf2_op = buffer2.update(cx, |buffer, cx| { + let op = buffer.edit(vec![3..4], "34", Some(cx)).unwrap(); + assert_eq!(buffer.text(), "abc34ef"); + op + }); + let buf3_op = buffer3.update(cx, |buffer, cx| { + let op = buffer.edit(vec![5..6], "56", Some(cx)).unwrap(); + assert_eq!(buffer.text(), "abcde56"); + op + }); + + buffer1.update(cx, |buffer, _| { + buffer.apply_op(buf2_op.clone()).unwrap(); + buffer.apply_op(buf3_op.clone()).unwrap(); + }); + buffer2.update(cx, |buffer, _| { + buffer.apply_op(buf1_op.clone()).unwrap(); + 
buffer.apply_op(buf3_op.clone()).unwrap(); + }); + buffer3.update(cx, |buffer, _| { + buffer.apply_op(buf1_op.clone()).unwrap(); + buffer.apply_op(buf2_op.clone()).unwrap(); + }); + + assert_eq!(buffer1.read(cx).text(), "a12c34e56"); + assert_eq!(buffer2.read(cx).text(), "a12c34e56"); + assert_eq!(buffer3.read(cx).text(), "a12c34e56"); + } + #[gpui::test] fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext) { use crate::test::Network; From 72464a946075bcdf65e4ecda943c9d4ace52952c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 3 Jun 2021 18:40:01 +0200 Subject: [PATCH 23/40] WIP: Start on `apply_remote_edit` Co-Authored-By: Nathan Sobo --- zed/src/editor/buffer.rs | 314 ++++++++++++++++++--------------------- 1 file changed, 142 insertions(+), 172 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 88f1f7fef3ea47edaf211fe4ce8c1473d4552857..99230baa0f28890ec91ac4d254969a821f085f80 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -338,6 +338,7 @@ pub struct Insertion { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { insertion_id: time::Local, + lamport_timestamp: time::Lamport, len: usize, deletions: HashSet, max_undos: time::Global, @@ -466,6 +467,7 @@ impl Buffer { fragments.push( Fragment { insertion_id: Default::default(), + lamport_timestamp: Default::default(), len: visible_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -933,7 +935,7 @@ impl Buffer { let edit_id = self.local_clock.tick(); let lamport_timestamp = self.lamport_clock.tick(); - let edit = self.splice_fragments(&ranges, new_text, edit_id, lamport_timestamp); + let edit = self.apply_local_edit(&ranges, new_text, edit_id, lamport_timestamp); self.history.push(edit.clone()); self.history.push_undo(edit.id); @@ -1071,13 +1073,13 @@ impl Buffer { .. } => { if !self.version.observed(edit.id) { - self.apply_edit( + self.apply_remote_edit( &edit.version, &edit.ranges, edit.new_text.as_deref(), edit.id, lamport_timestamp, - )?; + ); self.version.observe(edit.id); self.history.push(edit); } @@ -1109,171 +1111,137 @@ impl Buffer { Ok(()) } - fn apply_edit( + fn apply_remote_edit( &mut self, version: &time::Global, ranges: &[Range], new_text: Option<&str>, local_timestamp: time::Local, lamport_timestamp: time::Lamport, - ) -> Result<()> { - let old_visible_text = mem::take(&mut self.visible_text); - let old_deleted_text = mem::take(&mut self.deleted_text); - let old_fragments = mem::take(&mut self.fragments); - let mut old_fragments = old_fragments.cursor::(); - let old_fragments_cx = Some(version.clone()); - - let mut new_fragments = SumTree::new(); - let mut new_ropes = - RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); - let mut ranges = ranges.iter().peekable(); - let mut fragment_start_offset = 0; - - // Push the fragments that precede the first edit and park the cursor over the fragment - // containing the start of the first edit. 
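Giving every `Fragment` a `lamport_timestamp` is what lets replicas agree on a single ordering for insertions made concurrently at the same position, which is what the convergence test above depends on. A toy Lamport clock with replica-id tie-breaking, assuming tick-then-return semantics (sketch only; the real type lives in zed/src/time.rs and may differ in details):

// Toy Lamport clock. Field order (value, then replica_id) makes the derived
// ordering break ties between concurrent events by replica id.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Lamport {
    value: u32,
    replica_id: u16,
}

impl Lamport {
    fn new(replica_id: u16) -> Self {
        Self { value: 0, replica_id }
    }
    // Advance the clock for a local event and return that event's timestamp.
    fn tick(&mut self) -> Self {
        self.value += 1;
        *self
    }
    // Merge in a timestamp received from another replica.
    fn observe(&mut self, other: Self) {
        self.value = self.value.max(other.value) + 1;
    }
}

fn main() {
    let (mut a, mut b) = (Lamport::new(1), Lamport::new(2));
    let ta = a.tick(); // concurrent with...
    let tb = b.tick(); // ...this one: same value, different replica
    assert!(ta < tb); // ties broken deterministically by replica id
    b.observe(ta); // anything b does after seeing ta now orders after it
    assert!(b.tick() > ta);
}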
- if let Some(first_range) = ranges.peek() { - let prefix_fragments = old_fragments.slice( - &VersionedOffset::Offset(first_range.start), - SeekBias::Right, - &old_fragments_cx, - ); - new_ropes.push_tree(prefix_fragments.summary().text); - new_fragments.push_tree(prefix_fragments, &None); - fragment_start_offset = old_fragments.start().offset(); + ) { + if ranges.is_empty() { + return; } - while let Some(range) = ranges.peek() { - let fragment = old_fragments.item(); - let fragment_end_offset = old_fragments.end(&old_fragments_cx).offset(); + let cx = Some(version.clone()); + let mut new_ropes = + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = old_fragments.slice( + &VersionedOffset::Offset(ranges[0].start), + SeekBias::Left, + &cx, + ); + new_ropes.push_tree(new_fragments.summary().text); - if let Some(fragment) = fragment { - // Was this fragment visible in the edit's base version? If not, push it into - // the new fragments, skip it, and continue the loop. - if !version.observed(fragment.insertion_id) { - new_ropes.push_fragment(fragment, fragment.visible); - new_fragments.push(fragment.clone(), &None); - old_fragments.next(&old_fragments_cx); - continue; + let mut fragment_start = old_fragments.start().offset(); + for range in ranges { + if range.start > old_fragments.end(&cx).offset() { + if old_fragments.end(&cx).offset() > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = old_fragments.end(&cx).offset() - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + old_fragments.next(&cx); } - // If the current fragment doesn't intersect the current range, push the remainder - // of the fragment and then slice to the fragment containing the start of the - // current range. - if range.start > fragment_end_offset { - if fragment_end_offset > fragment_start_offset { - let suffix = Fragment { - len: fragment_end_offset - fragment_start_offset, - deletions: fragment.deletions.clone(), - max_undos: fragment.max_undos.clone(), - visible: fragment.visible, - insertion_id: fragment.insertion_id, - }; - new_ropes.push_fragment(&suffix, fragment.visible); - new_fragments.push(suffix, &None); - } + let slice = + old_fragments.slice(&VersionedOffset::Offset(range.start), SeekBias::Left, &cx); + new_ropes.push_tree(slice.summary().text); + new_fragments.push_tree(slice, &None); + fragment_start = old_fragments.start().offset(); + } - let prefix_fragments = old_fragments.slice( - &VersionedOffset::Offset(range.start), - SeekBias::Right, - &old_fragments_cx, - ); - new_ropes.push_tree(prefix_fragments.summary().text); - new_fragments.push_tree(prefix_fragments, &None); - fragment_start_offset = old_fragments.start().offset(); + // If we are at the end of a non-concurrent fragment, advance to the next one. + if let Some(fragment) = old_fragments.item() { + let fragment_end = old_fragments.end(&cx).offset(); + if range.start == fragment_end && fragment_end > fragment_start { + let mut fragment = fragment.clone(); + fragment.len = fragment_end - fragment_start; + new_ropes.push_fragment(&fragment, fragment.visible); + new_fragments.push(fragment, &None); + old_fragments.next(&cx); + fragment_start = old_fragments.start().offset(); } + } - // Now the current range intersects the current fragment. - // If there is a piece of the fragment preceding the current range, consume it. 
- if range.start > fragment_start_offset { - let prefix = Fragment { - len: range.start - fragment_start_offset, - deletions: fragment.deletions.clone(), - max_undos: fragment.max_undos.clone(), - visible: fragment.visible, - insertion_id: fragment.insertion_id, - }; - fragment_start_offset += prefix.len; - new_ropes.push_fragment(&prefix, fragment.visible); - new_fragments.push(prefix, &None); + // Skip over insertions that are concurrent to this edit, but have a lower lamport + // timestamp. + while let Some(fragment) = old_fragments.item() { + if range.start == fragment_start && fragment.lamport_timestamp > lamport_timestamp { + new_ropes.push_fragment(fragment, fragment.visible); + new_fragments.push(fragment.clone(), &None); + old_fragments.next(&cx); + debug_assert_eq!(fragment_start, range.start); + } else { + break; } + } + debug_assert!(fragment_start <= range.start); + + if range.start > fragment_start { + let mut prefix = old_fragments.item().unwrap().clone(); + prefix.len = range.start - fragment_start; + fragment_start = range.start; + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix, &None); + } - // Push the portion of the current fragment that intersects the current range, - // marking it as deleted. - if range.end > range.start { - let deleted_end = cmp::min(range.end, fragment_end_offset); - - let mut deletions = fragment.deletions.clone(); - deletions.insert(local_timestamp); - - let deleted = Fragment { - len: deleted_end - fragment_start_offset, - deletions, - max_undos: fragment.max_undos.clone(), - visible: false, - insertion_id: fragment.insertion_id, - }; - fragment_start_offset += deleted.len; - new_ropes.push_fragment(&deleted, fragment.visible); - new_fragments.push(deleted, &None); - } + if let Some(new_text) = new_text { + new_ropes.push_str(new_text); + new_fragments.push( + Fragment { + insertion_id: local_timestamp, + lamport_timestamp, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, + &None, + ); + } - // Push any new text - if let Some(new_next) = new_text { - new_ropes.push_str(new_next); - new_fragments.push( - Fragment { - len: new_next.len(), - deletions: Default::default(), - max_undos: Default::default(), // TODO: Is this right? - visible: true, - insertion_id: local_timestamp, - }, - &None, - ); + while range.end > fragment_start { + let fragment = old_fragments.item().unwrap(); + let fragment_end = old_fragments.end(&cx).offset(); + let mut intersection = fragment.clone(); + if intersection.was_visible(&version, &self.undo_map) { + let intersection_end = cmp::min(range.end, fragment_end); + intersection.len = intersection_end - fragment_start; + intersection.deletions.insert(local_timestamp); + intersection.visible = false; + fragment_start = intersection_end; } + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); - // Which ends first? The current fragment or the current range? If the current range - // ends before the current fragment, advance to the next range and preserve the - // current fragment. Otherwise, advance to next fragment and preserve the current - // range. 
- if range.end < fragment_end_offset { - ranges.next(); - } else { - old_fragments.next(&old_fragments_cx); - fragment_start_offset = fragment_end_offset; + if range.end >= fragment_end { + old_fragments.next(&cx); } - } else { - // Push a fragment containing the new text } } - if let Some(fragment) = old_fragments.item() { - let fragment_end_offset = old_fragments.end(&old_fragments_cx).offset(); - if fragment_end_offset > fragment_start_offset { - let suffix = Fragment { - len: fragment_end_offset - fragment_start_offset, - deletions: fragment.deletions.clone(), - max_undos: fragment.max_undos.clone(), - visible: fragment.visible, - insertion_id: fragment.insertion_id, - }; - new_ropes.push_fragment(&suffix, fragment.visible); - new_fragments.push(suffix, &None); - } - - let suffix_fragments = old_fragments.suffix(&old_fragments_cx); - new_ropes.push_tree(suffix_fragments.summary().text); - new_fragments.push_tree(suffix_fragments, &None); + let fragment_end = old_fragments.end(&cx).offset(); + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + old_fragments.next(&cx); } + let suffix = old_fragments.suffix(&cx); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); + drop(old_fragments); self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; self.local_clock.observe(local_timestamp); self.lamport_clock.observe(lamport_timestamp); - Ok(()) } pub fn undo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { @@ -1429,39 +1397,34 @@ impl Buffer { } fn can_apply_op(&self, op: &Operation) -> bool { - true - // if self.deferred_replicas.contains(&op.replica_id()) { - // false - // } else { - // match op { - // Operation::Edit { edit, .. } => { - // self.version.observed(edit.start_id) - // && self.version.observed(edit.end_id) - // && edit.version_in_range <= self.version - // } - // Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), - // Operation::UpdateSelections { selections, .. } => { - // if let Some(selections) = selections { - // selections.iter().all(|selection| { - // let contains_start = match &selection.start { - // Anchor::Middle { version, .. } => self.version >= *version, - // _ => true, - // }; - // let contains_end = match &selection.end { - // Anchor::Middle { version, .. } => self.version >= *version, - // _ => true, - // }; - // contains_start && contains_end - // }) - // } else { - // true - // } - // } - // } - // } - } - - fn splice_fragments( + if self.deferred_replicas.contains(&op.replica_id()) { + false + } else { + match op { + Operation::Edit { edit, .. } => self.version >= edit.version, + Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), + Operation::UpdateSelections { selections, .. } => { + if let Some(selections) = selections { + selections.iter().all(|selection| { + let contains_start = match &selection.start { + Anchor::Middle { version, .. } => self.version >= *version, + _ => true, + }; + let contains_end = match &selection.end { + Anchor::Middle { version, .. 
} => self.version >= *version, + _ => true, + }; + contains_start && contains_end + }) + } else { + true + } + } + } + } + } + + fn apply_local_edit( &mut self, old_ranges: &[Range], new_text: Option, @@ -1522,8 +1485,9 @@ impl Buffer { if let Some(new_text) = new_text.clone() { let new_fragment = Fragment { - len: new_text.len(), insertion_id: edit_id, + lamport_timestamp, + len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), visible: true, @@ -1626,8 +1590,9 @@ impl Buffer { if let Some(new_text) = new_text { let new_fragment = Fragment { - len: new_text.len(), insertion_id: edit_id, + lamport_timestamp, + len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), visible: true, @@ -3192,7 +3157,12 @@ mod tests { let first_buffer = buffers[0].read(cx); for buffer in &buffers[1..] { let buffer = buffer.read(cx); - assert_eq!(buffer.text(), first_buffer.text()); + assert_eq!( + buffer.text(), + first_buffer.text(), + "Replica {} text != Replica 0 text", + buffer.replica_id + ); assert_eq!( buffer.all_selections().collect::>(), first_buffer.all_selections().collect::>() From 9a29f55777c0ec828b82f986bc34a1914eed95c4 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 3 Jun 2021 19:32:54 +0200 Subject: [PATCH 24/40] WIP: Re-implement apply_local_edit to look more like apply_remote_edit Co-Authored-By: Nathan Sobo Co-Authored-By: Max Brunsfeld --- zed/src/editor.rs | 4 +- zed/src/editor/buffer.rs | 332 ++++++++++++++++----------------------- 2 files changed, 133 insertions(+), 203 deletions(-) diff --git a/zed/src/editor.rs b/zed/src/editor.rs index 4919da91031d20be757ea43b986a833a4fb30d64..58a723755f7af5f2034d3203e4ad0c2a1385e44d 100644 --- a/zed/src/editor.rs +++ b/zed/src/editor.rs @@ -723,9 +723,7 @@ impl Editor { let mut new_selections = Vec::new(); self.buffer.update(cx, |buffer, cx| { let edit_ranges = old_selections.iter().map(|(_, range)| range.clone()); - if let Err(error) = buffer.edit(edit_ranges, text.as_str(), Some(cx)) { - log::error!("error inserting text: {}", error); - }; + buffer.edit(edit_ranges, text.as_str(), Some(cx)); let text_len = text.len() as isize; let mut delta = 0_isize; new_selections = old_selections diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 99230baa0f28890ec91ac4d254969a821f085f80..e628f169ca63fb5c82a8c51ec2cd99f2a3bcc4fa 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -29,7 +29,6 @@ use std::{ cmp, hash::BuildHasher, iter::Iterator, - mem, ops::{Deref, DerefMut, Range}, str, sync::Arc, @@ -906,14 +905,12 @@ impl Buffer { ranges: I, new_text: T, cx: Option<&mut ModelContext>, - ) -> Result + ) -> Option where I: IntoIterator>, S: ToOffset, T: Into, { - self.start_transaction_at(None, Instant::now())?; - let new_text = new_text.into(); let new_text = if new_text.len() > 0 { Some(new_text) @@ -933,21 +930,26 @@ impl Buffer { }) .collect::>>(); - let edit_id = self.local_clock.tick(); - let lamport_timestamp = self.lamport_clock.tick(); - let edit = self.apply_local_edit(&ranges, new_text, edit_id, lamport_timestamp); + if ranges.is_empty() { + None + } else { + self.start_transaction_at(None, Instant::now()).unwrap(); + let edit_id = self.local_clock.tick(); + let lamport_timestamp = self.lamport_clock.tick(); + let edit = self.apply_local_edit(&ranges, new_text, edit_id, lamport_timestamp); - self.history.push(edit.clone()); - self.history.push_undo(edit.id); - self.last_edit = edit.id; - self.version.observe(edit.id); + 
self.history.push(edit.clone()); + self.history.push_undo(edit.id); + self.last_edit = edit.id; + self.version.observe(edit.id); - self.end_transaction_at(None, Instant::now(), cx)?; + self.end_transaction_at(None, Instant::now(), cx).unwrap(); - Ok(Operation::Edit { - edit, - lamport_timestamp, - }) + Some(Operation::Edit { + edit, + lamport_timestamp, + }) + } } fn did_edit(&self, was_dirty: bool, cx: &mut ModelContext) { @@ -1222,12 +1224,17 @@ impl Buffer { } } - let fragment_end = old_fragments.end(&cx).offset(); - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); + if old_fragments + .item() + .map_or(false, |f| version.observed(f.insertion_id)) + { + let fragment_end = old_fragments.end(&cx).offset(); + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } old_fragments.next(&cx); } @@ -1426,185 +1433,106 @@ impl Buffer { fn apply_local_edit( &mut self, - old_ranges: &[Range], + ranges: &[Range], new_text: Option, - edit_id: time::Local, + local_timestamp: time::Local, lamport_timestamp: time::Lamport, ) -> EditOperation { let mut edit = EditOperation { - id: edit_id, - version: self.version.clone(), - ranges: Vec::with_capacity(old_ranges.len()), + id: local_timestamp, + version: self.version(), + ranges: Vec::with_capacity(ranges.len()), + // TODO: avoid cloning here new_text: new_text.clone(), }; - let mut old_ranges = old_ranges.iter(); - let mut cur_range = old_ranges.next(); - if cur_range.is_none() { - return edit; - } - - let old_fragments = mem::take(&mut self.fragments); - let old_visible_text = mem::take(&mut self.visible_text); - let old_deleted_text = mem::take(&mut self.deleted_text); - - let mut fragments_cursor = old_fragments.cursor::(); - let mut new_fragments = - fragments_cursor.slice(&cur_range.as_ref().unwrap().start, SeekBias::Right, &None); - let mut new_ropes = - RopeBuilder::new(old_visible_text.cursor(0), old_deleted_text.cursor(0)); + RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); + let mut old_fragments = self.fragments.cursor::(); + let mut new_fragments = old_fragments.slice(&ranges[0].start, SeekBias::Right, &None); new_ropes.push_tree(new_fragments.summary().text); - while cur_range.is_some() && fragments_cursor.item().is_some() { - let mut fragment = fragments_cursor.item().unwrap().clone(); - let mut fragment_start = fragments_cursor.start().0; - let mut fragment_end = fragment_start + fragment.visible_len(); - let mut full_range = 0..0; - let fragment_was_visible = fragment.visible; - - // Find all splices that start or end within the current fragment. Then, split the - // fragment and reassemble it in both trees accounting for the deleted and the newly - // inserted text. 
- while cur_range.as_ref().map_or(false, |r| r.start < fragment_end) { - let range = cur_range.clone().unwrap(); - - if range.start >= fragment_start { - full_range.start = - fragments_cursor.start().1 .0 + (range.start - fragments_cursor.start().0); - - if range.start > fragment_start { - let mut prefix = fragment.clone(); - prefix.len = range.start - fragment_start; - fragment.len -= prefix.len; - - new_ropes.push_fragment(&prefix, prefix.visible); - new_fragments.push(prefix.clone(), &None); - fragment_start = range.start; - } - - if let Some(new_text) = new_text.clone() { - let new_fragment = Fragment { - insertion_id: edit_id, - lamport_timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; - - new_ropes.push_str(&new_text); - new_fragments.push(new_fragment, &None); - } - } - - if range.end < fragment_end { - if range.end > fragment_start { - let mut prefix = fragment.clone(); - prefix.len = range.end - fragment_start; - if prefix.visible { - prefix.deletions.insert(edit_id); - prefix.visible = false; - } - fragment.len -= prefix.len; - new_ropes.push_fragment(&prefix, fragment_was_visible); - new_fragments.push(prefix.clone(), &None); - fragment_start = range.end; - } - } else { - if fragment.visible { - fragment.deletions.insert(edit_id); - fragment.visible = false; - } + let mut fragment_start = old_fragments.start().visible; + for range in ranges { + if range.start > old_fragments.end(&None).visible { + if old_fragments.end(&None).visible > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = old_fragments.end(&None).visible - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + old_fragments.next(&None); } - // If the splice ends inside this fragment, we can advance to the next splice and - // check if it also intersects the current fragment. Otherwise we break out of the - // loop and find the first fragment that the splice does not contain fully. - if range.end <= fragment_end { - full_range.end = - fragments_cursor.start().1 .0 + (range.end - fragments_cursor.start().0); - edit.ranges.push(full_range.clone()); - cur_range = old_ranges.next(); - } else { - break; - } + let slice = old_fragments.slice(&range.start, SeekBias::Right, &None); + new_ropes.push_tree(slice.summary().text); + new_fragments.push_tree(slice, &None); + fragment_start = old_fragments.start().visible; } - new_ropes.push_fragment(&fragment, fragment_was_visible); - new_fragments.push(fragment, &None); + let full_range_start = range.start + old_fragments.start().deleted; - // Scan forward until we find a fragment that is not fully contained by the current splice. 
- fragments_cursor.next(&None); - if let Some(range) = cur_range.clone() { - while let Some(fragment) = fragments_cursor.item() { - let fragment_was_visible = fragment.visible; - fragment_start = fragments_cursor.start().0; - full_range.end = - fragments_cursor.start().1 .0 + (range.end - fragments_cursor.start().0); - - fragment_end = fragment_start + fragment.visible_len(); - if range.start < fragment_start && range.end >= fragment_end { - let mut new_fragment = fragment.clone(); - if new_fragment.visible { - new_fragment.deletions.insert(edit_id); - new_fragment.visible = false; - } + if range.start > fragment_start { + let mut prefix = old_fragments.item().unwrap().clone(); + prefix.len = range.start - fragment_start; + fragment_start = range.start; + new_ropes.push_fragment(&prefix, prefix.visible); + new_fragments.push(prefix, &None); + } - new_ropes.push_fragment(&new_fragment, fragment_was_visible); - new_fragments.push(new_fragment, &None); - fragments_cursor.next(&None); + if let Some(new_text) = new_text.as_deref() { + new_ropes.push_str(new_text); + new_fragments.push( + Fragment { + insertion_id: local_timestamp, + lamport_timestamp, + len: new_text.len(), + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }, + &None, + ); + } - if range.end == fragment_end { - edit.ranges.push(full_range.clone()); - cur_range = old_ranges.next(); - break; - } - } else { - break; - } + while range.end > fragment_start { + let fragment = old_fragments.item().unwrap(); + let fragment_end = old_fragments.end(&None).visible; + let mut intersection = fragment.clone(); + if intersection.visible { + let intersection_end = cmp::min(range.end, fragment_end); + intersection.len = intersection_end - fragment_start; + intersection.deletions.insert(local_timestamp); + intersection.visible = false; + fragment_start = intersection_end; } + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); - // If the splice we are currently evaluating starts after the end of the fragment - // that the cursor is parked at, we should seek to the next splice's start range - // and push all the fragments in between into the new tree. - if cur_range.as_ref().map_or(false, |r| r.start > fragment_end) { - let slice = fragments_cursor.slice( - &cur_range.as_ref().unwrap().start, - SeekBias::Right, - &None, - ); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); + if range.end >= fragment_end { + old_fragments.next(&None); } } - } - - // Handle range that is at the end of the buffer if it exists. There should never be - // multiple because ranges must be disjoint. 
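The ranges pushed onto `edit.ranges` are full offsets: the visible offset plus the length of any deleted (tombstoned) text that precedes it, which is exactly what adding `old_fragments.start().deleted` computes. A tiny worked example of that conversion (toy numbers, illustration only):

fn main() {
    // "abcdef" with "cd" deleted leaves fragments ("ab", visible), ("cd", tombstone),
    // ("ef", visible); the visible text is "abef".
    let deleted_bytes_before_range = 2; // the tombstoned "cd" precedes the edit
    let visible_range = 2..3; // the "e" in the visible text "abef"
    let full_range = (visible_range.start + deleted_bytes_before_range)
        ..(visible_range.end + deleted_bytes_before_range);
    assert_eq!(full_range, 4..5); // the same "e" located in the full text "abcdef"
}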
- if cur_range.is_some() { - debug_assert_eq!(old_ranges.next(), None); - let full_offset = fragments_cursor.end(&None).1 .0; - edit.ranges.push(full_offset..full_offset); - - if let Some(new_text) = new_text { - let new_fragment = Fragment { - insertion_id: edit_id, - lamport_timestamp, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; + let full_range_end = range.end + old_fragments.start().deleted; + edit.ranges.push(full_range_start..full_range_end); + } - new_ropes.push_str(&new_text); - new_fragments.push(new_fragment, &None); - } + let fragment_end = old_fragments.end(&None).visible; + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } + if old_fragments.item().is_some() { + old_fragments.next(&None); } - new_fragments.push_tree(fragments_cursor.suffix(&None), &None); + let suffix = old_fragments.suffix(&None); + new_ropes.push_tree(suffix.summary().text); + new_fragments.push_tree(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); + drop(old_fragments); self.fragments = new_fragments; self.visible_text = visible_text; @@ -2054,14 +1982,6 @@ impl Fragment { .iter() .all(|d| !version.observed(*d) || undos.was_undone(*d, version)) } - - fn visible_len(&self) -> usize { - if self.visible { - self.len - } else { - 0 - } - } } impl sum_tree::Item for Fragment { @@ -2294,6 +2214,7 @@ mod tests { cell::RefCell, cmp::Ordering, env, fs, + iter::FromIterator, rc::Rc, sync::atomic::{self, AtomicUsize}, }; @@ -2375,7 +2296,23 @@ mod tests { #[gpui::test] fn test_random_edits(cx: &mut gpui::MutableAppContext) { - for seed in 0..100 { + let iterations = env::var("ITERATIONS") + .map(|i| i.parse().expect("invalid `ITERATIONS` variable")) + .unwrap_or(100); + let operations = env::var("OPERATIONS") + .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) + .unwrap_or(10); + let seed_range = if let Ok(seed) = env::var("SEED") { + let seed = seed.parse().expect("invalid `SEED` variable"); + seed..seed + 1 + } else { + 0..iterations + }; + + // let seed_range = 0..1; + // let operations = 1; + + for seed in seed_range { println!("{:?}", seed); let mut rng = &mut StdRng::seed_from_u64(seed); @@ -2386,7 +2323,7 @@ mod tests { cx.add_model(|cx| { let mut buffer = Buffer::new(0, reference_string.as_str(), cx); let mut buffer_versions = Vec::new(); - for _i in 0..10 { + for _i in 0..operations { let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); @@ -3376,7 +3313,7 @@ mod tests { rng: &mut T, old_range_count: usize, cx: Option<&mut ModelContext>, - ) -> (Vec>, String, Operation) + ) -> (Vec>, String, Option) where T: Rng, { @@ -3390,11 +3327,13 @@ mod tests { } let new_text_len = rng.gen_range(0..10); let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); - - let operation = self - .edit(old_ranges.iter().cloned(), new_text.as_str(), cx) - .unwrap(); - + log::info!( + "Mutating buffer {} at {:?}: {:?}", + self.replica_id, + old_ranges, + new_text + ); + let operation = self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx); (old_ranges, new_text, operation) } @@ -3406,15 +3345,8 @@ mod tests { where T: Rng, { - let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, 
cx.as_deref_mut()); - let mut operations = vec![operation]; - - log::info!( - "Mutating buffer {} at {:?}: {:?}", - self.replica_id, - old_ranges, - new_text - ); + let (old_ranges, new_text, operation) = self.randomly_edit(rng, 2, cx.as_deref_mut()); + let mut operations = Vec::from_iter(operation); // Randomly add, remove or mutate selection sets. let replica_selection_sets = &self From f1010505d95f5093b288f684429e4be740598d11 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 11:23:25 -0700 Subject: [PATCH 25/40] Create shorter Debug impls for clocks --- zed/src/time.rs | 42 +++++++++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 7 deletions(-) diff --git a/zed/src/time.rs b/zed/src/time.rs index bcc55336535407fdddbdef0dca69a2727a0ec3de..c4cff793c93ef5a0a225a9d310e9102e95e749a4 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -1,21 +1,24 @@ use smallvec::SmallVec; -use std::cmp::{self, Ordering}; -use std::ops::{Add, AddAssign}; -use std::slice; +use std::{ + cmp::{self, Ordering}, + fmt, + ops::{Add, AddAssign}, + slice, +}; pub type ReplicaId = u16; pub type Seq = u32; -#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq, Ord, PartialOrd)] +#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Ord, PartialOrd)] pub struct Local { pub replica_id: ReplicaId, pub value: Seq, } -#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] pub struct Lamport { - pub value: Seq, pub replica_id: ReplicaId, + pub value: Seq, } impl Local { @@ -55,7 +58,7 @@ impl<'a> AddAssign<&'a Local> for Local { } } -#[derive(Clone, Debug, Default, Hash, Eq, PartialEq)] +#[derive(Clone, Default, Hash, Eq, PartialEq)] pub struct Global(SmallVec<[Local; 3]>); impl Global { @@ -154,3 +157,28 @@ impl Lamport { self.value = cmp::max(self.value, timestamp.value) + 1; } } + +impl fmt::Debug for Local { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Local {{{}: {}}}", self.replica_id, self.value) + } +} + +impl fmt::Debug for Lamport { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Lamport {{{}: {}}}", self.replica_id, self.value) + } +} + +impl fmt::Debug for Global { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Global {{")?; + for (i, element) in self.0.iter().enumerate() { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}: {}", element.replica_id, element.value)?; + } + write!(f, "}}") + } +} From 02e4745d14507396eee558f57030f598f2ce3c7b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 13:45:39 -0700 Subject: [PATCH 26/40] Improve logging for randomized buffer test --- zed/src/editor/buffer.rs | 41 ++++++++++++++++++++++++++++------------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index e628f169ca63fb5c82a8c51ec2cd99f2a3bcc4fa..ffc61a422baf0538aea59ccf52bb385a5bda0c8c 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -336,12 +336,12 @@ pub struct Insertion { #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { + len: usize, + visible: bool, insertion_id: time::Local, lamport_timestamp: time::Lamport, - len: usize, deletions: HashSet, max_undos: time::Global, - visible: bool, } #[derive(Eq, PartialEq, Clone, Debug)] @@ -2309,9 +2309,6 @@ mod tests { 0..iterations }; - // let seed_range = 0..1; - // let operations = 1; - for seed in seed_range { println!("{:?}", seed); let mut rng = 
&mut StdRng::seed_from_u64(seed); @@ -2323,12 +2320,23 @@ mod tests { cx.add_model(|cx| { let mut buffer = Buffer::new(0, reference_string.as_str(), cx); let mut buffer_versions = Vec::new(); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); + for _i in 0..operations { let (old_ranges, new_text, _) = buffer.randomly_mutate(rng, None); for old_range in old_ranges.iter().rev() { reference_string.replace_range(old_range.clone(), &new_text); } assert_eq!(buffer.text(), reference_string); + log::info!( + "buffer text {:?}, version: {:?}", + buffer.text(), + buffer.version() + ); if rng.gen_bool(0.25) { buffer.randomly_undo_redo(rng); @@ -2347,20 +2355,29 @@ mod tests { } for mut old_buffer in buffer_versions { + let edits = buffer + .edits_since(old_buffer.version.clone()) + .collect::>(); + + log::info!( + "mutating old buffer version {:?}, text: {:?}, edits since: {:?}", + old_buffer.version(), + old_buffer.text(), + edits, + ); + let mut delta = 0_isize; for Edit { old_range, new_range, .. - } in buffer.edits_since(old_buffer.version.clone()) + } in edits { let old_len = old_range.end - old_range.start; let new_len = new_range.end - new_range.start; let old_start = (old_range.start as isize + delta) as usize; let new_text: String = buffer.text_for_range(new_range).collect(); - old_buffer - .edit(Some(old_start..old_start + old_len), new_text, None) - .unwrap(); + old_buffer.edit(Some(old_start..old_start + old_len), new_text, None); delta += new_len as isize - old_len as isize; } @@ -3328,7 +3345,7 @@ mod tests { let new_text_len = rng.gen_range(0..10); let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect(); log::info!( - "Mutating buffer {} at {:?}: {:?}", + "mutating buffer {} at {:?}: {:?}", self.replica_id, old_ranges, new_text @@ -3345,7 +3362,7 @@ mod tests { where T: Rng, { - let (old_ranges, new_text, operation) = self.randomly_edit(rng, 2, cx.as_deref_mut()); + let (old_ranges, new_text, operation) = self.randomly_edit(rng, 5, cx.as_deref_mut()); let mut operations = Vec::from_iter(operation); // Randomly add, remove or mutate selection sets. 
@@ -3381,7 +3398,7 @@ mod tests { let mut ops = Vec::new(); for _ in 0..rng.gen_range(1..5) { if let Some(edit_id) = self.history.ops.keys().choose(rng).copied() { - log::info!("Undoing buffer {} operation {:?}", self.replica_id, edit_id); + log::info!("undoing buffer {} operation {:?}", self.replica_id, edit_id); ops.push(self.undo_or_redo(edit_id).unwrap()); } } From 33472ebf7e46377cb2283d715aedfb21dc5f1409 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 13:51:13 -0700 Subject: [PATCH 27/40] Ensure fragments are only consumed once in apply_local_edit --- zed/src/editor/buffer.rs | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index ffc61a422baf0538aea59ccf52bb385a5bda0c8c..b138742ed9bf933788daa34b5d8cbfd920d8f225 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1455,11 +1455,14 @@ impl Buffer { let mut fragment_start = old_fragments.start().visible; for range in ranges { if range.start > old_fragments.end(&None).visible { - if old_fragments.end(&None).visible > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = old_fragments.end(&None).visible - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); + if fragment_start > old_fragments.start().visible { + let fragment_end = old_fragments.end(&None).visible; + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } old_fragments.next(&None); } @@ -1517,24 +1520,24 @@ impl Buffer { edit.ranges.push(full_range_start..full_range_end); } - let fragment_end = old_fragments.end(&None).visible; - if fragment_end > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); - } - if old_fragments.item().is_some() { + if fragment_start > old_fragments.start().visible { + let fragment_end = old_fragments.end(&None).visible; + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } old_fragments.next(&None); } let suffix = old_fragments.suffix(&None); new_ropes.push_tree(suffix.summary().text); new_fragments.push_tree(suffix, &None); - let (visible_text, deleted_text) = new_ropes.finish(); - drop(old_fragments); + drop(old_fragments); self.fragments = new_fragments; + let (visible_text, deleted_text) = new_ropes.finish(); self.visible_text = visible_text; self.deleted_text = deleted_text; edit From f6bb1a9572f17bf46e3d4a57952343f9ee93637b Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 15:33:43 -0700 Subject: [PATCH 28/40] Implement Ord manually for time::Lamport --- zed/src/time.rs | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/zed/src/time.rs b/zed/src/time.rs index c4cff793c93ef5a0a225a9d310e9102e95e749a4..00f4e54c186206462e25b288e349010ceb0600e3 100644 --- a/zed/src/time.rs +++ b/zed/src/time.rs @@ -15,7 +15,7 @@ pub struct Local { pub value: Seq, } -#[derive(Clone, Copy, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[derive(Clone, Copy, Default, Eq, Hash, 
PartialEq)] pub struct Lamport { pub replica_id: ReplicaId, pub value: Seq, @@ -139,6 +139,21 @@ impl PartialOrd for Global { } } +impl Ord for Lamport { + fn cmp(&self, other: &Self) -> Ordering { + // Use the replica id to break ties between concurrent events. + self.value + .cmp(&other.value) + .then_with(|| self.replica_id.cmp(&other.replica_id)) + } +} + +impl PartialOrd for Lamport { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + impl Lamport { pub fn new(replica_id: ReplicaId) -> Self { Self { From 6f0ef36ec4a6e10b4f77c3d82f7964cb57a0777c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 16:49:55 -0700 Subject: [PATCH 29/40] Make SEED and ITERATIONS variables work independently This way, after finding one failure, you can still search for a simpler failure by running another series of iterations starting from the next seed. --- zed/src/editor/buffer.rs | 37 +++++++++++++++++-------------------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index b138742ed9bf933788daa34b5d8cbfd920d8f225..e954ff514247e2b7f4969fd1aa23ce406e26db98 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2305,14 +2305,10 @@ mod tests { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let seed_range = if let Ok(seed) = env::var("SEED") { - let seed = seed.parse().expect("invalid `SEED` variable"); - seed..seed + 1 - } else { - 0..iterations - }; + let start_seed = + env::var("SEED").map_or(0, |seed| seed.parse().expect("invalid `SEED` variable")); - for seed in seed_range { + for seed in start_seed..start_seed + iterations { println!("{:?}", seed); let mut rng = &mut StdRng::seed_from_u64(seed); @@ -3054,14 +3050,10 @@ mod tests { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let seed_range = if let Ok(seed) = env::var("SEED") { - let seed = seed.parse().expect("invalid `SEED` variable"); - seed..seed + 1 - } else { - 0..iterations - }; + let start_seed = + env::var("SEED").map_or(0, |seed| seed.parse().expect("invalid `SEED` variable")); - for seed in seed_range { + for seed in start_seed..start_seed + iterations { dbg!(seed); let mut rng = &mut StdRng::seed_from_u64(seed); @@ -3079,6 +3071,8 @@ mod tests { network.add_peer(i as u16); } + log::info!("initial text: {:?}", base_text); + let mut mutation_count = operations; loop { let replica_index = rng.gen_range(0..peers); @@ -3086,6 +3080,7 @@ mod tests { buffers[replica_index].update(cx, |buffer, _| match rng.gen_range(0..=100) { 0..=50 if mutation_count != 0 => { let (_, _, ops) = buffer.randomly_mutate(&mut rng, None); + log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text()); network.broadcast(replica_id, ops, &mut rng); mutation_count -= 1; } @@ -3096,12 +3091,14 @@ mod tests { } 71..=100 if network.has_unreceived(replica_id) => { let ops = network.receive(replica_id, &mut rng); - log::info!( - "Peer {} applying {} ops from the network.", - replica_id, - ops.len() - ); - buffer.apply_ops(ops, None).unwrap(); + if !ops.is_empty() { + log::info!( + "peer {} applying {} ops from the network.", + replica_id, + ops.len() + ); + buffer.apply_ops(ops, None).unwrap(); + } } _ => {} }); From de9626ac12b7af452b9a4b3e42d4879af55dc073 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 17:19:58 -0700 Subject: [PATCH 30/40] Get random concurrent edits test 
passing, except for undo --- zed/src/editor/buffer.rs | 113 +++++++++++++++++++++++---------------- 1 file changed, 68 insertions(+), 45 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index e954ff514247e2b7f4969fd1aa23ce406e26db98..0561821489d8744a0a5b3e4c01a102070fd175f8 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1138,12 +1138,20 @@ impl Buffer { let mut fragment_start = old_fragments.start().offset(); for range in ranges { - if range.start > old_fragments.end(&cx).offset() { - if old_fragments.end(&cx).offset() > fragment_start { - let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = old_fragments.end(&cx).offset() - fragment_start; - new_ropes.push_fragment(&suffix, suffix.visible); - new_fragments.push(suffix, &None); + let fragment_end = old_fragments.end(&cx).offset(); + + // If the current fragment ends before this range, then jump ahead to the first fragment + // that extends past the start of this range, reusing any intervening fragments. + if fragment_end < range.start { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. + if fragment_start > old_fragments.start().offset() { + if fragment_end > fragment_start { + let mut suffix = old_fragments.item().unwrap().clone(); + suffix.len = fragment_end - fragment_start; + new_ropes.push_fragment(&suffix, suffix.visible); + new_fragments.push(suffix, &None); + } old_fragments.next(&cx); } @@ -1155,22 +1163,20 @@ impl Buffer { } // If we are at the end of a non-concurrent fragment, advance to the next one. - if let Some(fragment) = old_fragments.item() { - let fragment_end = old_fragments.end(&cx).offset(); - if range.start == fragment_end && fragment_end > fragment_start { - let mut fragment = fragment.clone(); - fragment.len = fragment_end - fragment_start; - new_ropes.push_fragment(&fragment, fragment.visible); - new_fragments.push(fragment, &None); - old_fragments.next(&cx); - fragment_start = old_fragments.start().offset(); - } + let fragment_end = old_fragments.end(&cx).offset(); + if fragment_end == range.start && fragment_end > fragment_start { + let mut fragment = old_fragments.item().unwrap().clone(); + fragment.len = fragment_end - fragment_start; + new_ropes.push_fragment(&fragment, fragment.visible); + new_fragments.push(fragment, &None); + old_fragments.next(&cx); + fragment_start = old_fragments.start().offset(); } // Skip over insertions that are concurrent to this edit, but have a lower lamport // timestamp. while let Some(fragment) = old_fragments.item() { - if range.start == fragment_start && fragment.lamport_timestamp > lamport_timestamp { + if fragment_start == range.start && fragment.lamport_timestamp > lamport_timestamp { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); old_fragments.next(&cx); @@ -1181,7 +1187,8 @@ impl Buffer { } debug_assert!(fragment_start <= range.start); - if range.start > fragment_start { + // Preserve any portion of the current fragment that precedes this range. + if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; fragment_start = range.start; @@ -1189,6 +1196,7 @@ impl Buffer { new_fragments.push(prefix, &None); } + // Insert the new text before any existing fragments within the range. 
if let Some(new_text) = new_text { new_ropes.push_str(new_text); new_fragments.push( @@ -1204,30 +1212,32 @@ impl Buffer { ); } - while range.end > fragment_start { + // Advance through every fragment that intersects this range, marking the intersecting + // portions as deleted. + while fragment_start < range.end { let fragment = old_fragments.item().unwrap(); let fragment_end = old_fragments.end(&cx).offset(); let mut intersection = fragment.clone(); - if intersection.was_visible(&version, &self.undo_map) { - let intersection_end = cmp::min(range.end, fragment_end); + let intersection_end = cmp::min(range.end, fragment_end); + if fragment_end > old_fragments.start().offset() { intersection.len = intersection_end - fragment_start; intersection.deletions.insert(local_timestamp); intersection.visible = false; + } + if intersection.len > 0 { + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); fragment_start = intersection_end; } - new_ropes.push_fragment(&intersection, fragment.visible); - new_fragments.push(intersection, &None); - - if range.end >= fragment_end { + if fragment_end <= range.end { old_fragments.next(&cx); } } } - if old_fragments - .item() - .map_or(false, |f| version.observed(f.insertion_id)) - { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. + if fragment_start > old_fragments.start().offset() { let fragment_end = old_fragments.end(&cx).offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); @@ -1454,9 +1464,14 @@ impl Buffer { let mut fragment_start = old_fragments.start().visible; for range in ranges { - if range.start > old_fragments.end(&None).visible { + let fragment_end = old_fragments.end(&None).visible; + + // If the current fragment ends before this range, then jump ahead to the first fragment + // that extends past the start of this range, reusing any intervening fragments. + if fragment_end < range.start { + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. if fragment_start > old_fragments.start().visible { - let fragment_end = old_fragments.end(&None).visible; if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); suffix.len = fragment_end - fragment_start; @@ -1474,14 +1489,16 @@ impl Buffer { let full_range_start = range.start + old_fragments.start().deleted; - if range.start > fragment_start { + // Preserve any portion of the current fragment that precedes this range. + if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); prefix.len = range.start - fragment_start; - fragment_start = range.start; new_ropes.push_fragment(&prefix, prefix.visible); new_fragments.push(prefix, &None); + fragment_start = range.start; } + // Insert the new text before any existing fragments within the range. if let Some(new_text) = new_text.as_deref() { new_ropes.push_str(new_text); new_fragments.push( @@ -1497,21 +1514,24 @@ impl Buffer { ); } - while range.end > fragment_start { + // Advance through every fragment that intersects this range, marking the intersecting + // portions as deleted. 
+ while fragment_start < range.end { let fragment = old_fragments.item().unwrap(); let fragment_end = old_fragments.end(&None).visible; let mut intersection = fragment.clone(); - if intersection.visible { - let intersection_end = cmp::min(range.end, fragment_end); + let intersection_end = cmp::min(range.end, fragment_end); + if fragment_end > old_fragments.start().visible { intersection.len = intersection_end - fragment_start; intersection.deletions.insert(local_timestamp); intersection.visible = false; + } + if intersection.len > 0 { + new_ropes.push_fragment(&intersection, fragment.visible); + new_fragments.push(intersection, &None); fragment_start = intersection_end; } - new_ropes.push_fragment(&intersection, fragment.visible); - new_fragments.push(intersection, &None); - - if range.end >= fragment_end { + if fragment_end <= range.end { old_fragments.next(&None); } } @@ -1520,6 +1540,8 @@ impl Buffer { edit.ranges.push(full_range_start..full_range_end); } + // If the current fragment has been partially consumed, then consume the rest of it + // and advance to the next fragment before slicing. if fragment_start > old_fragments.start().visible { let fragment_end = old_fragments.end(&None).visible; if fragment_end > fragment_start { @@ -1534,10 +1556,10 @@ impl Buffer { let suffix = old_fragments.suffix(&None); new_ropes.push_tree(suffix.summary().text); new_fragments.push_tree(suffix, &None); - + let (visible_text, deleted_text) = new_ropes.finish(); drop(old_fragments); + self.fragments = new_fragments; - let (visible_text, deleted_text) = new_ropes.finish(); self.visible_text = visible_text; self.deleted_text = deleted_text; edit @@ -1755,6 +1777,7 @@ impl<'a> RopeBuilder<'a> { } fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) { + debug_assert!(fragment.len > 0); self.push(fragment.len, was_visible, fragment.visible) } @@ -3085,9 +3108,9 @@ mod tests { mutation_count -= 1; } 51..=70 if mutation_count != 0 => { - let ops = buffer.randomly_undo_redo(&mut rng); - network.broadcast(replica_id, ops, &mut rng); - mutation_count -= 1; + // let ops = buffer.randomly_undo_redo(&mut rng); + // network.broadcast(replica_id, ops, &mut rng); + // mutation_count -= 1; } 71..=100 if network.has_unreceived(replica_id) => { let ops = network.receive(replica_id, &mut rng); From 23c4621b36053992b3a4baf82c4a284518ce1406 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 3 Jun 2021 17:41:51 -0700 Subject: [PATCH 31/40] Reenable undo/redo in randomized concurrent edit test Currently this fails --- zed/src/editor/buffer.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 0561821489d8744a0a5b3e4c01a102070fd175f8..3c4b7a0bd89891156cf02cc85cb3513b89717d4c 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -3108,9 +3108,9 @@ mod tests { mutation_count -= 1; } 51..=70 if mutation_count != 0 => { - // let ops = buffer.randomly_undo_redo(&mut rng); - // network.broadcast(replica_id, ops, &mut rng); - // mutation_count -= 1; + let ops = buffer.randomly_undo_redo(&mut rng); + network.broadcast(replica_id, ops, &mut rng); + mutation_count -= 1; } 71..=100 if network.has_unreceived(replica_id) => { let ops = network.receive(replica_id, &mut rng); From 9bf3038857cd9be5bae0dce02c75d20049643703 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 10:08:52 +0200 Subject: [PATCH 32/40] Coalesce contiguous ranges when editing locally --- zed/src/editor/buffer.rs | 28 
+++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 3c4b7a0bd89891156cf02cc85cb3513b89717d4c..0697a44facb159acabb1002ef06be63aad577918 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -902,7 +902,7 @@ impl Buffer { pub fn edit( &mut self, - ranges: I, + ranges_iter: I, new_text: T, cx: Option<&mut ModelContext>, ) -> Option @@ -918,17 +918,23 @@ impl Buffer { None }; let has_new_text = new_text.is_some(); - let ranges = ranges - .into_iter() - .filter_map(|range| { - let range = range.start.to_offset(self)..range.end.to_offset(self); - if has_new_text || !range.is_empty() { - Some(range) + + // Skip invalid ranges and coalesce contiguous ones. + let mut ranges: Vec> = Vec::new(); + for range in ranges_iter { + let range = range.start.to_offset(self)..range.end.to_offset(self); + if has_new_text || !range.is_empty() { + if let Some(prev_range) = ranges.last_mut() { + if prev_range.end >= range.start { + prev_range.end = cmp::max(prev_range.end, range.end); + } else { + ranges.push(range); + } } else { - None + ranges.push(range); } - }) - .collect::>>(); + } + } if ranges.is_empty() { None @@ -3344,7 +3350,7 @@ mod tests { impl Buffer { fn random_byte_range(&mut self, start_offset: usize, rng: &mut impl Rng) -> Range { let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right); - let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Left); + let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right); start..end } From ec07b8ca1dd39f3d077ecc37088edff4d2e5d244 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 11:07:52 +0200 Subject: [PATCH 33/40] Change fragment's visibility only if it was visible at a version --- zed/src/editor/buffer.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 0697a44facb159acabb1002ef06be63aad577918..c2cc34a08e37ad3186e70de16309eb30f32f7386 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1225,7 +1225,7 @@ impl Buffer { let fragment_end = old_fragments.end(&cx).offset(); let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); - if fragment_end > old_fragments.start().offset() { + if fragment.was_visible(version, &self.undo_map) { intersection.len = intersection_end - fragment_start; intersection.deletions.insert(local_timestamp); intersection.visible = false; @@ -1348,9 +1348,6 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); - let mut version_after_edit = edit.version.clone(); - version_after_edit.observe(edit.id); - for range in &edit.ranges { let mut end_offset = old_fragments.end(&version).offset(); @@ -1368,7 +1365,9 @@ impl Buffer { if let Some(fragment) = old_fragments.item() { let mut fragment = fragment.clone(); let fragment_was_visible = fragment.visible; - if version_after_edit.observed(fragment.insertion_id) { + if fragment.was_visible(&edit.version, &self.undo_map) + || fragment.insertion_id == edit.id + { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); } @@ -1527,7 +1526,7 @@ impl Buffer { let fragment_end = old_fragments.end(&None).visible; let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); - if fragment_end > old_fragments.start().visible { + if 
fragment.visible { intersection.len = intersection_end - fragment_start; intersection.deletions.insert(local_timestamp); intersection.visible = false; @@ -3425,7 +3424,7 @@ mod tests { pub fn randomly_undo_redo(&mut self, rng: &mut impl Rng) -> Vec { let mut ops = Vec::new(); - for _ in 0..rng.gen_range(1..5) { + for _ in 0..rng.gen_range(1..=5) { if let Some(edit_id) = self.history.ops.keys().choose(rng).copied() { log::info!("undoing buffer {} operation {:?}", self.replica_id, edit_id); ops.push(self.undo_or_redo(edit_id).unwrap()); From eaf09463beff633394df5ee092b5a189177d391f Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 11:10:53 +0200 Subject: [PATCH 34/40] Remove commented out code --- zed/src/editor/buffer.rs | 9 --------- 1 file changed, 9 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index c2cc34a08e37ad3186e70de16309eb30f32f7386..b189cddf0fdac322ea7f988753f109d86180c5b4 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1376,15 +1376,6 @@ impl Buffer { old_fragments.next(&version); end_offset = old_fragments.end(&version).offset(); - - // Skip over any fragments that were not present when the edit occurred. - // let newer_fragments = old_fragments.slice( - // &old_fragments.end(&version), - // SeekBias::Right, - // &version, - // ); - // new_ropes.push_tree(newer_fragments.summary().text); - // new_fragments.push_tree(newer_fragments, &None); } else { break; } From f016400ddc1feb38cfd9a9dc14ede2c84efd449b Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 12:34:35 +0200 Subject: [PATCH 35/40] Remove `FullOffset` --- zed/src/editor/buffer.rs | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index b189cddf0fdac322ea7f988753f109d86180c5b4..82e6818bfc79da69a9595b092cfbd2b9a68a926d 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -352,7 +352,7 @@ pub struct FragmentSummary { max_insertion_version: time::Global, } -#[derive(Default, Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)] struct FragmentTextSummary { visible: usize, deleted: usize, @@ -1621,7 +1621,10 @@ impl Buffer { fn full_offset_for_anchor(&self, anchor: &Anchor) -> usize { match anchor { Anchor::Start => 0, - Anchor::End => self.fragments.extent::(&None).0, + Anchor::End => { + let summary = self.fragments.summary(); + summary.text.visible + summary.text.deleted + } Anchor::Middle { offset, bias, @@ -1629,14 +1632,15 @@ impl Buffer { } => { let mut cursor = self .fragments - .cursor::(); + .cursor::(); cursor.seek( &VersionedOffset::Offset(*offset), bias.to_seek_bias(), &Some(version.clone()), ); - let full_offset = cursor.start().1; - full_offset.0 + offset - cursor.start().0.offset() + let overshoot = offset - cursor.start().0.offset(); + let summary = cursor.start().1; + summary.visible + summary.deleted + overshoot } } } @@ -2135,15 +2139,6 @@ where } } -#[derive(Clone, Copy, Debug, Default)] -struct FullOffset(usize); - -impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset { - fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option) { - self.0 += summary.text.visible + summary.text.deleted; - } -} - impl Operation { fn replica_id(&self) -> ReplicaId { self.lamport_timestamp().replica_id From 3b9d760f2b65b1c77698ad7148e0dd894ba1706c Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 12:36:38 +0200 Subject: [PATCH 36/40] Add 
blanket implementation for (D1, D2) when they impl `Dimension` --- zed/src/editor/buffer.rs | 11 ----------- zed/src/sum_tree.rs | 12 ++++++++++++ 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 82e6818bfc79da69a9595b092cfbd2b9a68a926d..fe43bf2e67baa7cb6809d8d1d38ca0316d297997 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -2128,17 +2128,6 @@ impl<'a> sum_tree::SeekDimension<'a, FragmentSummary> for VersionedOffset { } } -impl<'a, T, U> sum_tree::Dimension<'a, FragmentSummary> for (T, U) -where - T: sum_tree::Dimension<'a, FragmentSummary>, - U: sum_tree::Dimension<'a, FragmentSummary>, -{ - fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option) { - self.0.add_summary(summary, cx); - self.1.add_summary(summary, cx); - } -} - impl Operation { fn replica_id(&self) -> ReplicaId { self.lamport_timestamp().replica_id diff --git a/zed/src/sum_tree.rs b/zed/src/sum_tree.rs index 35b91833761027d2edd71f089c7ad1e5057ad28f..48338d4a57f281ef763677354999c899a776d724 100644 --- a/zed/src/sum_tree.rs +++ b/zed/src/sum_tree.rs @@ -36,6 +36,18 @@ impl<'a, T: Summary> Dimension<'a, T> for () { fn add_summary(&mut self, _: &'a T, _: &T::Context) {} } +impl<'a, S, D1, D2> Dimension<'a, S> for (D1, D2) +where + S: Summary, + D1: Dimension<'a, S>, + D2: Dimension<'a, S>, +{ + fn add_summary(&mut self, summary: &'a S, cx: &S::Context) { + self.0.add_summary(summary, cx); + self.1.add_summary(summary, cx); + } +} + pub trait SeekDimension<'a, T: Summary>: Dimension<'a, T> { fn cmp(&self, other: &Self, cx: &T::Context) -> Ordering; } From e071d400582c2785d30351309222bb76cac6bdea Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 13:39:57 +0200 Subject: [PATCH 37/40] Use a single `Bias` enum everywhere --- zed/src/editor.rs | 8 +-- zed/src/editor/buffer.rs | 39 +++++++------- zed/src/editor/buffer/anchor.rs | 46 ++--------------- zed/src/editor/buffer/rope.rs | 26 +++++----- zed/src/editor/display_map/fold_map.rs | 46 ++++++++--------- zed/src/sum_tree.rs | 71 ++++++++++---------------- zed/src/sum_tree/cursor.rs | 23 ++++----- zed/src/util.rs | 23 +++++++++ zed/src/worktree.rs | 23 +++++---- 9 files changed, 128 insertions(+), 177 deletions(-) diff --git a/zed/src/editor.rs b/zed/src/editor.rs index 58a723755f7af5f2034d3203e4ad0c2a1385e44d..4d2a1fdc179163f55d2cb4e202f96d35717bbe4a 100644 --- a/zed/src/editor.rs +++ b/zed/src/editor.rs @@ -5,7 +5,7 @@ pub mod movement; use crate::{ settings::{Settings, StyleId}, - util::post_inc, + util::{post_inc, Bias}, workspace, worktree::FileHandle, }; @@ -4137,12 +4137,6 @@ mod tests { } } -#[derive(Copy, Clone)] -pub enum Bias { - Left, - Right, -} - trait RangeExt { fn sorted(&self) -> Range; fn to_inclusive(&self) -> RangeInclusive; diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index fe43bf2e67baa7cb6809d8d1d38ca0316d297997..b00bb9fb02b7ffc17208fc25e592883d29cda32a 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -13,12 +13,12 @@ use similar::{ChangeTag, TextDiff}; use tree_sitter::{InputEdit, Parser, QueryCursor}; use crate::{ - editor::Bias, language::{Language, Tree}, operation_queue::{self, OperationQueue}, settings::{StyleId, ThemeMap}, - sum_tree::{self, FilterCursor, SeekBias, SumTree}, + sum_tree::{self, FilterCursor, SumTree}, time::{self, ReplicaId}, + util::Bias, worktree::FileHandle, }; use anyhow::{anyhow, Result}; @@ -1135,11 +1135,8 @@ impl Buffer { let mut new_ropes = 
RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = old_fragments.slice( - &VersionedOffset::Offset(ranges[0].start), - SeekBias::Left, - &cx, - ); + let mut new_fragments = + old_fragments.slice(&VersionedOffset::Offset(ranges[0].start), Bias::Left, &cx); new_ropes.push_tree(new_fragments.summary().text); let mut fragment_start = old_fragments.start().offset(); @@ -1162,7 +1159,7 @@ impl Buffer { } let slice = - old_fragments.slice(&VersionedOffset::Offset(range.start), SeekBias::Left, &cx); + old_fragments.slice(&VersionedOffset::Offset(range.start), Bias::Left, &cx); new_ropes.push_tree(slice.summary().text); new_fragments.push_tree(slice, &None); fragment_start = old_fragments.start().offset(); @@ -1342,7 +1339,7 @@ impl Buffer { let version = Some(edit.version.clone()); let mut old_fragments = self.fragments.cursor::(); - old_fragments.seek(&VersionedOffset::Offset(0), SeekBias::Left, &version); + old_fragments.seek(&VersionedOffset::Offset(0), Bias::Left, &version); let mut new_fragments = SumTree::new(); let mut new_ropes = @@ -1354,7 +1351,7 @@ impl Buffer { if end_offset < range.start { let preceding_fragments = old_fragments.slice( &VersionedOffset::Offset(range.start), - SeekBias::Left, + Bias::Left, &version, ); new_ropes.push_tree(preceding_fragments.summary().text); @@ -1455,7 +1452,7 @@ impl Buffer { let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self.fragments.cursor::(); - let mut new_fragments = old_fragments.slice(&ranges[0].start, SeekBias::Right, &None); + let mut new_fragments = old_fragments.slice(&ranges[0].start, Bias::Right, &None); new_ropes.push_tree(new_fragments.summary().text); let mut fragment_start = old_fragments.start().visible; @@ -1477,7 +1474,7 @@ impl Buffer { old_fragments.next(&None); } - let slice = old_fragments.slice(&range.start, SeekBias::Right, &None); + let slice = old_fragments.slice(&range.start, Bias::Right, &None); new_ropes.push_tree(slice.summary().text); new_fragments.push_tree(slice, &None); fragment_start = old_fragments.start().visible; @@ -1562,25 +1559,25 @@ impl Buffer { } pub fn anchor_before(&self, position: T) -> Anchor { - self.anchor_at(position, AnchorBias::Left) + self.anchor_at(position, Bias::Left) } pub fn anchor_after(&self, position: T) -> Anchor { - self.anchor_at(position, AnchorBias::Right) + self.anchor_at(position, Bias::Right) } - pub fn anchor_at(&self, position: T, bias: AnchorBias) -> Anchor { + pub fn anchor_at(&self, position: T, bias: Bias) -> Anchor { let offset = position.to_offset(self); let max_offset = self.len(); assert!(offset <= max_offset, "offset is out of range"); - if offset == 0 && bias == AnchorBias::Left { + if offset == 0 && bias == Bias::Left { Anchor::Start - } else if offset == max_offset && bias == AnchorBias::Right { + } else if offset == max_offset && bias == Bias::Right { Anchor::End } else { let mut cursor = self.fragments.cursor::(); - cursor.seek(&offset, bias.to_seek_bias(), &None); + cursor.seek(&offset, bias, &None); Anchor::Middle { offset: offset + cursor.start().deleted, bias, @@ -1603,7 +1600,7 @@ impl Buffer { .cursor::(); cursor.seek( &VersionedOffset::Offset(*offset), - bias.to_seek_bias(), + *bias, &Some(version.clone()), ); let fragment = cursor.item().unwrap(); @@ -1635,7 +1632,7 @@ impl Buffer { .cursor::(); cursor.seek( &VersionedOffset::Offset(*offset), - bias.to_seek_bias(), + *bias, 
&Some(version.clone()), ); let overshoot = offset - cursor.start().0.offset(); @@ -2814,7 +2811,7 @@ mod tests { buffer.add_selection_set( (0..3) .map(|row| { - let anchor = buffer.anchor_at(Point::new(row, 0), AnchorBias::Right); + let anchor = buffer.anchor_at(Point::new(row, 0), Bias::Right); Selection { id: row as usize, start: anchor.clone(), diff --git a/zed/src/editor/buffer/anchor.rs b/zed/src/editor/buffer/anchor.rs index 145330416c183d1141871fe916f7388b5ad4f1fb..6715054ada152df802cb71005233022e15d912c0 100644 --- a/zed/src/editor/buffer/anchor.rs +++ b/zed/src/editor/buffer/anchor.rs @@ -1,5 +1,5 @@ use super::Buffer; -use crate::{sum_tree, time}; +use crate::{time, util::Bias}; use anyhow::Result; use std::{cmp::Ordering, ops::Range}; @@ -9,47 +9,11 @@ pub enum Anchor { End, Middle { offset: usize, - bias: AnchorBias, + bias: Bias, version: time::Global, }, } -#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] -pub enum AnchorBias { - Left, - Right, -} - -impl AnchorBias { - pub fn to_seek_bias(self) -> sum_tree::SeekBias { - match self { - AnchorBias::Left => sum_tree::SeekBias::Left, - AnchorBias::Right => sum_tree::SeekBias::Right, - } - } -} - -impl PartialOrd for AnchorBias { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for AnchorBias { - fn cmp(&self, other: &Self) -> Ordering { - use AnchorBias::*; - - if self == other { - return Ordering::Equal; - } - - match (self, other) { - (Left, _) => Ordering::Less, - (Right, _) => Ordering::Greater, - } - } -} - impl Anchor { pub fn cmp(&self, other: &Anchor, buffer: &Buffer) -> Result { if self == other { @@ -88,8 +52,7 @@ impl Anchor { match self { Anchor::Start | Anchor::Middle { - bias: AnchorBias::Left, - .. + bias: Bias::Left, .. } => self.clone(), _ => buffer.anchor_before(self), } @@ -99,8 +62,7 @@ impl Anchor { match self { Anchor::End | Anchor::Middle { - bias: AnchorBias::Right, - .. + bias: Bias::Right, .. 
} => self.clone(), _ => buffer.anchor_after(self), } diff --git a/zed/src/editor/buffer/rope.rs b/zed/src/editor/buffer/rope.rs index 1b5d8fb8965266fb8fa330234e118463307a383d..268924c158864952feed9f759fbc1e03eb8ea70f 100644 --- a/zed/src/editor/buffer/rope.rs +++ b/zed/src/editor/buffer/rope.rs @@ -1,7 +1,7 @@ use super::Point; use crate::{ - editor::Bias, - sum_tree::{self, SeekBias, SumTree}, + sum_tree::{self, SumTree}, + util::Bias, }; use arrayvec::ArrayString; use smallvec::SmallVec; @@ -129,7 +129,7 @@ impl Rope { pub fn to_point(&self, offset: usize) -> Point { assert!(offset <= self.summary().bytes); let mut cursor = self.chunks.cursor::(); - cursor.seek(&offset, SeekBias::Left, &()); + cursor.seek(&offset, Bias::Left, &()); let overshoot = offset - cursor.start().bytes; cursor.start().lines + cursor @@ -140,14 +140,14 @@ impl Rope { pub fn to_offset(&self, point: Point) -> usize { assert!(point <= self.summary().lines); let mut cursor = self.chunks.cursor::(); - cursor.seek(&point, SeekBias::Left, &()); + cursor.seek(&point, Bias::Left, &()); let overshoot = point - cursor.start().lines; cursor.start().bytes + cursor.item().map_or(0, |chunk| chunk.to_offset(overshoot)) } pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { let mut cursor = self.chunks.cursor::(); - cursor.seek(&offset, SeekBias::Left, &()); + cursor.seek(&offset, Bias::Left, &()); if let Some(chunk) = cursor.item() { let mut ix = offset - cursor.start(); while !chunk.0.is_char_boundary(ix) { @@ -170,7 +170,7 @@ impl Rope { pub fn clip_point(&self, point: Point, bias: Bias) -> Point { let mut cursor = self.chunks.cursor::(); - cursor.seek(&point, SeekBias::Right, &()); + cursor.seek(&point, Bias::Right, &()); if let Some(chunk) = cursor.item() { let overshoot = point - cursor.start(); *cursor.start() + chunk.clip_point(overshoot, bias) @@ -197,7 +197,7 @@ pub struct Cursor<'a> { impl<'a> Cursor<'a> { pub fn new(rope: &'a Rope, offset: usize) -> Self { let mut chunks = rope.chunks.cursor(); - chunks.seek(&offset, SeekBias::Right, &()); + chunks.seek(&offset, Bias::Right, &()); Self { rope, chunks, @@ -208,7 +208,7 @@ impl<'a> Cursor<'a> { pub fn seek_forward(&mut self, end_offset: usize) { debug_assert!(end_offset >= self.offset); - self.chunks.seek_forward(&end_offset, SeekBias::Right, &()); + self.chunks.seek_forward(&end_offset, Bias::Right, &()); self.offset = end_offset; } @@ -230,7 +230,7 @@ impl<'a> Cursor<'a> { if end_offset > self.chunks.end(&()) { self.chunks.next(&()); slice.append(Rope { - chunks: self.chunks.slice(&end_offset, SeekBias::Right, &()), + chunks: self.chunks.slice(&end_offset, Bias::Right, &()), }); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); @@ -254,7 +254,7 @@ impl<'a> Cursor<'a> { if end_offset > self.chunks.end(&()) { self.chunks.next(&()); - summary += &self.chunks.summary(&end_offset, SeekBias::Right, &()); + summary += &self.chunks.summary(&end_offset, Bias::Right, &()); if let Some(end_chunk) = self.chunks.item() { let end_ix = end_offset - self.chunks.start(); summary += TextSummary::from(&end_chunk.0[..end_ix]); @@ -281,7 +281,7 @@ pub struct Chunks<'a> { impl<'a> Chunks<'a> { pub fn new(rope: &'a Rope, range: Range) -> Self { let mut chunks = rope.chunks.cursor(); - chunks.seek(&range.start, SeekBias::Right, &()); + chunks.seek(&range.start, Bias::Right, &()); Self { chunks, range } } @@ -291,9 +291,9 @@ impl<'a> Chunks<'a> { pub fn seek(&mut self, offset: usize) { if offset >= self.chunks.end(&()) { - 
self.chunks.seek_forward(&offset, SeekBias::Right, &()); + self.chunks.seek_forward(&offset, Bias::Right, &()); } else { - self.chunks.seek(&offset, SeekBias::Right, &()); + self.chunks.seek(&offset, Bias::Right, &()); } self.range.start = offset; } diff --git a/zed/src/editor/display_map/fold_map.rs b/zed/src/editor/display_map/fold_map.rs index 135ecb566f254be3d3a35536917aa5f2e49084a2..39b7b91b13230546126396480d601270da8c1ba8 100644 --- a/zed/src/editor/display_map/fold_map.rs +++ b/zed/src/editor/display_map/fold_map.rs @@ -1,12 +1,13 @@ use super::{ buffer::{AnchorRangeExt, TextSummary}, - Anchor, Bias, Buffer, DisplayPoint, Edit, Point, ToOffset, + Anchor, Buffer, DisplayPoint, Edit, Point, ToOffset, }; use crate::{ editor::buffer, settings::StyleId, - sum_tree::{self, Cursor, FilterCursor, SeekBias, SumTree}, + sum_tree::{self, Cursor, FilterCursor, SumTree}, time, + util::Bias, }; use gpui::{AppContext, ModelHandle}; use parking_lot::{Mutex, MutexGuard}; @@ -125,7 +126,7 @@ impl FoldMap { let mut new_tree = SumTree::new(); let mut cursor = self.folds.cursor::<_, ()>(); for fold in folds { - new_tree.push_tree(cursor.slice(&fold, SeekBias::Right, buffer), buffer); + new_tree.push_tree(cursor.slice(&fold, Bias::Right, buffer), buffer); new_tree.push(fold, buffer); } new_tree.push_tree(cursor.suffix(buffer), buffer); @@ -173,7 +174,7 @@ impl FoldMap { let mut cursor = self.folds.cursor::<_, ()>(); let mut folds = SumTree::new(); for fold_ix in fold_ixs_to_delete { - folds.push_tree(cursor.slice(&fold_ix, SeekBias::Right, buffer), buffer); + folds.push_tree(cursor.slice(&fold_ix, Bias::Right, buffer), buffer); cursor.next(buffer); } folds.push_tree(cursor.suffix(buffer), buffer); @@ -210,14 +211,14 @@ impl FoldMap { let offset = offset.to_offset(buffer); let transforms = self.sync(cx); let mut cursor = transforms.cursor::(); - cursor.seek(&offset, SeekBias::Right, &()); + cursor.seek(&offset, Bias::Right, &()); cursor.item().map_or(false, |t| t.display_text.is_some()) } pub fn is_line_folded(&self, display_row: u32, cx: &AppContext) -> bool { let transforms = self.sync(cx); let mut cursor = transforms.cursor::(); - cursor.seek(&DisplayPoint::new(display_row, 0), SeekBias::Right, &()); + cursor.seek(&DisplayPoint::new(display_row, 0), Bias::Right, &()); while let Some(transform) = cursor.item() { if transform.display_text.is_some() { return true; @@ -242,7 +243,7 @@ impl FoldMap { pub fn to_buffer_point(&self, display_point: DisplayPoint, cx: &AppContext) -> Point { let transforms = self.sync(cx); let mut cursor = transforms.cursor::(); - cursor.seek(&display_point, SeekBias::Right, &()); + cursor.seek(&display_point, Bias::Right, &()); let overshoot = display_point.0 - cursor.start().display.lines; cursor.start().buffer.lines + overshoot } @@ -250,7 +251,7 @@ impl FoldMap { pub fn to_display_point(&self, point: Point, cx: &AppContext) -> DisplayPoint { let transforms = self.sync(cx); let mut cursor = transforms.cursor::(); - cursor.seek(&point, SeekBias::Right, &()); + cursor.seek(&point, Bias::Right, &()); let overshoot = point - cursor.start().buffer.lines; DisplayPoint(cmp::min( cursor.start().display.lines + overshoot, @@ -275,17 +276,14 @@ impl FoldMap { let mut new_transforms = SumTree::new(); let mut transforms = self.transforms.lock(); let mut cursor = transforms.cursor::(); - cursor.seek(&0, SeekBias::Right, &()); + cursor.seek(&0, Bias::Right, &()); while let Some(mut edit) = edits.next() { - new_transforms.push_tree( - cursor.slice(&edit.old_range.start, SeekBias::Left, &()), 
- &(), - ); + new_transforms.push_tree(cursor.slice(&edit.old_range.start, Bias::Left, &()), &()); edit.new_range.start -= edit.old_range.start - cursor.start(); edit.old_range.start = *cursor.start(); - cursor.seek(&edit.old_range.end, SeekBias::Right, &()); + cursor.seek(&edit.old_range.end, Bias::Right, &()); cursor.next(&()); let mut delta = edit.delta(); @@ -302,7 +300,7 @@ impl FoldMap { if next_edit.old_range.end >= edit.old_range.end { edit.old_range.end = next_edit.old_range.end; - cursor.seek(&edit.old_range.end, SeekBias::Right, &()); + cursor.seek(&edit.old_range.end, Bias::Right, &()); cursor.next(&()); } } else { @@ -315,7 +313,7 @@ impl FoldMap { let anchor = buffer.anchor_before(edit.new_range.start); let mut folds_cursor = self.folds.cursor::<_, ()>(); - folds_cursor.seek(&Fold(anchor..Anchor::End), SeekBias::Left, buffer); + folds_cursor.seek(&Fold(anchor..Anchor::End), Bias::Left, buffer); let mut folds = iter::from_fn(move || { let item = folds_cursor .item() @@ -432,7 +430,7 @@ impl FoldMapSnapshot { let display_point = Point::new(start_row, 0); let mut cursor = self.transforms.cursor(); - cursor.seek(&DisplayPoint(display_point), SeekBias::Left, &()); + cursor.seek(&DisplayPoint(display_point), Bias::Left, &()); BufferRows { display_point, @@ -446,7 +444,7 @@ impl FoldMapSnapshot { pub fn chunks_at(&self, offset: DisplayOffset) -> Chunks { let mut transform_cursor = self.transforms.cursor::(); - transform_cursor.seek(&offset, SeekBias::Right, &()); + transform_cursor.seek(&offset, Bias::Right, &()); let overshoot = offset.0 - transform_cursor.start().display.bytes; let buffer_offset = transform_cursor.start().buffer.bytes + overshoot; Chunks { @@ -459,11 +457,11 @@ impl FoldMapSnapshot { pub fn highlighted_chunks(&mut self, range: Range) -> HighlightedChunks { let mut transform_cursor = self.transforms.cursor::(); - transform_cursor.seek(&range.end, SeekBias::Right, &()); + transform_cursor.seek(&range.end, Bias::Right, &()); let overshoot = range.end.0 - transform_cursor.start().display.bytes; let buffer_end = transform_cursor.start().buffer.bytes + overshoot; - transform_cursor.seek(&range.start, SeekBias::Right, &()); + transform_cursor.seek(&range.start, Bias::Right, &()); let overshoot = range.start.0 - transform_cursor.start().display.bytes; let buffer_start = transform_cursor.start().buffer.bytes + overshoot; @@ -484,7 +482,7 @@ impl FoldMapSnapshot { pub fn to_display_offset(&self, point: DisplayPoint) -> DisplayOffset { let mut cursor = self.transforms.cursor::(); - cursor.seek(&point, SeekBias::Right, &()); + cursor.seek(&point, Bias::Right, &()); let overshoot = point.0 - cursor.start().display.lines; let mut offset = cursor.start().display.bytes; if !overshoot.is_zero() { @@ -500,7 +498,7 @@ impl FoldMapSnapshot { pub fn to_buffer_offset(&self, point: DisplayPoint) -> usize { let mut cursor = self.transforms.cursor::(); - cursor.seek(&point, SeekBias::Right, &()); + cursor.seek(&point, Bias::Right, &()); let overshoot = point.0 - cursor.start().display.lines; self.buffer .to_offset(cursor.start().buffer.lines + overshoot) @@ -509,7 +507,7 @@ impl FoldMapSnapshot { #[cfg(test)] pub fn clip_offset(&self, offset: DisplayOffset, bias: Bias) -> DisplayOffset { let mut cursor = self.transforms.cursor::(); - cursor.seek(&offset, SeekBias::Right, &()); + cursor.seek(&offset, Bias::Right, &()); if let Some(transform) = cursor.item() { let transform_start = cursor.start().display.bytes; if transform.display_text.is_some() { @@ -534,7 +532,7 @@ impl FoldMapSnapshot { 
pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint { let mut cursor = self.transforms.cursor::(); - cursor.seek(&point, SeekBias::Right, &()); + cursor.seek(&point, Bias::Right, &()); if let Some(transform) = cursor.item() { let transform_start = cursor.start().display.lines; if transform.display_text.is_some() { diff --git a/zed/src/sum_tree.rs b/zed/src/sum_tree.rs index 48338d4a57f281ef763677354999c899a776d724..e307d2668b5c704ffdc1017570f03198c2bb8715 100644 --- a/zed/src/sum_tree.rs +++ b/zed/src/sum_tree.rs @@ -1,5 +1,6 @@ mod cursor; +use crate::util::Bias; use arrayvec::ArrayVec; pub use cursor::Cursor; pub use cursor::FilterCursor; @@ -58,12 +59,6 @@ impl<'a, S: Summary, T: Dimension<'a, S> + Ord> SeekDimension<'a, S> for T { } } -#[derive(Copy, Clone, Eq, PartialEq)] -pub enum SeekBias { - Left, - Right, -} - #[derive(Debug, Clone)] pub struct SumTree(Arc>); @@ -417,7 +412,7 @@ impl SumTree { pub fn insert(&mut self, item: T, cx: &::Context) { *self = { let mut cursor = self.cursor::(); - let mut new_tree = cursor.slice(&item.key(), SeekBias::Left, cx); + let mut new_tree = cursor.slice(&item.key(), Bias::Left, cx); new_tree.push(item, cx); new_tree.push_tree(cursor.suffix(cx), cx); new_tree @@ -441,7 +436,7 @@ impl SumTree { let mut new_tree = SumTree::new(); let mut buffered_items = Vec::new(); - cursor.seek(&T::Key::default(), SeekBias::Left, cx); + cursor.seek(&T::Key::default(), Bias::Left, cx); for edit in edits { let new_key = edit.key(); let mut old_item = cursor.item(); @@ -451,7 +446,7 @@ impl SumTree { .map_or(false, |old_item| old_item.key() < new_key) { new_tree.extend(buffered_items.drain(..), cx); - let slice = cursor.slice(&new_key, SeekBias::Left, cx); + let slice = cursor.slice(&new_key, Bias::Left, cx); new_tree.push_tree(slice, cx); old_item = cursor.item(); } @@ -481,7 +476,7 @@ impl SumTree { pub fn get(&self, key: &T::Key, cx: &::Context) -> Option<&T> { let mut cursor = self.cursor::(); - if cursor.seek(key, SeekBias::Left, cx) { + if cursor.seek(key, Bias::Left, cx) { cursor.item() } else { None @@ -638,10 +633,10 @@ mod tests { tree = { let mut cursor = tree.cursor::(); - let mut new_tree = cursor.slice(&Count(splice_start), SeekBias::Right, &()); + let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); new_tree.extend(new_items, &()); - cursor.seek(&Count(splice_end), SeekBias::Right, &()); - new_tree.push_tree(cursor.slice(&tree_end, SeekBias::Right, &()), &()); + cursor.seek(&Count(splice_end), Bias::Right, &()); + new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &()); new_tree }; @@ -665,7 +660,7 @@ mod tests { let mut pos = rng.gen_range(0..tree.extent::(&()).0 + 1); let mut before_start = false; let mut cursor = tree.cursor::(); - cursor.seek(&Count(pos), SeekBias::Right, &()); + cursor.seek(&Count(pos), Bias::Right, &()); for i in 0..10 { assert_eq!(cursor.start().0, pos); @@ -701,16 +696,8 @@ mod tests { for _ in 0..10 { let end = rng.gen_range(0..tree.extent::(&()).0 + 1); let start = rng.gen_range(0..end + 1); - let start_bias = if rng.gen() { - SeekBias::Left - } else { - SeekBias::Right - }; - let end_bias = if rng.gen() { - SeekBias::Left - } else { - SeekBias::Right - }; + let start_bias = if rng.gen() { Bias::Left } else { Bias::Right }; + let end_bias = if rng.gen() { Bias::Left } else { Bias::Right }; let mut cursor = tree.cursor::(); cursor.seek(&Count(start), start_bias, &()); @@ -730,7 +717,7 @@ mod tests { let tree = SumTree::::new(); let mut cursor = tree.cursor::(); assert_eq!( - 
cursor.slice(&Count(0), SeekBias::Right, &()).items(&()), + cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() ); assert_eq!(cursor.item(), None); @@ -742,7 +729,7 @@ mod tests { tree.extend(vec![1], &()); let mut cursor = tree.cursor::(); assert_eq!( - cursor.slice(&Count(0), SeekBias::Right, &()).items(&()), + cursor.slice(&Count(0), Bias::Right, &()).items(&()), Vec::::new() ); assert_eq!(cursor.item(), Some(&1)); @@ -760,18 +747,15 @@ mod tests { assert_eq!(cursor.start(), &Sum(0)); let mut cursor = tree.cursor::(); - assert_eq!( - cursor.slice(&Count(1), SeekBias::Right, &()).items(&()), - [1] - ); + assert_eq!(cursor.slice(&Count(1), Bias::Right, &()).items(&()), [1]); assert_eq!(cursor.item(), None); assert_eq!(cursor.prev_item(), Some(&1)); assert_eq!(cursor.start(), &Sum(1)); - cursor.seek(&Count(0), SeekBias::Right, &()); + cursor.seek(&Count(0), Bias::Right, &()); assert_eq!( cursor - .slice(&tree.extent::(&()), SeekBias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right, &()) .items(&()), [1] ); @@ -784,10 +768,7 @@ mod tests { tree.extend(vec![1, 2, 3, 4, 5, 6], &()); let mut cursor = tree.cursor::(); - assert_eq!( - cursor.slice(&Count(2), SeekBias::Right, &()).items(&()), - [1, 2] - ); + assert_eq!(cursor.slice(&Count(2), Bias::Right, &()).items(&()), [1, 2]); assert_eq!(cursor.item(), Some(&3)); assert_eq!(cursor.prev_item(), Some(&2)); assert_eq!(cursor.start(), &Sum(3)); @@ -856,7 +837,7 @@ mod tests { let mut cursor = tree.cursor::(); assert_eq!( cursor - .slice(&tree.extent::(&()), SeekBias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right, &()) .items(&()), tree.items(&()) ); @@ -864,10 +845,10 @@ mod tests { assert_eq!(cursor.prev_item(), Some(&6)); assert_eq!(cursor.start(), &Sum(21)); - cursor.seek(&Count(3), SeekBias::Right, &()); + cursor.seek(&Count(3), Bias::Right, &()); assert_eq!( cursor - .slice(&tree.extent::(&()), SeekBias::Right, &()) + .slice(&tree.extent::(&()), Bias::Right, &()) .items(&()), [4, 5, 6] ); @@ -876,23 +857,23 @@ mod tests { assert_eq!(cursor.start(), &Sum(21)); // Seeking can bias left or right - cursor.seek(&Count(1), SeekBias::Left, &()); + cursor.seek(&Count(1), Bias::Left, &()); assert_eq!(cursor.item(), Some(&1)); - cursor.seek(&Count(1), SeekBias::Right, &()); + cursor.seek(&Count(1), Bias::Right, &()); assert_eq!(cursor.item(), Some(&2)); // Slicing without resetting starts from where the cursor is parked at. 
- cursor.seek(&Count(1), SeekBias::Right, &()); + cursor.seek(&Count(1), Bias::Right, &()); assert_eq!( - cursor.slice(&Count(3), SeekBias::Right, &()).items(&()), + cursor.slice(&Count(3), Bias::Right, &()).items(&()), vec![2, 3] ); assert_eq!( - cursor.slice(&Count(6), SeekBias::Left, &()).items(&()), + cursor.slice(&Count(6), Bias::Left, &()).items(&()), vec![4, 5] ); assert_eq!( - cursor.slice(&Count(6), SeekBias::Right, &()).items(&()), + cursor.slice(&Count(6), Bias::Right, &()).items(&()), vec![6] ); } diff --git a/zed/src/sum_tree/cursor.rs b/zed/src/sum_tree/cursor.rs index 649b3ad28a8b0f77ba94f4f90d8d47ec13821f0a..8374546a20f112035e139e35d714e3fd36ebdcb3 100644 --- a/zed/src/sum_tree/cursor.rs +++ b/zed/src/sum_tree/cursor.rs @@ -345,7 +345,7 @@ where S: SeekDimension<'a, T::Summary>, U: Dimension<'a, T::Summary>, { - pub fn seek(&mut self, pos: &S, bias: SeekBias, cx: &::Context) -> bool { + pub fn seek(&mut self, pos: &S, bias: Bias, cx: &::Context) -> bool { self.reset(); self.seek_internal::<()>(Some(pos), bias, &mut SeekAggregate::None, cx) } @@ -353,7 +353,7 @@ where pub fn seek_forward( &mut self, pos: &S, - bias: SeekBias, + bias: Bias, cx: &::Context, ) -> bool { self.seek_internal::<()>(Some(pos), bias, &mut SeekAggregate::None, cx) @@ -362,7 +362,7 @@ where pub fn slice( &mut self, end: &S, - bias: SeekBias, + bias: Bias, cx: &::Context, ) -> SumTree { let mut slice = SeekAggregate::Slice(SumTree::new()); @@ -376,7 +376,7 @@ where pub fn suffix(&mut self, cx: &::Context) -> SumTree { let mut slice = SeekAggregate::Slice(SumTree::new()); - self.seek_internal::<()>(None, SeekBias::Right, &mut slice, cx); + self.seek_internal::<()>(None, Bias::Right, &mut slice, cx); if let SeekAggregate::Slice(slice) = slice { slice } else { @@ -384,12 +384,7 @@ where } } - pub fn summary( - &mut self, - end: &S, - bias: SeekBias, - cx: &::Context, - ) -> D + pub fn summary(&mut self, end: &S, bias: Bias, cx: &::Context) -> D where D: Dimension<'a, T::Summary>, { @@ -405,7 +400,7 @@ where fn seek_internal( &mut self, target: Option<&S>, - bias: SeekBias, + bias: Bias, aggregate: &mut SeekAggregate, cx: &::Context, ) -> bool @@ -453,7 +448,7 @@ where let comparison = target.map_or(Ordering::Greater, |t| t.cmp(&child_end, cx)); if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) + || (comparison == Ordering::Equal && bias == Bias::Right) { self.seek_dimension = child_end; self.sum_dimension.add_summary(child_summary, cx); @@ -503,7 +498,7 @@ where let comparison = target.map_or(Ordering::Greater, |t| t.cmp(&child_end, cx)); if comparison == Ordering::Greater - || (comparison == Ordering::Equal && bias == SeekBias::Right) + || (comparison == Ordering::Equal && bias == Bias::Right) { self.seek_dimension = child_end; self.sum_dimension.add_summary(item_summary, cx); @@ -560,7 +555,7 @@ where debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf()); let mut end = self.seek_dimension.clone(); - if bias == SeekBias::Left { + if bias == Bias::Left { if let Some(summary) = self.item_summary() { end.add_summary(summary, cx); } diff --git a/zed/src/util.rs b/zed/src/util.rs index 6015ce7e0a4acc4a9929cd62bbda8db2e833f649..c744a470983285a653dfb5231b834baf9b166e0b 100644 --- a/zed/src/util.rs +++ b/zed/src/util.rs @@ -1,6 +1,29 @@ use rand::prelude::*; use std::cmp::Ordering; +#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] +pub enum Bias { + Left, + Right, +} + +impl PartialOrd for Bias { + fn partial_cmp(&self, other: &Self) 
-> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Bias { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (Self::Left, Self::Left) => Ordering::Equal, + (Self::Left, Self::Right) => Ordering::Less, + (Self::Right, Self::Right) => Ordering::Equal, + (Self::Right, Self::Left) => Ordering::Greater, + } + } +} + pub fn post_inc(value: &mut usize) -> usize { let prev = *value; *value += 1; diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs index 56d53c0c5cfbb33d844be88d1b02957d7dcd7301..762d98ee7212f0d1ffade0654970aac4f6fa7704 100644 --- a/zed/src/worktree.rs +++ b/zed/src/worktree.rs @@ -4,7 +4,8 @@ mod ignore; use crate::{ editor::{History, Rope}, - sum_tree::{self, Cursor, Edit, SeekBias, SumTree}, + sum_tree::{self, Cursor, Edit, SumTree}, + util::Bias, }; use ::ignore::gitignore::Gitignore; use anyhow::{Context, Result}; @@ -295,7 +296,7 @@ impl Snapshot { } let path = path.as_ref(); let mut cursor = self.entries.cursor::<_, ()>(); - if cursor.seek(&PathSearch::Exact(path), SeekBias::Left, &()) { + if cursor.seek(&PathSearch::Exact(path), Bias::Left, &()) { let entry = cursor.item().unwrap(); if entry.path.as_ref() == path { return matches!(entry.kind, EntryKind::PendingDir); @@ -310,7 +311,7 @@ impl Snapshot { fn entry_for_path(&self, path: impl AsRef) -> Option<&Entry> { let mut cursor = self.entries.cursor::<_, ()>(); - if cursor.seek(&PathSearch::Exact(path.as_ref()), SeekBias::Left, &()) { + if cursor.seek(&PathSearch::Exact(path.as_ref()), Bias::Left, &()) { cursor.item() } else { None @@ -367,8 +368,8 @@ impl Snapshot { fn remove_path(&mut self, path: &Path) { let new_entries = { let mut cursor = self.entries.cursor::<_, ()>(); - let mut new_entries = cursor.slice(&PathSearch::Exact(path), SeekBias::Left, &()); - cursor.seek_forward(&PathSearch::Successor(path), SeekBias::Left, &()); + let mut new_entries = cursor.slice(&PathSearch::Exact(path), Bias::Left, &()); + cursor.seek_forward(&PathSearch::Successor(path), Bias::Left, &()); new_entries.push_tree(cursor.suffix(&()), &()); new_entries }; @@ -1296,13 +1297,13 @@ pub enum FileIter<'a> { impl<'a> FileIter<'a> { fn all(snapshot: &'a Snapshot, start: usize) -> Self { let mut cursor = snapshot.entries.cursor(); - cursor.seek(&FileCount(start), SeekBias::Right, &()); + cursor.seek(&FileCount(start), Bias::Right, &()); Self::All(cursor) } fn visible(snapshot: &'a Snapshot, start: usize) -> Self { let mut cursor = snapshot.entries.cursor(); - cursor.seek(&VisibleFileCount(start), SeekBias::Right, &()); + cursor.seek(&VisibleFileCount(start), Bias::Right, &()); Self::Visible(cursor) } @@ -1310,11 +1311,11 @@ impl<'a> FileIter<'a> { match self { Self::All(cursor) => { let ix = *cursor.start(); - cursor.seek_forward(&FileCount(ix.0 + 1), SeekBias::Right, &()); + cursor.seek_forward(&FileCount(ix.0 + 1), Bias::Right, &()); } Self::Visible(cursor) => { let ix = *cursor.start(); - cursor.seek_forward(&VisibleFileCount(ix.0 + 1), SeekBias::Right, &()); + cursor.seek_forward(&VisibleFileCount(ix.0 + 1), Bias::Right, &()); } } } @@ -1348,7 +1349,7 @@ struct ChildEntriesIter<'a> { impl<'a> ChildEntriesIter<'a> { fn new(parent_path: &'a Path, snapshot: &'a Snapshot) -> Self { let mut cursor = snapshot.entries.cursor(); - cursor.seek(&PathSearch::Exact(parent_path), SeekBias::Right, &()); + cursor.seek(&PathSearch::Exact(parent_path), Bias::Right, &()); Self { parent_path, cursor, @@ -1363,7 +1364,7 @@ impl<'a> Iterator for ChildEntriesIter<'a> { if let Some(item) = self.cursor.item() { if 
item.path().starts_with(self.parent_path) { self.cursor - .seek_forward(&PathSearch::Successor(item.path()), SeekBias::Left, &()); + .seek_forward(&PathSearch::Successor(item.path()), Bias::Left, &()); Some(item) } else { None From 125be2f07a1a786661740d99ce0b65348437c899 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 13:42:37 +0200 Subject: [PATCH 38/40] Save an allocation when editing locally --- zed/src/editor/buffer.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index b00bb9fb02b7ffc17208fc25e592883d29cda32a..49675a6dd3750595b7a35f396aae0ec8ba77795b 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -1445,8 +1445,7 @@ impl Buffer { id: local_timestamp, version: self.version(), ranges: Vec::with_capacity(ranges.len()), - // TODO: avoid cloning here - new_text: new_text.clone(), + new_text: None, }; let mut new_ropes = @@ -1555,6 +1554,7 @@ impl Buffer { self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; + edit.new_text = new_text; edit } From 8f8c6c8addd04c185b3a151f93874467eeba44f5 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 16:42:32 +0200 Subject: [PATCH 39/40] Delete unused `Insertion` struct --- zed/src/editor/buffer.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index 49675a6dd3750595b7a35f396aae0ec8ba77795b..f455709ce5b086495b9496be7c346e0eaec63d11 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -326,14 +326,6 @@ struct Diff { changes: Vec<(ChangeTag, usize)>, } -#[derive(Clone, Eq, PartialEq, Debug)] -pub struct Insertion { - id: time::Local, - parent_id: time::Local, - offset_in_parent: usize, - lamport_timestamp: time::Lamport, -} - #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { len: usize, From 5d1afaf484c2abf532ccab5d50d6639494256752 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Fri, 4 Jun 2021 18:03:44 +0200 Subject: [PATCH 40/40] Reduce `Fragment` size by not storing `ReplicaId` twice --- zed/src/editor/buffer.rs | 119 +++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 54 deletions(-) diff --git a/zed/src/editor/buffer.rs b/zed/src/editor/buffer.rs index f455709ce5b086495b9496be7c346e0eaec63d11..68ddb22b5acb8b4c7a177a1375a4fb28f19847d4 100644 --- a/zed/src/editor/buffer.rs +++ b/zed/src/editor/buffer.rs @@ -169,7 +169,7 @@ impl History { } fn push(&mut self, op: EditOperation) { - self.ops.insert(op.id, op); + self.ops.insert(op.timestamp.local(), op); } fn start_transaction( @@ -326,12 +326,34 @@ struct Diff { changes: Vec<(ChangeTag, usize)>, } +#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] +struct InsertionTimestamp { + replica_id: ReplicaId, + local: time::Seq, + lamport: time::Seq, +} + +impl InsertionTimestamp { + fn local(&self) -> time::Local { + time::Local { + replica_id: self.replica_id, + value: self.local, + } + } + + fn lamport(&self) -> time::Lamport { + time::Lamport { + replica_id: self.replica_id, + value: self.lamport, + } + } +} + #[derive(Eq, PartialEq, Clone, Debug)] struct Fragment { + timestamp: InsertionTimestamp, len: usize, visible: bool, - insertion_id: time::Local, - lamport_timestamp: time::Lamport, deletions: HashSet, max_undos: time::Global, } @@ -359,10 +381,7 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { #[derive(Clone, Debug, Eq, PartialEq)] pub enum Operation { - Edit { - 
edit: EditOperation, - lamport_timestamp: time::Lamport, - }, + Edit(EditOperation), Undo { undo: UndoOperation, lamport_timestamp: time::Lamport, @@ -376,7 +395,7 @@ pub enum Operation { #[derive(Clone, Debug, Eq, PartialEq)] pub struct EditOperation { - id: time::Local, + timestamp: InsertionTimestamp, version: time::Global, ranges: Vec>, new_text: Option, @@ -457,12 +476,11 @@ impl Buffer { if visible_text.len() > 0 { fragments.push( Fragment { - insertion_id: Default::default(), - lamport_timestamp: Default::default(), + timestamp: Default::default(), len: visible_text.len(), + visible: true, deletions: Default::default(), max_undos: Default::default(), - visible: true, }, &None, ); @@ -932,21 +950,21 @@ impl Buffer { None } else { self.start_transaction_at(None, Instant::now()).unwrap(); - let edit_id = self.local_clock.tick(); - let lamport_timestamp = self.lamport_clock.tick(); - let edit = self.apply_local_edit(&ranges, new_text, edit_id, lamport_timestamp); + let timestamp = InsertionTimestamp { + replica_id: self.replica_id, + local: self.local_clock.tick().value, + lamport: self.lamport_clock.tick().value, + }; + let edit = self.apply_local_edit(&ranges, new_text, timestamp); self.history.push(edit.clone()); - self.history.push_undo(edit.id); - self.last_edit = edit.id; - self.version.observe(edit.id); + self.history.push_undo(edit.timestamp.local()); + self.last_edit = edit.timestamp.local(); + self.version.observe(edit.timestamp.local()); self.end_transaction_at(None, Instant::now(), cx).unwrap(); - Some(Operation::Edit { - edit, - lamport_timestamp, - }) + Some(Operation::Edit(edit)) } } @@ -1067,20 +1085,15 @@ impl Buffer { fn apply_op(&mut self, op: Operation) -> Result<()> { match op { - Operation::Edit { - edit, - lamport_timestamp, - .. - } => { - if !self.version.observed(edit.id) { + Operation::Edit(edit) => { + if !self.version.observed(edit.timestamp.local()) { self.apply_remote_edit( &edit.version, &edit.ranges, edit.new_text.as_deref(), - edit.id, - lamport_timestamp, + edit.timestamp, ); - self.version.observe(edit.id); + self.version.observe(edit.timestamp.local()); self.history.push(edit); } } @@ -1116,8 +1129,7 @@ impl Buffer { version: &time::Global, ranges: &[Range], new_text: Option<&str>, - local_timestamp: time::Local, - lamport_timestamp: time::Lamport, + timestamp: InsertionTimestamp, ) { if ranges.is_empty() { return; @@ -1171,7 +1183,9 @@ impl Buffer { // Skip over insertions that are concurrent to this edit, but have a lower lamport // timestamp. 
while let Some(fragment) = old_fragments.item() { - if fragment_start == range.start && fragment.lamport_timestamp > lamport_timestamp { + if fragment_start == range.start + && fragment.timestamp.lamport() > timestamp.lamport() + { new_ropes.push_fragment(fragment, fragment.visible); new_fragments.push(fragment.clone(), &None); old_fragments.next(&cx); @@ -1196,8 +1210,7 @@ impl Buffer { new_ropes.push_str(new_text); new_fragments.push( Fragment { - insertion_id: local_timestamp, - lamport_timestamp, + timestamp, len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -1216,7 +1229,7 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { intersection.len = intersection_end - fragment_start; - intersection.deletions.insert(local_timestamp); + intersection.deletions.insert(timestamp.local()); intersection.visible = false; } if intersection.len > 0 { @@ -1252,8 +1265,8 @@ impl Buffer { self.fragments = new_fragments; self.visible_text = visible_text; self.deleted_text = deleted_text; - self.local_clock.observe(local_timestamp); - self.lamport_clock.observe(lamport_timestamp); + self.local_clock.observe(timestamp.local()); + self.lamport_clock.observe(timestamp.lamport()); } pub fn undo(&mut self, mut cx: Option<&mut ModelContext>) -> Vec { @@ -1355,7 +1368,7 @@ impl Buffer { let mut fragment = fragment.clone(); let fragment_was_visible = fragment.visible; if fragment.was_visible(&edit.version, &self.undo_map) - || fragment.insertion_id == edit.id + || fragment.timestamp.local() == edit.timestamp.local() { fragment.visible = fragment.is_visible(&self.undo_map); fragment.max_undos.observe(undo.id); @@ -1403,7 +1416,7 @@ impl Buffer { false } else { match op { - Operation::Edit { edit, .. } => self.version >= edit.version, + Operation::Edit(edit) => self.version >= edit.version, Operation::Undo { undo, .. } => self.version.observed(undo.edit_id), Operation::UpdateSelections { selections, .. 
} => { if let Some(selections) = selections { @@ -1430,11 +1443,10 @@ impl Buffer { &mut self, ranges: &[Range], new_text: Option, - local_timestamp: time::Local, - lamport_timestamp: time::Lamport, + timestamp: InsertionTimestamp, ) -> EditOperation { let mut edit = EditOperation { - id: local_timestamp, + timestamp, version: self.version(), ranges: Vec::with_capacity(ranges.len()), new_text: None, @@ -1487,8 +1499,7 @@ impl Buffer { new_ropes.push_str(new_text); new_fragments.push( Fragment { - insertion_id: local_timestamp, - lamport_timestamp, + timestamp, len: new_text.len(), deletions: Default::default(), max_undos: Default::default(), @@ -1507,7 +1518,7 @@ impl Buffer { let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { intersection.len = intersection_end - fragment_start; - intersection.deletions.insert(local_timestamp); + intersection.deletions.insert(timestamp.local()); intersection.visible = false; } if intersection.len > 0 { @@ -1987,11 +1998,13 @@ impl<'a> Iterator for HighlightedChunks<'a> { impl Fragment { fn is_visible(&self, undos: &UndoMap) -> bool { - !undos.is_undone(self.insertion_id) && self.deletions.iter().all(|d| undos.is_undone(*d)) + !undos.is_undone(self.timestamp.local()) + && self.deletions.iter().all(|d| undos.is_undone(*d)) } fn was_visible(&self, version: &time::Global, undos: &UndoMap) -> bool { - (version.observed(self.insertion_id) && !undos.was_undone(self.insertion_id, version)) + (version.observed(self.timestamp.local()) + && !undos.was_undone(self.timestamp.local(), version)) && self .deletions .iter() @@ -2004,14 +2017,14 @@ impl sum_tree::Item for Fragment { fn summary(&self) -> Self::Summary { let mut max_version = time::Global::new(); - max_version.observe(self.insertion_id); + max_version.observe(self.timestamp.local()); for deletion in &self.deletions { max_version.observe(*deletion); } max_version.join(&self.max_undos); let mut min_insertion_version = time::Global::new(); - min_insertion_version.observe(self.insertion_id); + min_insertion_version.observe(self.timestamp.local()); let max_insertion_version = min_insertion_version.clone(); if self.visible { FragmentSummary { @@ -2124,9 +2137,7 @@ impl Operation { fn lamport_timestamp(&self) -> time::Lamport { match self { - Operation::Edit { - lamport_timestamp, .. - } => *lamport_timestamp, + Operation::Edit(edit) => edit.timestamp.lamport(), Operation::Undo { lamport_timestamp, .. } => *lamport_timestamp, @@ -3474,7 +3485,7 @@ mod tests { impl Operation { fn edit_id(&self) -> Option { match self { - Operation::Edit { edit, .. } => Some(edit.id), + Operation::Edit(edit) => Some(edit.timestamp.local()), Operation::Undo { undo, .. } => Some(undo.edit_id), Operation::UpdateSelections { .. } => None, }
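// Editor's addendum — a minimal, standalone sketch of the layout change in PATCH 40/40:
// `Fragment` previously stored both a `time::Local` and a `time::Lamport`, each of which
// carries its own `ReplicaId`. `InsertionTimestamp` keeps the replica id once and derives
// both clocks from it on demand, which is how the patch shrinks `Fragment`. The integer
// widths chosen for `ReplicaId` and `Seq` below are assumptions for illustration only,
// not necessarily the exact aliases defined in zed/src/time.rs.

type ReplicaId = u16;
type Seq = u32;

#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct Local {
    replica_id: ReplicaId,
    value: Seq,
}

#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct Lamport {
    replica_id: ReplicaId,
    value: Seq,
}

// One timestamp per insertion: the replica id is shared by the local and Lamport
// components instead of being duplicated across two separate clock values.
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
struct InsertionTimestamp {
    replica_id: ReplicaId,
    local: Seq,
    lamport: Seq,
}

impl InsertionTimestamp {
    fn local(&self) -> Local {
        Local { replica_id: self.replica_id, value: self.local }
    }

    fn lamport(&self) -> Lamport {
        Lamport { replica_id: self.replica_id, value: self.lamport }
    }
}

fn main() {
    // The packed form stores `ReplicaId` once, so it is smaller than the pair of
    // clocks it replaces (with the widths assumed above: 12 bytes versus 8 + 8).
    assert!(
        std::mem::size_of::<InsertionTimestamp>()
            < std::mem::size_of::<Local>() + std::mem::size_of::<Lamport>()
    );

    let ts = InsertionTimestamp { replica_id: 1, local: 7, lamport: 9 };
    assert_eq!(ts.local(), Local { replica_id: 1, value: 7 });
    assert_eq!(ts.lamport(), Lamport { replica_id: 1, value: 9 });
    println!("InsertionTimestamp sketch: {:?}", ts);
}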