Author: Cole Miller
Co-authored-by: Cameron <cameron@zed.dev>
crates/editor/src/display_map.rs | 1
crates/editor/src/display_map/block_map.rs | 124 ++++-
crates/editor/src/display_map/filter_map.rs | 526 +++++++++++++++++++++++
crates/editor/src/display_map/inlay_map.rs | 3
crates/multi_buffer/src/multi_buffer.rs | 138 ++++++
5 files changed, 761 insertions(+), 31 deletions(-)
@@ -20,6 +20,7 @@
mod block_map;
mod crease_map;
mod custom_highlights;
+mod filter_map;
mod fold_map;
mod inlay_map;
pub(crate) mod invisibles;
@@ -571,29 +571,9 @@ impl BlockMap {
let mut edits = edits.into_iter().peekable();
while let Some(edit) = edits.next() {
- // FIXME biases?
-
- let mut old_buffer_start = self
- .wrap_snapshot
- .borrow()
- .to_point(WrapPoint::new(edit.old.start, 0), Bias::Left);
- old_buffer_start.column = 0;
- let mut old_start = WrapRow(
- self.wrap_snapshot
- .borrow()
- .make_wrap_point(old_buffer_start, Bias::Left)
- .row(),
- );
-
- let mut new_buffer_start =
- wrap_snapshot.to_point(WrapPoint::new(edit.new.start, 0), Bias::Left);
- new_buffer_start.column = 0;
-            let mut new_start = WrapRow(
-                wrap_snapshot
-                    .make_wrap_point(new_buffer_start, Bias::Left)
-                    .row(),
-            );
+            dbg!(&edit);
+            let mut old_start = WrapRow(edit.old.start);
+            let mut new_start = WrapRow(edit.new.start);
// Only preserve transforms that:
// * Strictly precedes this edit
@@ -608,6 +588,7 @@ impl BlockMap {
{
// Preserve the transform (push and next)
new_transforms.push(transform.clone(), ());
+ // ^ FIXME wrong?
cursor.next();
// Preserve below blocks at end of edit
@@ -640,7 +621,7 @@ impl BlockMap {
// replacement.
debug_assert!(transform.summary.input_rows > 0);
old_start.0 -= transform_rows_before_edit;
- new_start.0 -= transform_rows_before_edit;
+ new_start.0 -= dbg!(transform_rows_before_edit);
}
}
@@ -686,6 +667,62 @@ impl BlockMap {
}
}
+ // FIXME
+ // loop:
+ // peek next diff hunk in edit range
+ // peek next row boundary
+ // if they are equal, it's a diff hunk, so handle that (insert isomorphic and one non-isomorphic transform, possibly insert block)
+ // else must be a normal row boundary, so insert isomorphic transform and then block as needed
+            // Also: figure out what to do about the other kinds of block that exist.
+ if true {
+ // FIXME persistent cursors/iterators for row boundaries and diff hunks
+ let mut current_wrap_row = new_start.0;
+ dbg!("---------", new_end.0);
+ loop {
+ if dbg!(current_wrap_row) > new_end.0 {
+ dbg!();
+ break;
+ }
+
+ let Some(next_row_boundary) =
+ wrap_snapshot.next_row_boundary(WrapPoint::new(current_wrap_row, 0))
+ else {
+ dbg!();
+ break;
+ };
+
+ push_isomorphic(
+ &mut new_transforms,
+ next_row_boundary - current_wrap_row,
+ wrap_snapshot,
+ );
+ new_transforms.push(
+ Transform {
+ summary: TransformSummary {
+ input_rows: 0,
+ output_rows: 1,
+ longest_row: 0,
+ longest_row_chars: 0,
+ },
+ block: Some(Block::Spacer {
+ height: 1,
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ }),
+ },
+ (),
+ );
+ current_wrap_row = next_row_boundary;
+ }
+
+ // FIXME
+ // let rows_after_last_block = new_end
+ // .0
+ // .saturating_sub(new_transforms.summary().input_rows);
+ // push_isomorphic(&mut new_transforms, rows_after_last_block, wrap_snapshot);
+
+ continue;
+ }
+
// Find the blocks within this edited region.
let new_buffer_start =
wrap_snapshot.to_point(WrapPoint::new(new_start.0, 0), Bias::Left);
@@ -746,10 +783,6 @@ impl BlockMap {
BlockMap::sort_blocks(&mut blocks_in_edit);
- let hunks = buffer
- .diff_hunks_in_range(new_buffer_start..new_buffer_end)
- .collect::<Vec<_>>();
-
// For each of these blocks, insert a new isomorphic transform preceding the block,
// and then insert the block itself.
let mut just_processed_folded_buffer = false;
@@ -801,10 +834,11 @@ impl BlockMap {
}
new_transforms.append(cursor.suffix(), ());
- debug_assert_eq!(
- new_transforms.summary().input_rows,
- wrap_snapshot.max_point().row() + 1
- );
+ // FIXME
+ // debug_assert_eq!(
+ // new_transforms.summary().input_rows,
+ // wrap_snapshot.max_point().row() + 1
+ // );
drop(cursor);
*transforms = new_transforms;
@@ -3016,6 +3050,34 @@ mod tests {
);
}
+ #[gpui::test]
+ fn test_spacers(cx: &mut gpui::TestAppContext) {
+ cx.update(init_test);
+
+ let text = "aaa\nbbb\nccc\nddd\n";
+
+ let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx));
+ let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
+ let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
+ let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
+ let (wrap_map, wrap_snapshot) =
+ cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
+ let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
+
+ let snapshot = block_map.read(wrap_snapshot, Default::default());
+
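+        // The assertions below check the spacer block ranges and the rendered text,
+        // which should interleave a blank spacer row after each buffer row.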
+ let blocks = snapshot
+ .blocks_in_range(0..8)
+ .map(|(start_row, block)| start_row..start_row + block.height())
+ .collect::<Vec<_>>();
+
+ assert_eq!(blocks, &[1..2, 3..4, 5..6,]);
+
+ assert_eq!(snapshot.text(), "aaa\n\nbbb\n\nccc\n\nddd\n\n");
+ }
+
#[gpui::test(iterations = 100)]
fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
cx.update(init_test);
@@ -0,0 +1,526 @@
+use std::{cmp, ops::Range};
+
+use buffer_diff::DiffHunkStatusKind;
+use multi_buffer::{AnchorRangeExt as _, MultiBufferSnapshot};
+use rope::{Point, TextSummary};
+use sum_tree::{Dimensions, SumTree};
+use text::Bias;
+
+/// A `TextSummary` that always spans a whole number of multibuffer rows.
+#[derive(Debug, Clone, Copy)]
+struct WholeLineTextSummary(pub TextSummary);
+
+impl WholeLineTextSummary {
+ pub fn empty() -> Self {
+ Self(TextSummary::default())
+ }
+}
+
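+/// A run of buffer rows that is either passed through to the output unchanged
+/// (`Isomorphic`) or hidden from the output entirely (`Filter`).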
+#[derive(Debug, Clone, Copy)]
+enum Transform {
+ Isomorphic { summary: WholeLineTextSummary },
+ Filter { summary: WholeLineTextSummary },
+}
+
+impl Transform {
+ fn is_isomorphic(&self) -> bool {
+ matches!(self, Transform::Isomorphic { .. })
+ }
+}
+
+impl sum_tree::Item for Transform {
+ type Summary = TransformSummary;
+
+ fn summary(&self, cx: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
+ match self {
+ Self::Isomorphic { summary } => TransformSummary {
+ input: *summary,
+ output: *summary,
+ },
+ Self::Filter { summary } => TransformSummary {
+ input: *summary,
+ output: WholeLineTextSummary::empty(),
+ },
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy)]
+struct TransformSummary {
+ input: WholeLineTextSummary,
+ output: WholeLineTextSummary,
+}
+
+impl sum_tree::ContextLessSummary for TransformSummary {
+ fn zero() -> Self {
+ TransformSummary {
+ input: WholeLineTextSummary::empty(),
+ output: WholeLineTextSummary::empty(),
+ }
+ }
+
+ fn add_summary(&mut self, summary: &Self) {
+ self.input.0 += summary.input.0;
+ self.output.0 += summary.output.0;
+ }
+}
+
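+/// Maintains a mapping from multibuffer coordinates to a view of the buffer with
+/// certain diff hunk rows removed, according to the configured `FilterMode`.
+/// With `mode: None` the map is trivial and passes all content through.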
+struct FilterMap {
+ snapshot: FilterSnapshot,
+ mode: Option<FilterMode>,
+}
+
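+/// Which diff hunk rows to hide: rows belonging to deletion hunks
+/// (`RemoveDeletions`) or rows belonging to the remaining (added or modified)
+/// hunks (`RemoveInsertions`), matching the condition applied in `sync`.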
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+enum FilterMode {
+ RemoveDeletions,
+ RemoveInsertions,
+}
+
+impl FilterMode {
+    fn should_remove(self, kind: DiffHunkStatusKind) -> bool {
+        // Mirrors the filtering condition used in `sync`.
+        (self == FilterMode::RemoveDeletions) == (kind == DiffHunkStatusKind::Deleted)
+    }
+}
+
+#[derive(Clone)]
+struct FilterSnapshot {
+ transforms: SumTree<Transform>,
+ buffer_snapshot: MultiBufferSnapshot,
+}
+
+/// A byte index into the buffer (after ignored diff hunk lines are deleted)
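+/// For example (hypothetical), with `RemoveDeletions` and a buffer whose rows are
+/// `"aaa\n"`, a deleted-hunk row `"bbb\n"`, and `"ccc\n"`, `FilterOffset(4)` points
+/// at the start of `"ccc"`, because the deleted row contributes nothing to the output.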
+#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
+struct FilterOffset(usize);
+
+impl sum_tree::Dimension<'_, TransformSummary> for FilterOffset {
+ fn zero(cx: <TransformSummary as sum_tree::Summary>::Context<'_>) -> Self {
+ FilterOffset(0)
+ }
+
+ fn add_summary(
+ &mut self,
+ summary: &'_ TransformSummary,
+ cx: <TransformSummary as sum_tree::Summary>::Context<'_>,
+ ) {
+ self.0 += summary.output.0.len;
+ }
+}
+
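+/// Plain `usize` measures byte offsets on the input (unfiltered multibuffer) side.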
+impl sum_tree::Dimension<'_, TransformSummary> for usize {
+ fn zero(cx: <TransformSummary as sum_tree::Summary>::Context<'_>) -> Self {
+ 0
+ }
+
+ fn add_summary(
+ &mut self,
+ summary: &'_ TransformSummary,
+ cx: <TransformSummary as sum_tree::Summary>::Context<'_>,
+ ) {
+ *self += summary.input.0.len;
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
+struct FilterPoint(Point);
+
+impl sum_tree::Dimension<'_, TransformSummary> for FilterPoint {
+ fn zero(cx: <TransformSummary as sum_tree::Summary>::Context<'_>) -> Self {
+ FilterPoint(Point::zero())
+ }
+
+ fn add_summary(
+ &mut self,
+ summary: &'_ TransformSummary,
+ cx: <TransformSummary as sum_tree::Summary>::Context<'_>,
+ ) {
+ self.0 += summary.output.0.lines;
+ }
+}
+
+type FilterEdit = text::Edit<FilterOffset>;
+
+impl FilterMap {
+ fn new(mode: Option<FilterMode>, buffer_snapshot: MultiBufferSnapshot) -> Self {
+ let mut this = Self {
+ mode,
+ snapshot: FilterSnapshot {
+ buffer_snapshot: buffer_snapshot.clone(),
+ transforms: SumTree::default(),
+ },
+ };
+ this.sync(
+ buffer_snapshot.clone(),
+ vec![text::Edit {
+ old: 0..buffer_snapshot.len(),
+ new: 0..buffer_snapshot.len(),
+ }],
+ );
+ this
+ }
+
+ #[cfg(debug_assertions)]
+ fn check_invariants(&self) {
+ use itertools::Itertools;
+
+ assert_eq!(
+ self.snapshot.transforms.summary().input.0,
+ self.snapshot.buffer_snapshot.text_summary(),
+ "input summary does not match buffer snapshot"
+ );
+
+ self.snapshot
+ .transforms
+ .iter()
+ .tuple_windows()
+ .for_each(|(left, right)| {
+ assert!(
+ left.is_isomorphic() || right.is_isomorphic(),
+ "two consecutive non-isomorphic transforms"
+ );
+ assert!(
+ !left.is_isomorphic() || !right.is_isomorphic(),
+ "two consecutive isomorphic transforms"
+ );
+ });
+
+ let Some(mode) = self.mode else {
+ assert_eq!(
+ self.snapshot.transforms.iter().count(),
+ 1,
+ "more than one transform in a trivial map"
+ );
+ assert_eq!(
+ self.snapshot.transforms.summary().output.0,
+ self.snapshot.buffer_snapshot.text_summary(),
+ "output summary for trivial map does not match buffer snapshot"
+ );
+ return;
+ };
+
+ let mut expected_summary = TextSummary::default();
+ let mut anchor = multi_buffer::Anchor::min();
+ for hunk in self.snapshot.buffer_snapshot.diff_hunks() {
+ expected_summary += self
+ .snapshot
+ .buffer_snapshot
+ .text_summary_for_range::<TextSummary, _>(anchor..hunk.multi_buffer_range().start);
+ if !mode.should_remove(hunk.status().kind) {
+ expected_summary += self
+ .snapshot
+ .buffer_snapshot
+ .text_summary_for_range::<TextSummary, _>(hunk.multi_buffer_range());
+ }
+ anchor = hunk.multi_buffer_range().end;
+ }
+ expected_summary += self
+ .snapshot
+ .buffer_snapshot
+ .text_summary_for_range::<TextSummary, _>(anchor..multi_buffer::Anchor::max());
+
+ assert_eq!(
+ self.snapshot.transforms.summary().output.0,
+ expected_summary,
+ "wrong output summary for nontrivial map"
+ )
+ }
+}
+
+impl FilterMap {
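+    /// Reconciles the transform tree with a batch of buffer edits, returning the
+    /// updated snapshot along with the corresponding edits translated into filtered
+    /// (`FilterOffset`) coordinates. Transforms outside the edited ranges are reused.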
+ fn sync(
+ &mut self,
+ buffer_snapshot: MultiBufferSnapshot,
+ buffer_edits: Vec<text::Edit<usize>>,
+ ) -> (FilterSnapshot, Vec<FilterEdit>) {
+ let Some(mode) = self.mode else {
+ self.snapshot.buffer_snapshot = buffer_snapshot.clone();
+ self.snapshot.transforms = SumTree::from_item(
+ Transform::Isomorphic {
+ summary: WholeLineTextSummary(buffer_snapshot.text_summary()),
+ },
+ (),
+ );
+ return (
+ self.snapshot.clone(),
+ buffer_edits
+ .into_iter()
+ .map(|edit| text::Edit {
+ old: FilterOffset(edit.old.start)..FilterOffset(edit.old.end),
+ new: FilterOffset(edit.new.start)..FilterOffset(edit.new.end),
+ })
+ .collect(),
+ );
+ };
+
+ let mut new_transforms = SumTree::new(());
+ let mut cursor = self
+ .snapshot
+ .transforms
+ .cursor::<Dimensions<usize, FilterOffset>>(());
+ let mut output_edits = Vec::new();
+
+        // TODO: the code below repeatedly calls `text_summary_for_range`; a persistent
+        // `usize` cursor over `buffer_snapshot` would avoid re-traversing the buffer.
+
+ for buffer_edit in buffer_edits {
+ // Reuse any old transforms that strictly precede the start of the edit.
+ new_transforms.append(cursor.slice(&buffer_edit.old.end, Bias::Right), ());
+
+ let mut edit_old_start = cursor.start().1;
+ let mut edit_new_start = FilterOffset(new_transforms.summary().output.0.len);
+
+ // If the edit starts in the middle of a transform, split the transform and push the unaffected portion.
+ if buffer_edit.old.start > cursor.start().0 {
+ let summary = self
+ .snapshot
+ .buffer_snapshot
+ .text_summary_for_range(cursor.start().0..buffer_edit.old.start);
+ match cursor.item() {
+ Some(Transform::Isomorphic { .. }) => {
+ push_isomorphic(&mut new_transforms, summary);
+ edit_old_start.0 += summary.len;
+ edit_new_start.0 += summary.len;
+ cursor.next();
+ }
+ Some(Transform::Filter { .. }) => {
+ push_filter(&mut new_transforms, summary);
+ cursor.next();
+ }
+ None => {}
+ }
+ }
+
+ // For each hunk in the edit, push the non-hunk region preceding it, then
+ // possibly filter the hunk depending on the mode.
+ for hunk in buffer_snapshot.diff_hunks_in_range(buffer_edit.new.clone()) {
+ let mut hunk_range = hunk.multi_buffer_range().to_offset(&buffer_snapshot);
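+                // Clamp the hunk to this edit's new range, since hunks can extend beyond it.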
+ hunk_range.start = std::cmp::max(hunk_range.start, buffer_edit.new.start);
+ hunk_range.end = std::cmp::min(hunk_range.end, buffer_edit.new.end);
+ let prefix_range = new_transforms.summary().input.0.len..hunk_range.start;
+ push_isomorphic(
+ &mut new_transforms,
+ buffer_snapshot.text_summary_for_range(prefix_range),
+ );
+ let hunk_summary = buffer_snapshot.text_summary_for_range(hunk_range);
+ if (mode == FilterMode::RemoveDeletions)
+ == (hunk.status().kind == DiffHunkStatusKind::Deleted)
+ {
+ push_filter(&mut new_transforms, hunk_summary);
+ } else {
+ push_isomorphic(&mut new_transforms, hunk_summary);
+ }
+ }
+
+ // Push any non-hunk content after the last hunk.
+ if buffer_edit.new.end > new_transforms.summary().input.0.len {
+ let suffix_range = new_transforms.summary().input.0.len..buffer_edit.new.end;
+ push_isomorphic(
+ &mut new_transforms,
+ buffer_snapshot.text_summary_for_range(suffix_range),
+ );
+ }
+
+ // Set up the cursor for the next iteration by seeking it to the end of the edit
+ // and pushing the second half of any transform that's split thereby (we already
+ // covered the first half just above).
+ cursor.seek(&buffer_edit.old.end, Bias::Right);
+ let mut edit_old_end = cursor.end().1;
+ let mut edit_new_end = FilterOffset(new_transforms.summary().output.0.len);
+ if buffer_edit.old.end > cursor.start().0 {
+ let summary = self
+ .snapshot
+ .buffer_snapshot
+ .text_summary_for_range(buffer_edit.old.end..cursor.end().0);
+ match cursor.item() {
+ Some(Transform::Isomorphic { .. }) => {
+ push_isomorphic(&mut new_transforms, summary);
+ edit_old_end.0 += buffer_edit.old.end - cursor.start().0;
+ edit_new_end.0 += buffer_edit.old.end - cursor.start().0;
+ cursor.next();
+ }
+ Some(Transform::Filter { .. }) => {
+ push_filter(&mut new_transforms, summary);
+ cursor.next();
+ }
+ None => {}
+ }
+ }
+
+ output_edits.push(text::Edit {
+ old: edit_old_start..edit_old_end,
+ new: edit_new_start..edit_new_end,
+ })
+ }
+
+ // Append old transforms after the last edit.
+ new_transforms.append(cursor.slice(&usize::MAX, Bias::Left), ());
+
+ drop(cursor);
+
+ self.snapshot.transforms = new_transforms;
+ self.snapshot.buffer_snapshot = buffer_snapshot;
+ #[cfg(debug_assertions)]
+ self.check_invariants();
+ (self.snapshot.clone(), output_edits)
+ }
+}
+
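+/// Appends an isomorphic run, merging it into the previous transform when that one
+/// is also isomorphic, so that consecutive transforms keep alternating kinds.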
+fn push_isomorphic(transforms: &mut SumTree<Transform>, summary_to_add: TextSummary) {
+ let mut merged = false;
+ transforms.update_last(
+ |transform| {
+ if let Transform::Isomorphic { summary } = transform {
+ summary.0 += summary_to_add;
+ merged = true;
+ }
+ },
+ (),
+ );
+ if !merged {
+ transforms.push(
+ Transform::Isomorphic {
+ summary: WholeLineTextSummary(summary_to_add),
+ },
+ (),
+ );
+ }
+}
+
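+/// Appends a filtered (hidden) run, merging with a preceding filter transform when possible.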
+fn push_filter(transforms: &mut SumTree<Transform>, summary_to_add: TextSummary) {
+ let mut merged = false;
+ transforms.update_last(
+ |transform| {
+ if let Transform::Filter { summary } = transform {
+ summary.0 += summary_to_add;
+ merged = true;
+ }
+ },
+ (),
+ );
+ if !merged {
+ transforms.push(
+ Transform::Filter {
+ summary: WholeLineTextSummary(summary_to_add),
+ },
+ (),
+ );
+ }
+}
+
+impl FilterSnapshot {
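+    /// Returns the text summary of the filtered (output) text within `range`, computed
+    /// from the buffer content of the isomorphic transforms the range covers.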
+ fn text_summary_for_range(&self, range: Range<FilterOffset>) -> TextSummary {
+ let mut summary = TextSummary::default();
+
+ let mut cursor = self
+ .transforms
+ .cursor::<Dimensions<FilterOffset, usize>>(());
+ cursor.seek(&range.start, Bias::Right);
+
+ let overshoot = range.start.0 - cursor.start().0.0;
+ match cursor.item() {
+ Some(Transform::Isomorphic { .. }) => {
+ let buffer_start = cursor.start().1;
+ let suffix_start = buffer_start + overshoot;
+ let suffix_end =
+ buffer_start + (cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0);
+ summary = self
+ .buffer_snapshot
+ .text_summary_for_range(suffix_start..suffix_end);
+ cursor.next();
+ }
+ Some(Transform::Filter { .. }) | None => {}
+ }
+
+ if range.end > cursor.start().0 {
+ summary += cursor
+ .summary::<_, TransformSummary>(&range.end, Bias::Right)
+ .output
+ .0;
+
+ let overshoot = range.end.0 - cursor.start().0.0;
+ match cursor.item() {
+ Some(Transform::Isomorphic { .. }) => {
+ let prefix_start = cursor.start().1;
+ let prefix_end = prefix_start + overshoot;
+ summary += self
+ .buffer_snapshot
+ .text_summary_for_range::<TextSummary, _>(prefix_start..prefix_end);
+ }
+ Some(Transform::Filter { .. }) | None => {}
+ }
+ }
+
+ summary
+ }
+
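+    /// Converts a filtered byte offset into the corresponding filtered point (row and column).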
+ fn to_point(&self, offset: FilterOffset) -> FilterPoint {
+ let (start, _, item) = self
+ .transforms
+ .find::<Dimensions<FilterOffset, FilterPoint, usize>, _>((), &offset, Bias::Right);
+ let overshoot = offset.0 - start.0.0;
+ match item {
+ Some(Transform::Isomorphic { .. }) => {
+ let buffer_offset_start = start.2;
+ let buffer_offset_end = buffer_offset_start + overshoot;
+ let buffer_start = self.buffer_snapshot.offset_to_point(buffer_offset_start);
+ let buffer_end = self.buffer_snapshot.offset_to_point(buffer_offset_end);
+ FilterPoint(start.1.0 + (buffer_end - buffer_start))
+ }
+ Some(Transform::Filter { .. }) | None => self.max_point(),
+ }
+ }
+
+ fn max_point(&self) -> FilterPoint {
+ FilterPoint(self.transforms.summary().output.0.lines)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use collections::HashMap;
+ use gpui::{AppContext as _, Entity};
+ use language::{Buffer, Capability};
+    use multi_buffer::{MultiBuffer, randomly_mutate_multibuffer_with_diffs};
+ use rand::{Rng as _, rngs::StdRng};
+ use text::BufferId;
+
+    use crate::display_map::filter_map::{FilterMap, FilterMode};
+
+ #[gpui::test(iterations = 100)]
+ fn test_random_filter_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
+ let operations = std::env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let mut buffers: Vec<Entity<Buffer>> = Vec::new();
+ let mut base_texts: HashMap<BufferId, String> = HashMap::default();
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ let mut needs_diff_calculation = false;
+
+ let mode = if rng.random() {
+ FilterMode::RemoveDeletions
+ } else {
+ FilterMode::RemoveInsertions
+ };
+ let mut filter_map = FilterMap::new(
+ Some(mode),
+ multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)),
+ );
+
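+        // No explicit assertions here: `FilterMap::check_invariants` runs at the end
+        // of every `sync` in debug builds and validates the transform tree.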
+ for _ in 0..operations {
+ let subscription = multibuffer.update(cx, |multibuffer, cx| multibuffer.subscribe());
+ randomly_mutate_multibuffer_with_diffs(
+ multibuffer.clone(),
+ &mut buffers,
+ &mut base_texts,
+ &mut needs_diff_calculation,
+ rng.clone(),
+ cx,
+ );
+ cx.run_until_parked();
+ let buffer_edits = subscription.consume();
+ let buffer_snapshot =
+ multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+ filter_map.sync(buffer_snapshot, buffer_edits.edits().to_owned());
+ }
+ }
+}
@@ -978,6 +978,9 @@ impl InlaySnapshot {
let mut cursor = self.transforms.cursor::<Dimensions<InlayOffset, usize>>(());
cursor.seek(&range.start, Bias::Right);
+ // :SomeTypeInlay
+ // |---| >|----v-----|< |----------------| |------------v--|
+ // |---| >|----------|< |----------------| |------------v--|
let overshoot = range.start.0 - cursor.start().0.0;
match cursor.item() {
@@ -7173,3 +7173,141 @@ pub mod debug {
}
}
}
+
+#[cfg(feature = "test-support")]
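+/// Randomly mutates `multibuffer` for tests: edits existing buffers, expands or
+/// removes excerpts, recalculates diffs when needed, or inserts new excerpts,
+/// creating buffers and their `BufferDiff`s on demand.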
+pub fn randomly_mutate_multibuffer_with_diffs(
+ multibuffer: Entity<MultiBuffer>,
+ buffers: &mut Vec<Entity<Buffer>>,
+ base_texts: &mut HashMap<BufferId, String>,
+ needs_diff_calculation: &mut bool,
+ mut rng: rand::rngs::StdRng,
+ cx: &mut gpui::TestAppContext,
+) {
+ use rand::Rng as _;
+ use rand::seq::IndexedRandom as _;
+
+ let excerpt_ids = multibuffer.read_with(cx, |multibuffer, _| multibuffer.excerpt_ids());
+ match rng.random_range(0..100) {
+ 0..=14 if !buffers.is_empty() => {
+ let buffer = buffers.choose(&mut rng).unwrap();
+ buffer.update(cx, |buf, cx| {
+ let edit_count = rng.random_range(1..5);
+ buf.randomly_edit(&mut rng, edit_count, cx);
+ log::info!("buffer text:\n{}", buf.text());
+ *needs_diff_calculation = true;
+ });
+ }
+ 15..=19 if !excerpt_ids.is_empty() => {
+ multibuffer.update(cx, |multibuffer, cx| {
+ let ids = multibuffer.excerpt_ids();
+ let mut excerpts = HashSet::default();
+ for _ in 0..rng.random_range(0..ids.len()) {
+ excerpts.extend(ids.choose(&mut rng).copied());
+ }
+
+ let line_count = rng.random_range(0..5);
+
+ log::info!("Expanding excerpts by {line_count} lines");
+ multibuffer.expand_excerpts(
+ excerpts.iter().cloned(),
+ line_count,
+ ExpandExcerptDirection::UpAndDown,
+ cx,
+ );
+ });
+ }
+ 20..=29 if !excerpt_ids.is_empty() => {
+ let mut ids_to_remove = vec![];
+ for _ in 0..rng.random_range(1..=3) {
+ let Some(id) = excerpt_ids.choose(&mut rng) else {
+ break;
+ };
+ ids_to_remove.push(*id);
+ }
+ let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+ ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot));
+ drop(snapshot);
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.remove_excerpts(ids_to_remove, cx)
+ });
+ }
+ 56..=85 if *needs_diff_calculation => {
+ multibuffer.update(cx, |multibuffer, cx| {
+ for buffer in multibuffer.all_buffers() {
+ let snapshot = buffer.read(cx).snapshot();
+ multibuffer
+ .diff_for(snapshot.remote_id())
+ .unwrap()
+ .update(cx, |diff, cx| {
+ log::info!("recalculating diff for buffer {:?}", snapshot.remote_id(),);
+ diff.recalculate_diff_sync(snapshot.text, cx);
+ });
+ }
+ *needs_diff_calculation = false;
+ });
+ }
+ _ => {
+ let buffer_handle = if buffers.is_empty() || rng.random_bool(0.4) {
+ let mut base_text = util::RandomCharIter::new(&mut rng)
+ .take(256)
+ .collect::<String>();
+
+ let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx));
+ text::LineEnding::normalize(&mut base_text);
+ base_texts.insert(
+ buffer.read_with(cx, |buffer, _| buffer.remote_id()),
+ base_text,
+ );
+ buffers.push(buffer);
+ buffers.last().unwrap()
+ } else {
+ buffers.choose(&mut rng).unwrap()
+ };
+
+ let prev_excerpt_id = multibuffer
+ .read_with(cx, |multibuffer, cx| multibuffer.excerpt_ids())
+ .choose(&mut rng)
+ .copied()
+ .unwrap_or(ExcerptId::max());
+
+ let range = buffer_handle.read_with(cx, |buffer, _| {
+ let end_row = rng.random_range(0..=buffer.max_point().row);
+ let start_row = rng.random_range(0..=end_row);
+ let end_ix = buffer.point_to_offset(Point::new(end_row, 0));
+ let start_ix = buffer.point_to_offset(Point::new(start_row, 0));
+
+ log::info!(
+ "Inserting excerpt for buffer {}: {:?}[{:?}] = {:?}",
+ buffer.remote_id(),
+ buffer.text(),
+ start_ix..end_ix,
+ &buffer.text()[start_ix..end_ix]
+ );
+
+ start_ix..end_ix
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ let id = buffer_handle.read(cx).remote_id();
+ if multibuffer.diff_for(id).is_none() {
+ let base_text = base_texts.get(&id).unwrap();
+ let diff =
+ cx.new(|cx| BufferDiff::new_with_base_text(base_text, buffer_handle, cx));
+ multibuffer.add_diff(diff, cx)
+ }
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer
+ .insert_excerpts_after(
+ prev_excerpt_id,
+ buffer_handle.clone(),
+ [ExcerptRange::new(range.clone())],
+ cx,
+ )
+ .pop()
+ .unwrap()
+ });
+ }
+ }
+}