Detailed changes
@@ -38,3 +38,7 @@ slow-timeout = { period = "300s", terminate-after = 1 }
[[profile.default.overrides]]
filter = 'package(vim) and (test(test_command_read) or test(test_capital_f_and_capital_t) or test(test_f_and_t) or test(test_change_paragraph_object) or test(test_change_surrounding_character_objects) or test(test_change_word_object) or test(test_delete_paragraph_object) or test(test_delete_surrounding_character_objects) or test(test_delete_word_object))'
slow-timeout = { period = "300s", terminate-after = 1 }
+
+[[profile.default.overrides]]
+filter = 'package(editor) and test(test_random_split_editor)'
+slow-timeout = { period = "300s", terminate-after = 1 }
@@ -5516,6 +5516,7 @@ dependencies = [
"ui",
"unicode-script",
"unicode-segmentation",
+ "unicode-width",
"unindent",
"url",
"util",
@@ -707,6 +707,7 @@ tracing = "0.1.40"
unicase = "2.6"
unicode-script = "0.5.7"
unicode-segmentation = "1.10"
+unicode-width = "0.2"
unindent = "0.2.0"
url = "2.2"
urlencoding = "2.1.2"
@@ -227,7 +227,7 @@ impl PendingDiff {
diff.update_diff(
text_snapshot.clone(),
Some(base_text.clone()),
- false,
+ None,
language,
cx,
)
@@ -399,7 +399,7 @@ async fn build_buffer_diff(
secondary_diff.update_diff(
text_snapshot.clone(),
Some(old_text),
- true,
+ Some(false),
language.clone(),
cx,
)
@@ -408,7 +408,7 @@ impl ActionLog {
diff.update_diff(
buffer_snapshot.clone(),
Some(new_base_text),
- true,
+ Some(true),
language,
cx,
)
@@ -302,7 +302,7 @@ impl CodegenAlternative {
let snapshot = buffer.read(cx).snapshot(cx);
let (old_buffer, _, _) = snapshot
- .range_to_buffer_ranges(range.clone())
+ .range_to_buffer_ranges(range.start..=range.end)
.pop()
.unwrap();
let old_buffer = cx.new(|cx| {
@@ -679,7 +679,7 @@ impl CodegenAlternative {
let language_name = {
let multibuffer = self.buffer.read(cx);
let snapshot = multibuffer.snapshot(cx);
- let ranges = snapshot.range_to_buffer_ranges(self.range.clone());
+ let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
@@ -1074,7 +1074,8 @@ impl InlineAssistant {
let language_name = assist.editor.upgrade().and_then(|editor| {
let multibuffer = editor.read(cx).buffer().read(cx);
let snapshot = multibuffer.snapshot(cx);
- let ranges = snapshot.range_to_buffer_ranges(assist.range.clone());
+ let ranges =
+ snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
@@ -2,21 +2,51 @@ use futures::channel::oneshot;
use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Task};
use language::{
- BufferRow, Capability, DiffOptions, File, Language, LanguageName, LanguageRegistry,
+ Capability, Diff, DiffOptions, File, Language, LanguageName, LanguageRegistry,
language_settings::language_settings, word_diff_ranges,
};
use rope::Rope;
use std::{cmp::Ordering, future::Future, iter, ops::Range, sync::Arc};
use sum_tree::SumTree;
-use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _, ToPoint as _};
+use text::{
+ Anchor, Bias, BufferId, Edit, OffsetRangeExt, Patch, Point, ToOffset as _, ToPoint as _,
+};
use util::ResultExt;
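+
+/// Translates `point` through `patch`. Returns the corresponding range in the
+/// patch's new coordinate space, paired with the old-space range it was
+/// resolved from: a point inside an edit expands to that edit's full new
+/// range, while a point past the preceding edit is shifted by that edit's
+/// cumulative delta.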
+fn translate_point_through_patch(
+ patch: &Patch<Point>,
+ point: Point,
+) -> (Range<Point>, Range<Point>) {
+ let edits = patch.edits();
+
+ let ix = match edits.binary_search_by(|probe| probe.old.start.cmp(&point)) {
+ Ok(ix) => ix,
+ Err(ix) => {
+ if ix == 0 {
+ return (point..point, point..point);
+ } else {
+ ix - 1
+ }
+ }
+ };
+
+ if let Some(edit) = edits.get(ix) {
+ if point > edit.old.end {
+ let translated = edit.new.end + (point - edit.old.end);
+ (translated..translated, point..point)
+ } else {
+ (edit.new.start..edit.new.end, edit.old.start..edit.old.end)
+ }
+ } else {
+ (point..point, point..point)
+ }
+}
+
pub const MAX_WORD_DIFF_LINE_COUNT: usize = 5;
pub struct BufferDiff {
pub buffer_id: BufferId,
inner: BufferDiffInner<Entity<language::Buffer>>,
- // diff of the index vs head
secondary_diff: Option<Entity<BufferDiff>>,
}
@@ -37,8 +67,10 @@ impl std::fmt::Debug for BufferDiffSnapshot {
#[derive(Clone)]
pub struct BufferDiffUpdate {
- base_text_changed: bool,
inner: BufferDiffInner<Arc<str>>,
+ buffer_snapshot: text::BufferSnapshot,
+ base_text_edits: Option<Diff>,
+ base_text_changed: bool,
}
#[derive(Clone)]
@@ -47,6 +79,13 @@ struct BufferDiffInner<BaseText> {
pending_hunks: SumTree<PendingHunk>,
base_text: BaseText,
base_text_exists: bool,
+ buffer_snapshot: text::BufferSnapshot,
+}
+
+impl<BaseText> BufferDiffInner<BaseText> {
+ fn buffer_version(&self) -> &clock::Global {
+ self.buffer_snapshot.version()
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -182,6 +221,18 @@ impl sum_tree::SeekTarget<'_, DiffHunkSummary, DiffHunkSummary> for Anchor {
}
}
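+/// Allows seeking a hunk cursor by base-text byte offset: an offset compares
+/// equal to any hunk whose `diff_base_byte_range` contains it.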
+impl sum_tree::SeekTarget<'_, DiffHunkSummary, DiffHunkSummary> for usize {
+ fn cmp(&self, cursor_location: &DiffHunkSummary, _cx: &text::BufferSnapshot) -> Ordering {
+ if *self < cursor_location.diff_base_byte_range.start {
+ Ordering::Less
+ } else if *self > cursor_location.diff_base_byte_range.end {
+ Ordering::Greater
+ } else {
+ Ordering::Equal
+ }
+ }
+}
+
impl std::fmt::Debug for BufferDiffInner<language::BufferSnapshot> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("BufferDiffSnapshot")
@@ -216,6 +267,14 @@ impl BufferDiffSnapshot {
self.secondary_diff.as_deref()
}
+ pub fn buffer_version(&self) -> &clock::Global {
+ self.inner.buffer_version()
+ }
+
+ pub fn original_buffer_snapshot(&self) -> &text::BufferSnapshot {
+ &self.inner.buffer_snapshot
+ }
+
#[ztracing::instrument(skip_all)]
pub fn hunks_intersecting_range<'a>(
&'a self,
@@ -326,63 +385,182 @@ impl BufferDiffSnapshot {
(new_id == old_id && new_version == old_version) || (new_empty && old_empty)
}
- pub fn row_to_base_text_row(
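+    /// Seeks `cursor` forward to the last hunk whose base-text range starts at
+    /// or before `target`, resetting the cursor when no such hunk exists.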
+ #[allow(unused)]
+ fn hunk_before_base_text_offset<'a>(
&self,
- row: BufferRow,
- bias: Bias,
- buffer: &text::BufferSnapshot,
- ) -> u32 {
- // TODO(split-diff) expose a parameter to reuse a cursor to avoid repeatedly seeking from the start
- let target = buffer.anchor_before(Point::new(row, 0));
- // Find the last hunk that starts before the target.
- let mut cursor = self.inner.hunks.cursor::<DiffHunkSummary>(buffer);
- cursor.seek(&target, Bias::Left);
+ target: usize,
+ cursor: &mut sum_tree::Cursor<'a, '_, InternalDiffHunk, DiffHunkSummary>,
+ ) -> Option<&'a InternalDiffHunk> {
+ cursor.seek_forward(&target, Bias::Left);
if cursor
.item()
- .is_none_or(|hunk| hunk.buffer_range.start.cmp(&target, buffer).is_gt())
+ .is_none_or(|hunk| target < hunk.diff_base_byte_range.start)
{
cursor.prev();
}
+ let result = cursor
+ .item()
+ .filter(|hunk| target >= hunk.diff_base_byte_range.start);
+ if cursor.item().is_none() {
+ cursor.reset();
+ }
+ result
+ }
- let unclipped_point = if let Some(hunk) = cursor.item()
- && hunk.buffer_range.start.cmp(&target, buffer).is_le()
+ #[allow(unused)]
+ fn hunk_before_buffer_anchor<'a>(
+ &self,
+ target: Anchor,
+ cursor: &mut sum_tree::Cursor<'a, '_, InternalDiffHunk, DiffHunkSummary>,
+ buffer: &text::BufferSnapshot,
+ ) -> Option<&'a InternalDiffHunk> {
+ cursor.seek_forward(&target, Bias::Left);
+ if cursor
+ .item()
+ .is_none_or(|hunk| target.cmp(&hunk.buffer_range.start, buffer).is_lt())
{
- // Found a hunk that starts before the target.
- let hunk_base_text_end = cursor.end().diff_base_byte_range.end;
- let unclipped_point = if target.cmp(&cursor.end().buffer_range.end, buffer).is_ge() {
- // Target falls strictly between two hunks.
- let mut unclipped_point = hunk_base_text_end.to_point(self.base_text());
- unclipped_point +=
- Point::new(row, 0) - cursor.end().buffer_range.end.to_point(buffer);
- unclipped_point
- } else if bias == Bias::Right {
- hunk_base_text_end.to_point(self.base_text())
+ cursor.prev();
+ }
+ let result = cursor
+ .item()
+ .filter(|hunk| target.cmp(&hunk.buffer_range.start, buffer).is_ge());
+ if cursor.item().is_none() {
+ cursor.reset();
+ }
+ result
+ }
+
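+    /// Maps buffer points to ranges in the diff's base text by composing the
+    /// inverse of edits made since this snapshot's buffer version with the
+    /// diff hunks, also returning auxiliary mappings for the first point and
+    /// for the row immediately preceding it.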
+ pub fn points_to_base_text_points<'a>(
+ &'a self,
+ points: impl IntoIterator<Item = Point> + 'a,
+ buffer: &'a text::BufferSnapshot,
+ ) -> (
+ impl 'a + Iterator<Item = Range<Point>>,
+ Option<Range<Point>>,
+ Option<(Point, Range<Point>)>,
+ ) {
+ let original_snapshot = self.original_buffer_snapshot();
+
+ let edits_since: Vec<Edit<Point>> = buffer
+ .edits_since::<Point>(original_snapshot.version())
+ .collect();
+ let mut inverted_edits_since = Patch::new(edits_since);
+ inverted_edits_since.invert();
+
+ let composed = inverted_edits_since.compose(
+ self.inner
+ .hunks
+ .iter()
+ .map(|hunk| {
+ let old_start = hunk.buffer_range.start.to_point(original_snapshot);
+ let old_end = hunk.buffer_range.end.to_point(original_snapshot);
+ let new_start = self
+ .base_text()
+ .offset_to_point(hunk.diff_base_byte_range.start);
+ let new_end = self
+ .base_text()
+ .offset_to_point(hunk.diff_base_byte_range.end);
+ Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ }
+ })
+ .chain(
+ if !self.inner.base_text_exists && self.inner.hunks.is_empty() {
+ Some(Edit {
+ old: Point::zero()..original_snapshot.max_point(),
+ new: Point::zero()..Point::zero(),
+ })
+ } else {
+ None
+ },
+ ),
+ );
+
+ let mut points = points.into_iter().peekable();
+
+ let first_group = points.peek().map(|point| {
+ let (_, old_range) = translate_point_through_patch(&composed, *point);
+ old_range
+ });
+
+ let prev_boundary = points.peek().and_then(|first_point| {
+ if first_point.row > 0 {
+ let prev_point = Point::new(first_point.row - 1, 0);
+ let (range, _) = translate_point_through_patch(&composed, prev_point);
+ Some((prev_point, range))
} else {
- hunk.diff_base_byte_range.start.to_point(self.base_text())
- };
- // Move the cursor so that at the next step we can clip with the start of the next hunk.
- cursor.next();
- unclipped_point
- } else {
- // Target is before the added region for the first hunk.
- debug_assert!(self.inner.hunks.first().is_none_or(|first_hunk| {
- target.cmp(&first_hunk.buffer_range.start, buffer).is_le()
- }));
- Point::new(row, 0)
- };
+ None
+ }
+ });
- // If the target falls in the region between two hunks, we added an overshoot above.
- // There may be changes in the main buffer that are not reflected in the hunks,
- // so we need to ensure this overshoot keeps us in the corresponding base text region.
- let max_point = if let Some(next_hunk) = cursor.item() {
- next_hunk
- .diff_base_byte_range
- .start
- .to_point(self.base_text())
- } else {
- self.base_text().max_point()
- };
- unclipped_point.min(max_point).row
+ let iter = points.map(move |point| {
+ let (range, _) = translate_point_through_patch(&composed, point);
+ range
+ });
+
+ (iter, first_group, prev_boundary)
+ }
+
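+    /// Maps base-text points to ranges in the current buffer by composing the
+    /// diff hunks with edits made since this snapshot's buffer version, also
+    /// returning auxiliary mappings for the first point and for the row
+    /// immediately preceding it.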
+ pub fn base_text_points_to_points<'a>(
+ &'a self,
+ points: impl IntoIterator<Item = Point> + 'a,
+ buffer: &'a text::BufferSnapshot,
+ ) -> (
+ impl 'a + Iterator<Item = Range<Point>>,
+ Option<Range<Point>>,
+ Option<(Point, Range<Point>)>,
+ ) {
+ let original_snapshot = self.original_buffer_snapshot();
+
+ let mut hunk_edits: Vec<Edit<Point>> = Vec::new();
+ for hunk in self.inner.hunks.iter() {
+ let old_start = self
+ .base_text()
+ .offset_to_point(hunk.diff_base_byte_range.start);
+ let old_end = self
+ .base_text()
+ .offset_to_point(hunk.diff_base_byte_range.end);
+ let new_start = hunk.buffer_range.start.to_point(original_snapshot);
+ let new_end = hunk.buffer_range.end.to_point(original_snapshot);
+ hunk_edits.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+ }
+ if !self.inner.base_text_exists && hunk_edits.is_empty() {
+ hunk_edits.push(Edit {
+ old: Point::zero()..Point::zero(),
+ new: Point::zero()..original_snapshot.max_point(),
+ })
+ }
+ let hunk_patch = Patch::new(hunk_edits);
+
+ let composed = hunk_patch.compose(buffer.edits_since::<Point>(original_snapshot.version()));
+
+ let mut points = points.into_iter().peekable();
+
+ let first_group = points.peek().map(|point| {
+ let (_, result) = translate_point_through_patch(&composed, *point);
+ result
+ });
+
+ let prev_boundary = points.peek().and_then(|first_point| {
+ if first_point.row > 0 {
+ let prev_point = Point::new(first_point.row - 1, 0);
+ let (range, _) = translate_point_through_patch(&composed, prev_point);
+ Some((prev_point, range))
+ } else {
+ None
+ }
+ });
+
+ let iter = points.map(move |point| {
+ let (range, _) = translate_point_through_patch(&composed, point);
+ range
+ });
+
+ (iter, first_group, prev_boundary)
}
}
@@ -862,8 +1040,9 @@ fn compute_hunks(
fn compare_hunks(
new_hunks: &SumTree<InternalDiffHunk>,
old_hunks: &SumTree<InternalDiffHunk>,
+ old_snapshot: &text::BufferSnapshot,
new_snapshot: &text::BufferSnapshot,
-) -> (Option<Range<Anchor>>, Option<Range<usize>>) {
+) -> DiffChanged {
let mut new_cursor = new_hunks.cursor::<()>(new_snapshot);
let mut old_cursor = old_hunks.cursor::<()>(new_snapshot);
old_cursor.next();
@@ -873,6 +1052,10 @@ fn compare_hunks(
let mut base_text_start = None;
let mut base_text_end = None;
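+    // Track how far the changed range may later be extended: the end of the
+    // last unchanged hunk seen before any change, and the start of the first
+    // unchanged hunk seen after one.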
+ let mut last_unchanged_new_hunk_end: Option<text::Anchor> = None;
+ let mut has_changes = false;
+ let mut extended_end_candidate: Option<text::Anchor> = None;
+
loop {
match (new_cursor.item(), old_cursor.item()) {
(Some(new_hunk), Some(old_hunk)) => {
@@ -882,6 +1065,8 @@ fn compare_hunks(
.cmp(&old_hunk.buffer_range.start, new_snapshot)
{
Ordering::Less => {
+ has_changes = true;
+ extended_end_candidate = None;
start.get_or_insert(new_hunk.buffer_range.start);
base_text_start.get_or_insert(new_hunk.diff_base_byte_range.start);
end.replace(new_hunk.buffer_range.end);
@@ -890,6 +1075,8 @@ fn compare_hunks(
}
Ordering::Equal => {
if new_hunk != old_hunk {
+ has_changes = true;
+ extended_end_candidate = None;
start.get_or_insert(new_hunk.buffer_range.start);
base_text_start.get_or_insert(new_hunk.diff_base_byte_range.start);
if old_hunk
@@ -909,12 +1096,20 @@ fn compare_hunks(
.end
.max(new_hunk.diff_base_byte_range.end),
);
+ } else {
+ if !has_changes {
+ last_unchanged_new_hunk_end = Some(new_hunk.buffer_range.end);
+ } else if extended_end_candidate.is_none() {
+ extended_end_candidate = Some(new_hunk.buffer_range.start);
+ }
}
new_cursor.next();
old_cursor.next();
}
Ordering::Greater => {
+ has_changes = true;
+ extended_end_candidate = None;
start.get_or_insert(old_hunk.buffer_range.start);
base_text_start.get_or_insert(old_hunk.diff_base_byte_range.start);
end.replace(old_hunk.buffer_range.end);
@@ -924,18 +1119,26 @@ fn compare_hunks(
}
}
(Some(new_hunk), None) => {
+ has_changes = true;
+ extended_end_candidate = None;
start.get_or_insert(new_hunk.buffer_range.start);
base_text_start.get_or_insert(new_hunk.diff_base_byte_range.start);
- // TODO(cole) it seems like this could move end backward?
- end.replace(new_hunk.buffer_range.end);
+ if end.is_none_or(|end| end.cmp(&new_hunk.buffer_range.end, &new_snapshot).is_le())
+ {
+ end.replace(new_hunk.buffer_range.end);
+ }
base_text_end = base_text_end.max(Some(new_hunk.diff_base_byte_range.end));
new_cursor.next();
}
(None, Some(old_hunk)) => {
+ has_changes = true;
+ extended_end_candidate = None;
start.get_or_insert(old_hunk.buffer_range.start);
base_text_start.get_or_insert(old_hunk.diff_base_byte_range.start);
- // TODO(cole) it seems like this could move end backward?
- end.replace(old_hunk.buffer_range.end);
+ if end.is_none_or(|end| end.cmp(&old_hunk.buffer_range.end, &new_snapshot).is_le())
+ {
+ end.replace(old_hunk.buffer_range.end);
+ }
base_text_end = base_text_end.max(Some(old_hunk.diff_base_byte_range.end));
old_cursor.next();
}
@@ -943,12 +1146,46 @@ fn compare_hunks(
}
}
- (
- start.zip(end).map(|(start, end)| start..end),
- base_text_start
- .zip(base_text_end)
- .map(|(start, end)| start..end),
- )
+ let changed_range = start.zip(end).map(|(start, end)| start..end);
+ let base_text_changed_range = base_text_start
+ .zip(base_text_end)
+ .map(|(start, end)| start..end);
+
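+    // Extend the changed range to cover buffer edits (relative to the old
+    // snapshot) that fall between it and the nearest unchanged hunks on
+    // either side.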
+ let extended_range = if has_changes && let Some(changed_range) = changed_range.clone() {
+ let extended_start = *last_unchanged_new_hunk_end
+ .unwrap_or(text::Anchor::min_for_buffer(new_snapshot.remote_id()))
+ .min(&changed_range.start, new_snapshot);
+ let extended_start = new_snapshot
+ .anchored_edits_since_in_range::<usize>(
+ &old_snapshot.version(),
+ extended_start..changed_range.start,
+ )
+ .map(|(_, anchors)| anchors.start)
+ .min_by(|a, b| a.cmp(b, new_snapshot))
+ .unwrap_or(changed_range.start);
+
+ let extended_end = *extended_end_candidate
+ .unwrap_or(text::Anchor::max_for_buffer(new_snapshot.remote_id()))
+ .max(&changed_range.end, new_snapshot);
+ let extended_end = new_snapshot
+ .anchored_edits_since_in_range::<usize>(
+ &old_snapshot.version(),
+ changed_range.end..extended_end,
+ )
+ .map(|(_, anchors)| anchors.end)
+ .max_by(|a, b| a.cmp(b, new_snapshot))
+ .unwrap_or(changed_range.end);
+
+ Some(extended_start..extended_end)
+ } else {
+ None
+ };
+
+ DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range,
+ }
}
fn process_patch_hunk(
@@ -1076,12 +1313,16 @@ impl std::fmt::Debug for BufferDiff {
}
}
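+/// Describes which parts of a diff changed during an update: the changed
+/// range in the buffer, the corresponding byte range in the base text, and a
+/// possibly wider range that also covers nearby buffer edits.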
+#[derive(Clone, Debug, Default)]
+pub struct DiffChanged {
+ pub changed_range: Option<Range<text::Anchor>>,
+ pub base_text_changed_range: Option<Range<usize>>,
+ pub extended_range: Option<Range<text::Anchor>>,
+}
+
#[derive(Clone, Debug)]
pub enum BufferDiffEvent {
- DiffChanged {
- changed_range: Option<Range<text::Anchor>>,
- base_text_changed_range: Option<Range<usize>>,
- },
+ DiffChanged(DiffChanged),
LanguageChanged,
HunksStagedOrUnstaged(Option<Rope>),
}
@@ -1103,6 +1344,7 @@ impl BufferDiff {
hunks: SumTree::new(buffer),
pending_hunks: SumTree::new(buffer),
base_text_exists: false,
+ buffer_snapshot: buffer.clone(),
},
secondary_diff: None,
}
@@ -1123,6 +1365,7 @@ impl BufferDiff {
hunks: SumTree::new(buffer),
pending_hunks: SumTree::new(buffer),
base_text_exists: true,
+ buffer_snapshot: buffer.clone(),
},
secondary_diff: None,
}
@@ -1140,7 +1383,7 @@ impl BufferDiff {
let inner = cx.foreground_executor().block_on(this.update_diff(
buffer.clone(),
Some(Arc::from(base_text)),
- true,
+ Some(false),
None,
cx,
));
@@ -1162,10 +1405,13 @@ impl BufferDiff {
buffer_range: Anchor::min_min_range_for_buffer(self.buffer_id),
diff_base_byte_range: 0..0,
});
- cx.emit(BufferDiffEvent::DiffChanged {
- changed_range: Some(Anchor::min_max_range_for_buffer(self.buffer_id)),
- base_text_changed_range: Some(0..self.base_text(cx).len()),
- });
+ let changed_range = Some(Anchor::min_max_range_for_buffer(self.buffer_id));
+ let base_text_range = Some(0..self.base_text(cx).len());
+ cx.emit(BufferDiffEvent::DiffChanged(DiffChanged {
+ changed_range: changed_range.clone(),
+ base_text_changed_range: base_text_range,
+ extended_range: changed_range,
+ }));
}
}
@@ -1195,13 +1441,14 @@ impl BufferDiff {
new_index_text.clone(),
));
if let Some((first, last)) = hunks.first().zip(hunks.last()) {
- let changed_range = first.buffer_range.start..last.buffer_range.end;
+ let changed_range = Some(first.buffer_range.start..last.buffer_range.end);
let base_text_changed_range =
- first.diff_base_byte_range.start..last.diff_base_byte_range.end;
- cx.emit(BufferDiffEvent::DiffChanged {
- changed_range: Some(changed_range),
- base_text_changed_range: Some(base_text_changed_range),
- });
+ Some(first.diff_base_byte_range.start..last.diff_base_byte_range.end);
+ cx.emit(BufferDiffEvent::DiffChanged(DiffChanged {
+ changed_range: changed_range.clone(),
+ base_text_changed_range,
+ extended_range: changed_range,
+ }));
}
new_index_text
}
@@ -1224,13 +1471,14 @@ impl BufferDiff {
self.inner
.stage_or_unstage_hunks_impl(&secondary, stage, &hunks, buffer, file_exists, cx);
if let Some((first, last)) = hunks.first().zip(hunks.last()) {
- let changed_range = first.buffer_range.start..last.buffer_range.end;
+ let changed_range = Some(first.buffer_range.start..last.buffer_range.end);
let base_text_changed_range =
- first.diff_base_byte_range.start..last.diff_base_byte_range.end;
- cx.emit(BufferDiffEvent::DiffChanged {
- changed_range: Some(changed_range),
- base_text_changed_range: Some(base_text_changed_range),
- });
+ Some(first.diff_base_byte_range.start..last.diff_base_byte_range.end);
+ cx.emit(BufferDiffEvent::DiffChanged(DiffChanged {
+ changed_range: changed_range.clone(),
+ base_text_changed_range,
+ extended_range: changed_range,
+ }));
}
}
@@ -1238,45 +1486,73 @@ impl BufferDiff {
&self,
buffer: text::BufferSnapshot,
base_text: Option<Arc<str>>,
- base_text_changed: bool,
+ base_text_change: Option<bool>,
language: Option<Arc<Language>>,
cx: &App,
) -> Task<BufferDiffUpdate> {
let prev_base_text = self.base_text(cx).as_rope().clone();
+ let base_text_changed = base_text_change.is_some();
+ let compute_base_text_edits = base_text_change == Some(true);
let diff_options = build_diff_options(
None,
language.as_ref().map(|l| l.name()),
language.as_ref().map(|l| l.default_scope()),
cx,
);
+ let buffer_snapshot = buffer.clone();
- cx.background_executor().spawn(async move {
- let base_text_rope = if let Some(base_text) = &base_text {
- if base_text_changed {
- Rope::from(base_text.as_ref())
+ let base_text_diff_task = if base_text_changed && compute_base_text_edits {
+ base_text
+ .as_ref()
+ .map(|new_text| self.inner.base_text.read(cx).diff(new_text.clone(), cx))
+ } else {
+ None
+ };
+
+ let hunk_task = cx.background_executor().spawn({
+ let buffer_snapshot = buffer_snapshot.clone();
+ async move {
+ let base_text_rope = if let Some(base_text) = &base_text {
+ if base_text_changed {
+ Rope::from(base_text.as_ref())
+ } else {
+ prev_base_text
+ }
} else {
- prev_base_text
+ Rope::new()
+ };
+ let base_text_exists = base_text.is_some();
+ let hunks = compute_hunks(
+ base_text
+ .clone()
+ .map(|base_text| (base_text, base_text_rope.clone())),
+ buffer.clone(),
+ diff_options,
+ );
+ let base_text = base_text.unwrap_or_default();
+ BufferDiffInner {
+ base_text,
+ hunks,
+ base_text_exists,
+ pending_hunks: SumTree::new(&buffer),
+ buffer_snapshot,
}
- } else {
- Rope::new()
- };
- let base_text_exists = base_text.is_some();
- let hunks = compute_hunks(
- base_text
- .clone()
- .map(|base_text| (base_text, base_text_rope.clone())),
- buffer.clone(),
- diff_options,
- );
- let base_text = base_text.unwrap_or_default();
- let inner = BufferDiffInner {
- base_text,
- hunks,
- base_text_exists,
- pending_hunks: SumTree::new(&buffer),
+ }
+ });
+
+ cx.background_executor().spawn(async move {
+ let (inner, base_text_edits) = match base_text_diff_task {
+ Some(diff_task) => {
+ let (inner, diff) = futures::join!(hunk_task, diff_task);
+ (inner, Some(diff))
+ }
+ None => (hunk_task.await, None),
};
+
BufferDiffUpdate {
inner,
+ buffer_snapshot,
+ base_text_edits,
base_text_changed,
}
})
@@ -1313,23 +1589,37 @@ impl BufferDiff {
secondary_diff_change: Option<Range<Anchor>>,
clear_pending_hunks: bool,
cx: &mut Context<Self>,
- ) -> impl Future<Output = (Option<Range<Anchor>>, Option<Range<usize>>)> + use<> {
+ ) -> impl Future<Output = DiffChanged> + use<> {
log::debug!("set snapshot with secondary {secondary_diff_change:?}");
let old_snapshot = self.snapshot(cx);
let state = &mut self.inner;
let new_state = update.inner;
- let (mut changed_range, mut base_text_changed_range) =
- match (state.base_text_exists, new_state.base_text_exists) {
- (false, false) => (None, None),
- (true, true) if !update.base_text_changed => {
- compare_hunks(&new_state.hunks, &old_snapshot.inner.hunks, buffer)
+ let base_text_changed = update.base_text_changed;
+
+ let old_buffer_snapshot = &old_snapshot.inner.buffer_snapshot;
+ let DiffChanged {
+ mut changed_range,
+ mut base_text_changed_range,
+ mut extended_range,
+ } = match (state.base_text_exists, new_state.base_text_exists) {
+ (false, false) => DiffChanged::default(),
+ (true, true) if !base_text_changed => compare_hunks(
+ &new_state.hunks,
+ &old_snapshot.inner.hunks,
+ old_buffer_snapshot,
+ buffer,
+ ),
+ _ => {
+ let full_range = text::Anchor::min_max_range_for_buffer(self.buffer_id);
+ let full_base_range = 0..new_state.base_text.len();
+ DiffChanged {
+ changed_range: Some(full_range.clone()),
+ base_text_changed_range: Some(full_base_range),
+ extended_range: Some(full_range),
}
- _ => (
- Some(text::Anchor::min_max_range_for_buffer(self.buffer_id)),
- Some(0..new_state.base_text.len()),
- ),
- };
+ }
+ };
if let Some(secondary_changed_range) = secondary_diff_change
&& let (Some(secondary_hunk_range), Some(secondary_base_range)) =
@@ -1339,20 +1629,34 @@ impl BufferDiff {
range.start = *secondary_hunk_range.start.min(&range.start, buffer);
range.end = *secondary_hunk_range.end.max(&range.end, buffer);
} else {
- changed_range = Some(secondary_hunk_range);
+ changed_range = Some(secondary_hunk_range.clone());
}
- if let Some(base_text_range) = &mut base_text_changed_range {
+ if let Some(base_text_range) = base_text_changed_range.as_mut() {
base_text_range.start = secondary_base_range.start.min(base_text_range.start);
base_text_range.end = secondary_base_range.end.max(base_text_range.end);
} else {
base_text_changed_range = Some(secondary_base_range);
}
+
+ if let Some(ext) = &mut extended_range {
+ ext.start = *ext.start.min(&secondary_hunk_range.start, buffer);
+ ext.end = *ext.end.max(&secondary_hunk_range.end, buffer);
+ } else {
+ extended_range = Some(secondary_hunk_range);
+ }
}
let state = &mut self.inner;
state.base_text_exists = new_state.base_text_exists;
- let parsing_idle = if update.base_text_changed {
+ let parsing_idle = if let Some(diff) = update.base_text_edits {
+ state.base_text.update(cx, |base_text, cx| {
+ base_text.set_capability(Capability::ReadWrite, cx);
+ base_text.apply_diff(diff, cx);
+ base_text.set_capability(Capability::ReadOnly, cx);
+ Some(base_text.parsing_idle())
+ })
+ } else if update.base_text_changed {
state.base_text.update(cx, |base_text, cx| {
base_text.set_capability(Capability::ReadWrite, cx);
base_text.set_text(new_state.base_text.clone(), cx);
@@ -1363,17 +1667,19 @@ impl BufferDiff {
None
};
state.hunks = new_state.hunks;
- if update.base_text_changed || clear_pending_hunks {
+ state.buffer_snapshot = update.buffer_snapshot;
+ if base_text_changed || clear_pending_hunks {
if let Some((first, last)) = state.pending_hunks.first().zip(state.pending_hunks.last())
{
+ let pending_range = first.buffer_range.start..last.buffer_range.end;
if let Some(range) = &mut changed_range {
- range.start = *range.start.min(&first.buffer_range.start, buffer);
- range.end = *range.end.max(&last.buffer_range.end, buffer);
+ range.start = *range.start.min(&pending_range.start, buffer);
+ range.end = *range.end.max(&pending_range.end, buffer);
} else {
- changed_range = Some(first.buffer_range.start..last.buffer_range.end);
+ changed_range = Some(pending_range.clone());
}
- if let Some(base_text_range) = &mut base_text_changed_range {
+ if let Some(base_text_range) = base_text_changed_range.as_mut() {
base_text_range.start =
base_text_range.start.min(first.diff_base_byte_range.start);
base_text_range.end = base_text_range.end.max(last.diff_base_byte_range.end);
@@ -1381,6 +1687,13 @@ impl BufferDiff {
base_text_changed_range =
Some(first.diff_base_byte_range.start..last.diff_base_byte_range.end);
}
+
+ if let Some(ext) = &mut extended_range {
+ ext.start = *ext.start.min(&pending_range.start, buffer);
+ ext.end = *ext.end.max(&pending_range.end, buffer);
+ } else {
+ extended_range = Some(pending_range);
+ }
}
state.pending_hunks = SumTree::new(buffer);
}
@@ -1389,7 +1702,11 @@ impl BufferDiff {
if let Some(parsing_idle) = parsing_idle {
parsing_idle.await;
}
- (changed_range, base_text_changed_range)
+ DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range,
+ }
}
}
@@ -1419,15 +1736,12 @@ impl BufferDiff {
);
cx.spawn(async move |this, cx| {
- let (changed_range, base_text_changed_range) = fut.await;
+ let change = fut.await;
this.update(cx, |_, cx| {
- cx.emit(BufferDiffEvent::DiffChanged {
- changed_range: changed_range.clone(),
- base_text_changed_range,
- });
+ cx.emit(BufferDiffEvent::DiffChanged(change.clone()));
})
.ok();
- changed_range
+ change.changed_range
})
}
@@ -1446,6 +1760,7 @@ impl BufferDiff {
pending_hunks: self.inner.pending_hunks.clone(),
base_text: self.inner.base_text.read(cx).snapshot(),
base_text_exists: self.inner.base_text_exists,
+ buffer_snapshot: self.inner.buffer_snapshot.clone(),
},
secondary_diff: self
.secondary_diff
@@ -1469,7 +1784,7 @@ impl BufferDiff {
cx.spawn(async move |this, cx| {
let Some(state) = this
.update(cx, |this, cx| {
- this.update_diff(buffer.clone(), base_text, true, language, cx)
+ this.update_diff(buffer.clone(), base_text, Some(false), language, cx)
})
.log_err()
else {
@@ -1498,15 +1813,12 @@ impl BufferDiff {
pub fn recalculate_diff_sync(&mut self, buffer: &text::BufferSnapshot, cx: &mut Context<Self>) {
let language = self.base_text(cx).language().cloned();
let base_text = self.base_text_string(cx).map(|s| s.as_str().into());
- let fut = self.update_diff(buffer.clone(), base_text, false, language, cx);
+ let fut = self.update_diff(buffer.clone(), base_text, None, language, cx);
let fg_executor = cx.foreground_executor().clone();
let snapshot = fg_executor.block_on(fut);
let fut = self.set_snapshot_with_secondary_inner(snapshot, buffer, None, false, cx);
- let (changed_range, base_text_changed_range) = fg_executor.block_on(fut);
- cx.emit(BufferDiffEvent::DiffChanged {
- changed_range,
- base_text_changed_range,
- })
+ let change = fg_executor.block_on(fut);
+ cx.emit(BufferDiffEvent::DiffChanged(change));
}
pub fn base_text_buffer(&self) -> Entity<language::Buffer> {
@@ -2297,11 +2609,19 @@ mod tests {
let empty_diff = cx.update(|cx| BufferDiff::new(&buffer, cx).snapshot(cx));
let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
- let (range, base_text_range) =
- compare_hunks(&diff_1.inner.hunks, &empty_diff.inner.hunks, &buffer);
- let range = range.unwrap();
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(
+ &diff_1.inner.hunks,
+ &empty_diff.inner.hunks,
+ &buffer,
+ &buffer,
+ );
+ let range = changed_range.unwrap();
assert_eq!(range.to_point(&buffer), Point::new(0, 0)..Point::new(8, 0));
- let base_text_range = base_text_range.unwrap();
+ let base_text_range = base_text_changed_range.unwrap();
assert_eq!(
base_text_range.to_point(diff_1.base_text()),
Point::new(0, 0)..Point::new(10, 0)
@@ -2322,14 +2642,19 @@ mod tests {
.unindent(),
);
let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
- let (range, base_text_range) =
- compare_hunks(&diff_2.inner.hunks, &diff_1.inner.hunks, &buffer);
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(&diff_2.inner.hunks, &diff_1.inner.hunks, &buffer, &buffer);
assert_eq!(
- range.unwrap().to_point(&buffer),
+ changed_range.unwrap().to_point(&buffer),
Point::new(4, 0)..Point::new(5, 0),
);
assert_eq!(
- base_text_range.unwrap().to_point(diff_2.base_text()),
+ base_text_changed_range
+ .unwrap()
+ .to_point(diff_2.base_text()),
Point::new(6, 0)..Point::new(7, 0),
);
@@ -2348,11 +2673,14 @@ mod tests {
.unindent(),
);
let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
- let (range, base_text_range) =
- compare_hunks(&diff_3.inner.hunks, &diff_2.inner.hunks, &buffer);
- let range = range.unwrap();
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(&diff_3.inner.hunks, &diff_2.inner.hunks, &buffer, &buffer);
+ let range = changed_range.unwrap();
assert_eq!(range.to_point(&buffer), Point::new(1, 0)..Point::new(2, 0));
- let base_text_range = base_text_range.unwrap();
+ let base_text_range = base_text_changed_range.unwrap();
assert_eq!(
base_text_range.to_point(diff_3.base_text()),
Point::new(2, 0)..Point::new(4, 0)
@@ -2372,11 +2700,14 @@ mod tests {
.unindent(),
);
let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
- let (range, base_text_range) =
- compare_hunks(&diff_4.inner.hunks, &diff_3.inner.hunks, &buffer);
- let range = range.unwrap();
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(&diff_4.inner.hunks, &diff_3.inner.hunks, &buffer, &buffer);
+ let range = changed_range.unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 4)..Point::new(4, 0));
- let base_text_range = base_text_range.unwrap();
+ let base_text_range = base_text_changed_range.unwrap();
assert_eq!(
base_text_range.to_point(diff_4.base_text()),
Point::new(6, 0)..Point::new(7, 0)
@@ -2397,11 +2728,14 @@ mod tests {
.unindent(),
);
let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx);
- let (range, base_text_range) =
- compare_hunks(&diff_5.inner.hunks, &diff_4.inner.hunks, &buffer);
- let range = range.unwrap();
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(&diff_5.inner.hunks, &diff_4.inner.hunks, &buffer, &buffer);
+ let range = changed_range.unwrap();
assert_eq!(range.to_point(&buffer), Point::new(3, 0)..Point::new(4, 0));
- let base_text_range = base_text_range.unwrap();
+ let base_text_range = base_text_changed_range.unwrap();
assert_eq!(
base_text_range.to_point(diff_5.base_text()),
Point::new(5, 0)..Point::new(5, 0)
@@ -2422,11 +2756,14 @@ mod tests {
.unindent(),
);
let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx);
- let (range, base_text_range) =
- compare_hunks(&diff_6.inner.hunks, &diff_5.inner.hunks, &buffer);
- let range = range.unwrap();
+ let DiffChanged {
+ changed_range,
+ base_text_changed_range,
+ extended_range: _,
+ } = compare_hunks(&diff_6.inner.hunks, &diff_5.inner.hunks, &buffer, &buffer);
+ let range = changed_range.unwrap();
assert_eq!(range.to_point(&buffer), Point::new(7, 0)..Point::new(8, 0));
- let base_text_range = base_text_range.unwrap();
+ let base_text_range = base_text_changed_range.unwrap();
assert_eq!(
base_text_range.to_point(diff_6.base_text()),
Point::new(9, 0)..Point::new(10, 0)
@@ -329,7 +329,7 @@ impl RatePredictionsModal {
let update = diff.update_diff(
new_buffer_snapshot.text.clone(),
Some(old_buffer_snapshot.text().into()),
- true,
+ Some(true),
language,
cx,
);
@@ -122,6 +122,7 @@ tree-sitter-typescript.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-bash.workspace = true
tree-sitter-md.workspace = true
+unicode-width.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
@@ -90,19 +90,23 @@ pub use fold_map::{
};
pub use inlay_map::{InlayOffset, InlayPoint};
pub use invisibles::{is_invisible, replacement};
+pub use wrap_map::{WrapPoint, WrapRow, WrapSnapshot};
use collections::{HashMap, HashSet};
-use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle};
+use gpui::{
+ App, Context, Entity, EntityId, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle,
+ WeakEntity,
+};
use language::{Point, Subscription as BufferSubscription, language_settings::language_settings};
use multi_buffer::{
- Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
+ Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint,
};
use project::InlayId;
use project::project_settings::DiagnosticSeverity;
use serde::Deserialize;
use sum_tree::{Bias, TreeMap};
-use text::{BufferId, LineIndent};
+use text::{BufferId, LineIndent, Patch};
use ui::{SharedString, px};
use unicode_segmentation::UnicodeSegmentation;
use ztracing::instrument;
@@ -113,7 +117,7 @@ use std::{
fmt::Debug,
iter,
num::NonZeroU32,
- ops::{Add, Range, Sub},
+ ops::{Add, Bound, Range, Sub},
sync::Arc,
};
@@ -124,7 +128,7 @@ use block_map::{BlockRow, BlockSnapshot};
use fold_map::FoldSnapshot;
use inlay_map::InlaySnapshot;
use tab_map::TabSnapshot;
-use wrap_map::{WrapMap, WrapSnapshot};
+use wrap_map::{WrapMap, WrapPatch};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FoldStatus {
@@ -145,11 +149,28 @@ pub trait ToDisplayPoint {
type TextHighlights = TreeMap<HighlightKey, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
type InlayHighlights = TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
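+/// The result of mapping a span of rows from one multibuffer onto a companion
+/// multibuffer, including the excerpt-end points bounding the mapped span.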
+#[derive(Debug)]
+pub struct MultiBufferRowMapping {
+ pub first_group: Option<Range<MultiBufferPoint>>,
+ pub boundaries: Vec<(MultiBufferPoint, Range<MultiBufferPoint>)>,
+ pub prev_boundary: Option<(MultiBufferPoint, Range<MultiBufferPoint>)>,
+ pub source_excerpt_end: MultiBufferPoint,
+ pub target_excerpt_end: MultiBufferPoint,
+}
+
+pub type ConvertMultiBufferRows = fn(
+ &HashMap<ExcerptId, ExcerptId>,
+ &MultiBufferSnapshot,
+ &MultiBufferSnapshot,
+ (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+) -> Vec<MultiBufferRowMapping>;
+
/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
/// folding, hard tabs, soft wrapping, custom blocks (like diagnostics), and highlighting.
///
/// See the [module level documentation](self) for more information.
pub struct DisplayMap {
+ entity_id: EntityId,
/// The buffer that we are displaying.
buffer: Entity<MultiBuffer>,
buffer_subscription: BufferSubscription<MultiBufferOffset>,
@@ -173,6 +194,107 @@ pub struct DisplayMap {
pub clip_at_line_ends: bool,
pub(crate) masked: bool,
pub(crate) diagnostics_max_severity: DiagnosticSeverity,
+ pub(crate) companion: Option<(WeakEntity<DisplayMap>, Entity<Companion>)>,
+}
+
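+/// Shared state linking a pair of `DisplayMap`s (a left- and a right-hand
+/// side), holding the buffer, excerpt, and row mappings used to translate
+/// coordinates between them.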
+pub(crate) struct Companion {
+ rhs_display_map_id: EntityId,
+ rhs_buffer_to_lhs_buffer: HashMap<BufferId, BufferId>,
+ lhs_buffer_to_rhs_buffer: HashMap<BufferId, BufferId>,
+ rhs_excerpt_to_lhs_excerpt: HashMap<ExcerptId, ExcerptId>,
+ lhs_excerpt_to_rhs_excerpt: HashMap<ExcerptId, ExcerptId>,
+ rhs_rows_to_lhs_rows: ConvertMultiBufferRows,
+ lhs_rows_to_rhs_rows: ConvertMultiBufferRows,
+}
+
+impl Companion {
+ pub(crate) fn new(
+ rhs_display_map_id: EntityId,
+ rhs_rows_to_lhs_rows: ConvertMultiBufferRows,
+ lhs_rows_to_rhs_rows: ConvertMultiBufferRows,
+ ) -> Self {
+ Self {
+ rhs_display_map_id,
+ rhs_buffer_to_lhs_buffer: Default::default(),
+ lhs_buffer_to_rhs_buffer: Default::default(),
+ rhs_excerpt_to_lhs_excerpt: Default::default(),
+ lhs_excerpt_to_rhs_excerpt: Default::default(),
+ rhs_rows_to_lhs_rows,
+ lhs_rows_to_rhs_rows,
+ }
+ }
+
+ pub(crate) fn convert_rows_to_companion(
+ &self,
+ display_map_id: EntityId,
+ companion_snapshot: &MultiBufferSnapshot,
+ our_snapshot: &MultiBufferSnapshot,
+ bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ ) -> Vec<MultiBufferRowMapping> {
+ let (excerpt_map, convert_fn) = if display_map_id == self.rhs_display_map_id {
+ (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+ } else {
+ (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+ };
+ convert_fn(excerpt_map, companion_snapshot, our_snapshot, bounds)
+ }
+
+ pub(crate) fn convert_rows_from_companion(
+ &self,
+ display_map_id: EntityId,
+ our_snapshot: &MultiBufferSnapshot,
+ companion_snapshot: &MultiBufferSnapshot,
+ bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ ) -> Vec<MultiBufferRowMapping> {
+ let (excerpt_map, convert_fn) = if display_map_id == self.rhs_display_map_id {
+ (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+ } else {
+ (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+ };
+ convert_fn(excerpt_map, our_snapshot, companion_snapshot, bounds)
+ }
+
+ pub(crate) fn companion_excerpt_to_excerpt(
+ &self,
+ display_map_id: EntityId,
+ ) -> &HashMap<ExcerptId, ExcerptId> {
+ if display_map_id == self.rhs_display_map_id {
+ &self.lhs_excerpt_to_rhs_excerpt
+ } else {
+ &self.rhs_excerpt_to_lhs_excerpt
+ }
+ }
+
+ fn buffer_to_companion_buffer(&self, display_map_id: EntityId) -> &HashMap<BufferId, BufferId> {
+ if display_map_id == self.rhs_display_map_id {
+ &self.rhs_buffer_to_lhs_buffer
+ } else {
+ &self.lhs_buffer_to_rhs_buffer
+ }
+ }
+
+ pub(crate) fn add_excerpt_mapping(&mut self, lhs_id: ExcerptId, rhs_id: ExcerptId) {
+ self.lhs_excerpt_to_rhs_excerpt.insert(lhs_id, rhs_id);
+ self.rhs_excerpt_to_lhs_excerpt.insert(rhs_id, lhs_id);
+ }
+
+ pub(crate) fn remove_excerpt_mappings(
+ &mut self,
+ lhs_ids: impl IntoIterator<Item = ExcerptId>,
+ rhs_ids: impl IntoIterator<Item = ExcerptId>,
+ ) {
+ for id in lhs_ids {
+ self.lhs_excerpt_to_rhs_excerpt.remove(&id);
+ }
+ for id in rhs_ids {
+ self.rhs_excerpt_to_lhs_excerpt.remove(&id);
+ }
+ }
+
+ pub(crate) fn add_buffer_mapping(&mut self, lhs_buffer: BufferId, rhs_buffer: BufferId) {
+ self.lhs_buffer_to_rhs_buffer.insert(lhs_buffer, rhs_buffer);
+ self.rhs_buffer_to_lhs_buffer.insert(rhs_buffer, lhs_buffer);
+ }
}
impl DisplayMap {
@@ -201,6 +323,7 @@ impl DisplayMap {
cx.observe(&wrap_map, |_, _, cx| cx.notify()).detach();
DisplayMap {
+ entity_id: cx.entity_id(),
buffer,
buffer_subscription,
fold_map,
@@ -215,22 +338,132 @@ impl DisplayMap {
inlay_highlights: Default::default(),
clip_at_line_ends: false,
masked: false,
+ companion: None,
}
}
- #[instrument(skip_all)]
- pub fn snapshot(&mut self, cx: &mut Context<Self>) -> DisplaySnapshot {
- let tab_size = Self::tab_size(&self.buffer, cx);
+ pub(crate) fn set_companion(
+ &mut self,
+ companion: Option<(WeakEntity<DisplayMap>, Entity<Companion>)>,
+ cx: &mut Context<Self>,
+ ) {
+ self.companion = companion;
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
- let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
- let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot, edits);
- let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot, edits, tab_size);
- let (wrap_snapshot, edits) = self
+ let tab_size = Self::tab_size(&self.buffer, cx);
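+        // Compose a whole-buffer, zero-delta edit into the pending edits so
+        // every downstream map re-syncs against the new companion.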
+ let edits = Patch::new(edits)
+ .compose([text::Edit {
+ old: MultiBufferOffset(0)..buffer_snapshot.len(),
+ new: MultiBufferOffset(0)..buffer_snapshot.len(),
+ }])
+ .into_inner();
+
+ let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
+ let (snapshot, edits) = self.fold_map.read(snapshot, edits);
+ let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
+ let (snapshot, edits) = self
.wrap_map
- .update(cx, |map, cx| map.sync(tab_snapshot, edits, cx));
- let block_snapshot = self.block_map.read(wrap_snapshot, edits).snapshot;
+ .update(cx, |map, cx| map.sync(snapshot, edits, cx));
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ self.block_map.read(
+ snapshot.clone(),
+ edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, _cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(_cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&snapshot, &edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
+ }
+
+ pub(crate) fn companion(&self) -> Option<&Entity<Companion>> {
+ self.companion.as_ref().map(|(_, c)| c)
+ }
+
+ pub(crate) fn companion_excerpt_to_my_excerpt(
+ &self,
+ their_id: ExcerptId,
+ cx: &App,
+ ) -> Option<ExcerptId> {
+ let (_, companion) = self.companion.as_ref()?;
+ let c = companion.read(cx);
+ c.companion_excerpt_to_excerpt(self.entity_id)
+ .get(&their_id)
+ .copied()
+ }
+
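+    /// Consumes pending buffer edits and syncs the inlay, fold, tab, and wrap
+    /// maps, returning the resulting wrap snapshot and edits.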
+ fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) {
+ let tab_size = Self::tab_size(&self.buffer, cx);
+ let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
+ let edits = self.buffer_subscription.consume().into_inner();
+
+ let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
+ let (snapshot, edits) = self.fold_map.read(snapshot, edits);
+ let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
+ self.wrap_map
+ .update(cx, |map, cx| map.sync(snapshot, edits, cx))
+ }
+
+ #[instrument(skip_all)]
+ pub fn snapshot(&mut self, cx: &mut Context<Self>) -> DisplaySnapshot {
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let block_snapshot = self
+ .block_map
+ .read(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ )
+ .snapshot;
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, _cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(_cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
DisplaySnapshot {
block_snapshot,
@@ -266,13 +499,31 @@ impl DisplayMap {
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let tab_size = Self::tab_size(&self.buffer, cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot.clone(), edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.read(snapshot, edits);
+
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+ self.block_map.read(
+ snapshot,
+ edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
let inline = creases.iter().filter_map(|crease| {
if let Crease::Inline {
@@ -287,10 +538,24 @@ impl DisplayMap {
let (snapshot, edits) = fold_map.fold(inline);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
+ let (self_new_wrap_snapshot, self_new_wrap_edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
+
+ let (self_wrap_snapshot, self_wrap_edits) =
+ (self_new_wrap_snapshot.clone(), self_new_wrap_edits.clone());
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_new_wrap_snapshot,
+ self_new_wrap_edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
let blocks = creases.into_iter().filter_map(|crease| {
if let Crease::Block {
range,
@@ -327,6 +592,20 @@ impl DisplayMap {
}
}),
);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
/// Removes any folds with the given ranges.
@@ -340,19 +619,66 @@ impl DisplayMap {
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let tab_size = Self::tab_size(&self.buffer, cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.read(snapshot, edits);
+
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+ self.block_map.read(
+ snapshot,
+ edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+
let (snapshot, edits) = fold_map.remove_folds(ranges, type_id);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
+ let (self_new_wrap_snapshot, self_new_wrap_edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.write(snapshot, edits);
+
+ let (self_wrap_snapshot, self_wrap_edits) =
+ (self_new_wrap_snapshot.clone(), self_new_wrap_edits.clone());
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ self.block_map.write(
+ self_new_wrap_snapshot,
+ self_new_wrap_edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
/// Removes any folds whose ranges intersect any of the given ranges.
@@ -370,56 +696,161 @@ impl DisplayMap {
.collect::<Vec<_>>();
let edits = self.buffer_subscription.consume().into_inner();
let tab_size = Self::tab_size(&self.buffer, cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.read(snapshot, edits);
+
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+ self.block_map.read(
+ snapshot,
+ edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
let (snapshot, edits) =
fold_map.unfold_intersecting(offset_ranges.iter().cloned(), inclusive);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
+ let (self_new_wrap_snapshot, self_new_wrap_edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
+
+ let (self_wrap_snapshot, self_wrap_edits) =
+ (self_new_wrap_snapshot.clone(), self_new_wrap_edits.clone());
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_new_wrap_snapshot,
+ self_new_wrap_edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
block_map.remove_intersecting_replace_blocks(offset_ranges, inclusive);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
#[instrument(skip_all)]
pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId, cx: &mut Context<Self>) {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
- block_map.disable_header_for_buffer(buffer_id)
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+ block_map.disable_header_for_buffer(buffer_id);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
#[instrument(skip_all)]
pub fn fold_buffers(
&mut self,
buffer_ids: impl IntoIterator<Item = language::BufferId>,
- cx: &mut Context<Self>,
+ cx: &mut App,
) {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
- block_map.fold_buffers(buffer_ids, self.buffer.read(cx), cx)
+ let buffer_ids: Vec<_> = buffer_ids.into_iter().collect();
+
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+ block_map.fold_buffers(buffer_ids.iter().copied(), self.buffer.read(cx), cx);
+
+ if let Some((companion_dm, companion_entity)) = &self.companion {
+ let buffer_mapping = companion_entity
+ .read(cx)
+ .buffer_to_companion_buffer(self.entity_id);
+ let their_buffer_ids: Vec<_> = buffer_ids
+ .iter()
+ .filter_map(|id| buffer_mapping.get(id).copied())
+ .collect();
+
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ let mut block_map = dm.block_map.write(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ if !their_buffer_ids.is_empty() {
+ block_map.fold_buffers(their_buffer_ids, dm.buffer.read(cx), cx);
+ }
+ }
+ });
+ }
}
#[instrument(skip_all)]
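Each of the call sites above replaces the old inlay → fold → tab → wrap sync chain with a single `sync_through_wrap` call. The helper itself is not shown in this hunk; a minimal sketch of what it presumably contains, reconstructed from the removed lines (the exact signature and return types are assumptions):

```rust
// Hypothetical reconstruction of the helper the new call sites rely on;
// it collapses the per-layer sync chain that each method used to repeat.
fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) {
    let snapshot = self.buffer.read(cx).snapshot(cx);
    let edits = self.buffer_subscription.consume().into_inner();
    let tab_size = Self::tab_size(&self.buffer, cx);
    let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
    let (snapshot, edits) = self.fold_map.read(snapshot, edits);
    let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
    self.wrap_map
        .update(cx, |map, cx| map.sync(snapshot, edits, cx))
}
```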
@@ -428,17 +859,53 @@ impl DisplayMap {
buffer_ids: impl IntoIterator<Item = language::BufferId>,
cx: &mut Context<Self>,
) {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
- block_map.unfold_buffers(buffer_ids, self.buffer.read(cx), cx)
+ let buffer_ids: Vec<_> = buffer_ids.into_iter().collect();
+
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+ block_map.unfold_buffers(buffer_ids.iter().copied(), self.buffer.read(cx), cx);
+
+ if let Some((companion_dm, companion_entity)) = &self.companion {
+ let buffer_mapping = companion_entity
+ .read(cx)
+ .buffer_to_companion_buffer(self.entity_id);
+ let their_buffer_ids: Vec<_> = buffer_ids
+ .iter()
+ .filter_map(|id| buffer_mapping.get(id).copied())
+ .collect();
+
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ let mut block_map = dm.block_map.write(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ if !their_buffer_ids.is_empty() {
+ block_map.unfold_buffers(their_buffer_ids, dm.buffer.read(cx), cx);
+ }
+ }
+ });
+ }
}
#[instrument(skip_all)]
@@ -477,32 +944,80 @@ impl DisplayMap {
blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
cx: &mut Context<Self>,
) -> Vec<CustomBlockId> {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
- block_map.insert(blocks)
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+ let result = block_map.insert(blocks);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
+
+ result
}
#[instrument(skip_all)]
pub fn resize_blocks(&mut self, heights: HashMap<CustomBlockId, u32>, cx: &mut Context<Self>) {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
block_map.resize(heights);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
#[instrument(skip_all)]
@@ -512,17 +1027,40 @@ impl DisplayMap {
#[instrument(skip_all)]
pub fn remove_blocks(&mut self, ids: HashSet<CustomBlockId>, cx: &mut Context<Self>) {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let mut block_map = self.block_map.write(snapshot, edits);
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let mut block_map = self.block_map.write(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
block_map.remove(ids);
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
}
#[instrument(skip_all)]
@@ -531,17 +1069,41 @@ impl DisplayMap {
block_id: CustomBlockId,
cx: &mut Context<Self>,
) -> Option<DisplayRow> {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let edits = self.buffer_subscription.consume().into_inner();
- let tab_size = Self::tab_size(&self.buffer, cx);
- let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
- let (snapshot, edits) = self.fold_map.read(snapshot, edits);
- let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
- .wrap_map
- .update(cx, |map, cx| map.sync(snapshot, edits, cx));
- let block_map = self.block_map.read(snapshot, edits);
+ let (self_wrap_snapshot, self_wrap_edits) = self.sync_through_wrap(cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ let block_map = self.block_map.read(
+ self_wrap_snapshot.clone(),
+ self_wrap_edits.clone(),
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
let block_row = block_map.row_for_block(block_id)?;
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
+
Some(DisplayRow(block_row.0))
}
@@ -644,21 +1206,67 @@ impl DisplayMap {
let snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let tab_size = Self::tab_size(&self.buffer, cx);
+
+ let companion_wrap_data = self.companion.as_ref().and_then(|(companion_dm, _)| {
+ companion_dm
+ .update(cx, |dm, cx| dm.sync_through_wrap(cx))
+ .ok()
+ });
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.read(snapshot, edits);
+
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+ self.block_map.read(
+ snapshot,
+ edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
let (snapshot, edits) = fold_map.update_fold_widths(widths);
let widths_changed = !edits.is_empty();
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
- let (snapshot, edits) = self
+ let (self_new_wrap_snapshot, self_new_wrap_edits) = self
.wrap_map
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
- self.block_map.read(snapshot, edits);
+
+ let (self_wrap_snapshot, self_wrap_edits) =
+ (self_new_wrap_snapshot.clone(), self_new_wrap_edits.clone());
+
+ let companion_wrap_edits = companion_wrap_data
+ .as_ref()
+ .map(|(snapshot, edits)| (snapshot, edits));
+ let companion_ref = self.companion.as_ref().map(|(_, c)| c.read(cx));
+
+ self.block_map.read(
+ self_new_wrap_snapshot,
+ self_new_wrap_edits,
+ companion_wrap_edits,
+ companion_ref.map(|c| (c, self.entity_id)),
+ );
+
+ if let Some((companion_dm, _)) = &self.companion {
+ let _ = companion_dm.update(cx, |dm, cx| {
+ if let Some((companion_snapshot, companion_edits)) = companion_wrap_data {
+ let their_companion_ref = dm.companion.as_ref().map(|(_, c)| c.read(cx));
+ dm.block_map.read(
+ companion_snapshot,
+ companion_edits,
+ Some((&self_wrap_snapshot, &self_wrap_edits)),
+ their_companion_ref.map(|c| (c, dm.entity_id)),
+ );
+ }
+ });
+ }
widths_changed
}
@@ -5,14 +5,14 @@ use super::{
};
use crate::{
EditorStyle, GutterDimensions,
- display_map::{dimensions::RowDelta, wrap_map::WrapRow},
+ display_map::{Companion, dimensions::RowDelta, wrap_map::WrapRow},
};
use collections::{Bound, HashMap, HashSet};
use gpui::{AnyElement, App, EntityId, Pixels, Window};
use language::{Patch, Point};
use multi_buffer::{
- Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferRow,
- MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
+ Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint,
+ MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
};
use parking_lot::Mutex;
use std::{
@@ -52,7 +52,16 @@ pub struct BlockMapReader<'a> {
pub snapshot: BlockSnapshot,
}
-pub struct BlockMapWriter<'a>(&'a mut BlockMap);
+pub struct BlockMapWriter<'a> {
+ block_map: &'a mut BlockMap,
+ companion: Option<BlockMapWriterCompanion<'a>>,
+}
+
+struct BlockMapWriterCompanion<'a> {
+ companion: &'a Companion,
+ snapshot: &'a WrapSnapshot,
+ entity: EntityId,
+}
#[derive(Clone)]
pub struct BlockSnapshot {
@@ -80,6 +89,9 @@ impl From<CustomBlockId> for ElementId {
}
}
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SpacerId(pub usize);
+
/// A zero-indexed point in a text buffer consisting of a row and column
/// adjusted for inserted blocks, wrapped rows, tabs, folds and inlays.
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
@@ -275,6 +287,7 @@ pub enum BlockId {
ExcerptBoundary(ExcerptId),
FoldedBuffer(ExcerptId),
Custom(CustomBlockId),
+ Spacer(SpacerId),
}
impl From<BlockId> for ElementId {
@@ -285,6 +298,7 @@ impl From<BlockId> for ElementId {
("ExcerptBoundary", EntityId::from(excerpt_id)).into()
}
BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(),
+ BlockId::Spacer(SpacerId(id)) => ("Spacer", id).into(),
}
}
}
@@ -295,6 +309,7 @@ impl std::fmt::Display for BlockId {
Self::Custom(id) => write!(f, "Block({id:?})"),
Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"),
Self::FoldedBuffer(id) => write!(f, "FoldedBuffer({id:?})"),
+ Self::Spacer(id) => write!(f, "Spacer({id:?})"),
}
}
}
@@ -320,6 +335,11 @@ pub enum Block {
excerpt: ExcerptInfo,
height: u32,
},
+ Spacer {
+ id: SpacerId,
+ height: u32,
+ is_below: bool,
+ },
}
impl Block {
@@ -335,6 +355,7 @@ impl Block {
excerpt: next_excerpt,
..
} => BlockId::ExcerptBoundary(next_excerpt.id),
+ Block::Spacer { id, .. } => BlockId::Spacer(*id),
}
}
@@ -343,7 +364,8 @@ impl Block {
Block::Custom(block) => block.height.is_some(),
Block::ExcerptBoundary { .. }
| Block::FoldedBuffer { .. }
- | Block::BufferHeader { .. } => true,
+ | Block::BufferHeader { .. }
+ | Block::Spacer { .. } => true,
}
}
@@ -352,7 +374,8 @@ impl Block {
Block::Custom(block) => block.height.unwrap_or(0),
Block::ExcerptBoundary { height, .. }
| Block::FoldedBuffer { height, .. }
- | Block::BufferHeader { height, .. } => *height,
+ | Block::BufferHeader { height, .. }
+ | Block::Spacer { height, .. } => *height,
}
}
@@ -361,7 +384,8 @@ impl Block {
Block::Custom(block) => block.style,
Block::ExcerptBoundary { .. }
| Block::FoldedBuffer { .. }
- | Block::BufferHeader { .. } => BlockStyle::Sticky,
+ | Block::BufferHeader { .. }
+ | Block::Spacer { .. } => BlockStyle::Sticky,
}
}
@@ -371,6 +395,7 @@ impl Block {
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => true,
Block::BufferHeader { .. } => true,
+ Block::Spacer { is_below, .. } => !*is_below,
}
}
@@ -380,6 +405,7 @@ impl Block {
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => false,
Block::BufferHeader { .. } => false,
+ Block::Spacer { .. } => false,
}
}
@@ -392,6 +418,7 @@ impl Block {
Block::FoldedBuffer { .. } => false,
Block::ExcerptBoundary { .. } => false,
Block::BufferHeader { .. } => false,
+ Block::Spacer { is_below, .. } => *is_below,
}
}
@@ -401,6 +428,7 @@ impl Block {
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary { .. } => false,
Block::BufferHeader { .. } => false,
+ Block::Spacer { .. } => false,
}
}
@@ -410,6 +438,7 @@ impl Block {
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary { .. } => true,
Block::BufferHeader { .. } => true,
+ Block::Spacer { .. } => false,
}
}
@@ -419,6 +448,7 @@ impl Block {
Block::FoldedBuffer { .. } => true,
Block::ExcerptBoundary { .. } => false,
Block::BufferHeader { .. } => true,
+ Block::Spacer { .. } => false,
}
}
}
@@ -445,6 +475,15 @@ impl Debug for Block {
.field("excerpt", excerpt)
.field("height", height)
.finish(),
+ Self::Spacer {
+ id,
+ height,
+ is_below: _,
+ } => f
+ .debug_struct("Spacer")
+ .field("id", id)
+ .field("height", height)
+ .finish(),
}
}
}
@@ -502,13 +541,21 @@ impl BlockMap {
old: WrapRow(0)..row_count,
new: WrapRow(0)..row_count,
}]),
+ None,
+ None,
);
map
}
#[ztracing::instrument(skip_all)]
- pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: WrapPatch) -> BlockMapReader<'_> {
- self.sync(&wrap_snapshot, edits);
+ pub(crate) fn read(
+ &self,
+ wrap_snapshot: WrapSnapshot,
+ edits: WrapPatch,
+ companion_wrap_edits: Option<(&WrapSnapshot, &WrapPatch)>,
+ companion: Option<(&Companion, EntityId)>,
+ ) -> BlockMapReader<'_> {
+ self.sync(&wrap_snapshot, edits, companion_wrap_edits, companion);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone();
BlockMapReader {
blocks: &self.custom_blocks,
@@ -523,16 +570,38 @@ impl BlockMap {
}
#[ztracing::instrument(skip_all)]
- pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: WrapPatch) -> BlockMapWriter<'_> {
- self.sync(&wrap_snapshot, edits);
+ pub(crate) fn write<'a>(
+ &'a mut self,
+ wrap_snapshot: WrapSnapshot,
+ edits: WrapPatch,
+ companion_wrap_edits: Option<(&'a WrapSnapshot, &'a WrapPatch)>,
+ companion: Option<(&'a Companion, EntityId)>,
+ ) -> BlockMapWriter<'a> {
+ self.sync(&wrap_snapshot, edits, companion_wrap_edits, companion);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot;
- BlockMapWriter(self)
+ let companion = match (companion_wrap_edits, companion) {
+ (Some(_), None) | (None, Some(_)) => unreachable!(),
+ (None, None) => None,
+ (Some(companion_wrap_edits), Some(companion)) => Some(BlockMapWriterCompanion {
+ companion: companion.0,
+ snapshot: companion_wrap_edits.0,
+ entity: companion.1,
+ }),
+ };
+ BlockMapWriter {
+ block_map: self,
+ companion,
+ }
}
#[ztracing::instrument(skip_all, fields(edits = ?edits))]
- fn sync(&self, wrap_snapshot: &WrapSnapshot, mut edits: WrapPatch) {
- let _timer = zlog::time!("BlockMap::sync").warn_if_gt(std::time::Duration::from_millis(50));
-
+ fn sync(
+ &self,
+ wrap_snapshot: &WrapSnapshot,
+ mut edits: WrapPatch,
+ companion_wrap_edits: Option<(&WrapSnapshot, &WrapPatch)>,
+ companion: Option<(&Companion, EntityId)>,
+ ) {
let buffer = wrap_snapshot.buffer_snapshot();
// Handle changing the last excerpt if it is empty.
@@ -552,6 +621,79 @@ impl BlockMap {
}]);
}
+ // Pull in companion edits to ensure we recompute spacers in ranges that have changed in the companion.
+ if let Some((companion_new_snapshot, companion_edits)) = companion_wrap_edits
+ && let Some((companion, display_map_id)) = companion
+ {
+ let mut companion_edits_in_my_space: Vec<WrapEdit> = companion_edits
+ .clone()
+ .into_inner()
+ .iter()
+ .map(|edit| {
+ let companion_start = companion_new_snapshot
+ .to_point(WrapPoint::new(edit.new.start, 0), Bias::Left);
+ let companion_end = companion_new_snapshot
+ .to_point(WrapPoint::new(edit.new.end, 0), Bias::Left);
+
+ let my_start = companion
+ .convert_rows_from_companion(
+ display_map_id,
+ wrap_snapshot.buffer_snapshot(),
+ companion_new_snapshot.buffer_snapshot(),
+ (
+ Bound::Included(companion_start),
+ Bound::Included(companion_start),
+ ),
+ )
+ .first()
+ .and_then(|t| t.boundaries.first())
+ .map(|(_, range)| range.start)
+ .unwrap_or(wrap_snapshot.buffer_snapshot().max_point());
+ let my_end = companion
+ .convert_rows_from_companion(
+ display_map_id,
+ wrap_snapshot.buffer_snapshot(),
+ companion_new_snapshot.buffer_snapshot(),
+ (
+ Bound::Included(companion_end),
+ Bound::Included(companion_end),
+ ),
+ )
+ .first()
+ .and_then(|t| t.boundaries.last())
+ .map(|(_, range)| range.end)
+ .unwrap_or(wrap_snapshot.buffer_snapshot().max_point());
+
+ let my_start = wrap_snapshot.make_wrap_point(my_start, Bias::Left);
+ let mut my_end = wrap_snapshot.make_wrap_point(my_end, Bias::Left);
+ if my_end.column() > 0 {
+ my_end.0.row += 1;
+ my_end.0.column = 0;
+ }
+
+ WrapEdit {
+ old: my_start.row()..my_end.row(),
+ new: my_start.row()..my_end.row(),
+ }
+ })
+ .collect();
+
+ companion_edits_in_my_space.sort_by_key(|edit| edit.old.start);
+ let mut merged_edits: Vec<WrapEdit> = Vec::new();
+ for edit in companion_edits_in_my_space {
+ if let Some(last) = merged_edits.last_mut() {
+ if edit.old.start <= last.old.end {
+ last.old.end = last.old.end.max(edit.old.end);
+ last.new.end = last.new.end.max(edit.new.end);
+ continue;
+ }
+ }
+ merged_edits.push(edit);
+ }
+
+ edits = edits.compose(merged_edits);
+ }
+
let edits = edits.into_inner();
if edits.is_empty() {
return;
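The translation-and-merge step above boils down to classic interval coalescing over wrap rows: translated companion edits are sorted by start row, and overlapping or touching ranges are combined before being composed into this map's own patch. A simplified sketch over plain row ranges (the helper name and the bare `u32` rows are illustrative only):

```rust
// Sort companion-derived row ranges by start, then coalesce overlapping or
// touching ranges so each affected region is re-synced at most once.
fn merge_row_ranges(mut ranges: Vec<std::ops::Range<u32>>) -> Vec<std::ops::Range<u32>> {
    ranges.sort_by_key(|range| range.start);
    let mut merged: Vec<std::ops::Range<u32>> = Vec::new();
    for range in ranges {
        match merged.last_mut() {
            Some(last) if range.start <= last.end => last.end = last.end.max(range.end),
            _ => merged.push(range),
        }
    }
    merged
}
```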
@@ -591,7 +733,7 @@ impl BlockMap {
new_transforms.push(transform.clone(), ());
cursor.next();
- // Preserve below blocks at end of edit
+ // Preserve below blocks at start of edit
while let Some(transform) = cursor.item() {
if transform.block.as_ref().is_some_and(|b| b.place_below()) {
new_transforms.push(transform.clone(), ());
@@ -662,7 +804,11 @@ impl BlockMap {
// Discard below blocks at the end of the edit. They'll be reconstructed.
while let Some(transform) = cursor.item() {
- if transform.block.as_ref().is_some_and(|b| b.place_below()) {
+ if transform
+ .block
+ .as_ref()
+ .is_some_and(|b| b.place_below() || matches!(b, Block::Spacer { .. }))
+ {
cursor.next();
} else {
break;
@@ -715,7 +861,6 @@ impl BlockMap {
let placement = block.placement.to_wrap_row(wrap_snapshot)?;
if let BlockPlacement::Above(row) = placement
&& row < new_start
- // this will be true more often now
{
return None;
}
@@ -736,6 +881,18 @@ impl BlockMap {
},
));
+ if let Some((companion_snapshot, _)) = companion_wrap_edits
+ && let Some((companion, display_map_id)) = companion
+ {
+ blocks_in_edit.extend(self.spacer_blocks(
+ (start_bound, end_bound),
+ wrap_snapshot,
+ companion_snapshot,
+ companion,
+ display_map_id,
+ ));
+ }
+
BlockMap::sort_blocks(&mut blocks_in_edit);
// For each of these blocks, insert a new isomorphic transform preceding the block,
@@ -769,7 +926,7 @@ impl BlockMap {
rows_before_block =
(position + RowDelta(1)) - new_transforms.summary().input_rows;
}
- BlockPlacement::Replace(range) => {
+ BlockPlacement::Replace(ref range) => {
rows_before_block = *range.start() - new_transforms.summary().input_rows;
summary.input_rows = WrapRow(1) + (*range.end() - *range.start());
just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. });
@@ -795,7 +952,7 @@ impl BlockMap {
new_transforms.append(cursor.suffix(), ());
debug_assert_eq!(
new_transforms.summary().input_rows,
- wrap_snapshot.max_point().row() + WrapRow(1)
+ wrap_snapshot.max_point().row() + WrapRow(1),
);
drop(cursor);
@@ -900,6 +1057,187 @@ impl BlockMap {
})
}
+ fn spacer_blocks(
+ &self,
+ bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ wrap_snapshot: &WrapSnapshot,
+ companion_snapshot: &WrapSnapshot,
+ companion: &Companion,
+ display_map_id: EntityId,
+ ) -> Vec<(BlockPlacement<WrapRow>, Block)> {
+ let our_buffer = wrap_snapshot.buffer_snapshot();
+ let companion_buffer = companion_snapshot.buffer_snapshot();
+
+ let row_mappings = companion.convert_rows_to_companion(
+ display_map_id,
+ companion_buffer,
+ our_buffer,
+ bounds,
+ );
+
+ let determine_spacer = |our_point: Point, their_point: Point, delta: i32| {
+ let our_wrap = wrap_snapshot.make_wrap_point(our_point, Bias::Left).row();
+ let companion_wrap = companion_snapshot
+ .make_wrap_point(their_point, Bias::Left)
+ .row();
+ let new_delta = companion_wrap.0 as i32 - our_wrap.0 as i32;
+
+ let spacer = if new_delta > delta {
+ let height = (new_delta - delta) as u32;
+ Some((our_wrap, height))
+ } else {
+ None
+ };
+ (new_delta, spacer)
+ };
+
+ let mut result = Vec::new();
+
+ for row_mapping in row_mappings {
+ let mut iter = row_mapping.boundaries.iter().cloned().peekable();
+
+ let Some(((first_boundary, first_range), first_group)) =
+ iter.peek().cloned().zip(row_mapping.first_group.clone())
+ else {
+ continue;
+ };
+
+ // Because we calculate spacers based on differences in wrap row
+ // counts between the RHS and LHS for corresponding buffer points,
+ // we need to calibrate our expectations based on the difference
+ // in counts before the start of the edit. This difference in
+ // counts should have been balanced already by spacers above this
+ // edit, so we only need to insert spacers where the difference
+ // in counts diverges from that baseline value.
+ let (our_baseline, their_baseline) = if first_group.start < first_boundary {
+ (first_group.start, first_range.start)
+ } else if let Some((prev_boundary, prev_range)) = row_mapping.prev_boundary {
+ (prev_boundary, prev_range.end)
+ } else {
+ (first_boundary, first_range.start)
+ };
+ let our_baseline = wrap_snapshot
+ .make_wrap_point(our_baseline, Bias::Left)
+ .row();
+ let their_baseline = companion_snapshot
+ .make_wrap_point(their_baseline, Bias::Left)
+ .row();
+
+ let mut delta = their_baseline.0 as i32 - our_baseline.0 as i32;
+
+ if first_group.start < first_boundary {
+ let mut current_boundary = first_boundary;
+ let current_range = first_range;
+ while let Some((next_boundary, next_range)) = iter.peek().cloned()
+ && next_range.end <= current_range.end
+ {
+ iter.next();
+ current_boundary = next_boundary;
+ }
+
+ let (new_delta, spacer) =
+ determine_spacer(current_boundary, current_range.end, delta);
+
+ delta = new_delta;
+ if let Some((wrap_row, height)) = spacer {
+ result.push((
+ BlockPlacement::Above(wrap_row),
+ Block::Spacer {
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ height,
+ is_below: false,
+ },
+ ));
+ }
+ }
+
+ while let Some((boundary, range)) = iter.next() {
+ let mut current_boundary = boundary;
+ let current_range = range;
+
+ // This can only occur at the end of an excerpt.
+ if current_boundary.column > 0 {
+ debug_assert_eq!(current_boundary, row_mapping.source_excerpt_end);
+ break;
+ }
+
+ // Align the two sides at the start of this group.
+ let (delta_at_start, mut spacer_at_start) =
+ determine_spacer(current_boundary, current_range.start, delta);
+ delta = delta_at_start;
+
+ while let Some((next_boundary, next_range)) = iter.peek()
+ && next_range.end <= current_range.end
+ {
+ if let Some((wrap_row, height)) = spacer_at_start.take() {
+ result.push((
+ BlockPlacement::Above(wrap_row),
+ Block::Spacer {
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ height,
+ is_below: false,
+ },
+ ));
+ }
+
+ current_boundary = *next_boundary;
+ iter.next();
+ }
+
+ // This can only occur at the end of an excerpt.
+ if current_boundary.column > 0 {
+ debug_assert_eq!(current_boundary, row_mapping.source_excerpt_end);
+ break;
+ }
+
+ let (delta_at_end, spacer_at_end) =
+ determine_spacer(current_boundary, current_range.end, delta);
+ delta = delta_at_end;
+
+ if let Some((wrap_row, mut height)) = spacer_at_start {
+ if let Some((_, additional_height)) = spacer_at_end {
+ height += additional_height;
+ }
+ result.push((
+ BlockPlacement::Above(wrap_row),
+ Block::Spacer {
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ height,
+ is_below: false,
+ },
+ ));
+ } else if let Some((wrap_row, height)) = spacer_at_end {
+ result.push((
+ BlockPlacement::Above(wrap_row),
+ Block::Spacer {
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ height,
+ is_below: false,
+ },
+ ));
+ }
+ }
+
+ let (last_boundary, _last_range) = row_mapping.boundaries.last().cloned().unwrap();
+ if last_boundary == row_mapping.source_excerpt_end {
+ let (_new_delta, spacer) =
+ determine_spacer(last_boundary, row_mapping.target_excerpt_end, delta);
+ if let Some((wrap_row, height)) = spacer {
+ result.push((
+ BlockPlacement::Below(wrap_row),
+ Block::Spacer {
+ id: SpacerId(self.next_block_id.fetch_add(1, SeqCst)),
+ height,
+ is_below: true,
+ },
+ ));
+ }
+ }
+ }
+
+ result
+ }
+
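The core of `determine_spacer` above is a comparison of wrap-row deltas at corresponding buffer points: a spacer is only emitted when the companion has grown further ahead than the running baseline already accounts for. A standalone sketch with plain integers (the row values in the comment are made up for illustration):

```rust
// Returns the spacer height needed to keep the two sides aligned, if any.
fn spacer_height(our_wrap_row: u32, companion_wrap_row: u32, baseline_delta: i32) -> Option<u32> {
    let new_delta = companion_wrap_row as i32 - our_wrap_row as i32;
    (new_delta > baseline_delta).then(|| (new_delta - baseline_delta) as u32)
}

// With a baseline delta of 0, the companion at wrap row 12 and us at wrap row 9
// would yield a spacer of height 3; if the companion is behind, no spacer is emitted.
```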
#[ztracing::instrument(skip_all)]
fn sort_blocks(blocks: &mut Vec<(BlockPlacement<WrapRow>, Block)>) {
blocks.sort_unstable_by(|(placement_a, block_a), (placement_b, block_b)| {
@@ -934,18 +1272,20 @@ impl BlockMap {
) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)),
(
Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
- Block::Custom(_),
+ Block::Spacer { .. } | Block::Custom(_),
) => Ordering::Less,
(
- Block::Custom(_),
+ Block::Spacer { .. } | Block::Custom(_),
Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
) => Ordering::Greater,
+ (Block::Spacer { .. }, Block::Custom(_)) => Ordering::Less,
+ (Block::Custom(_), Block::Spacer { .. }) => Ordering::Greater,
(Block::Custom(block_a), Block::Custom(block_b)) => block_a
.priority
.cmp(&block_b.priority)
.then_with(|| block_a.id.cmp(&block_b.id)),
_ => {
- unreachable!()
+ unreachable!("comparing blocks: {block_a:?} vs {block_b:?}")
}
})
});
@@ -1085,7 +1425,7 @@ impl BlockMapWriter<'_> {
let blocks = blocks.into_iter();
let mut ids = Vec::with_capacity(blocks.size_hint().1.unwrap_or(0));
let mut edits = Patch::default();
- let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
+ let wrap_snapshot = &*self.block_map.wrap_snapshot.borrow();
let buffer = wrap_snapshot.buffer_snapshot();
let mut previous_wrap_row_range: Option<Range<WrapRow>> = None;
@@ -1094,7 +1434,7 @@ impl BlockMapWriter<'_> {
debug_assert!(block.height.unwrap() > 0);
}
- let id = CustomBlockId(self.0.next_block_id.fetch_add(1, SeqCst));
+ let id = CustomBlockId(self.block_map.next_block_id.fetch_add(1, SeqCst));
ids.push(id);
let start = block.placement.start().to_point(buffer);
@@ -1117,7 +1457,7 @@ impl BlockMapWriter<'_> {
(range.start, range.end)
};
let block_ix = match self
- .0
+ .block_map
.custom_blocks
.binary_search_by(|probe| probe.placement.cmp(&block.placement, buffer))
{
@@ -1131,8 +1471,10 @@ impl BlockMapWriter<'_> {
style: block.style,
priority: block.priority,
});
- self.0.custom_blocks.insert(block_ix, new_block.clone());
- self.0.custom_blocks_by_id.insert(id, new_block);
+ self.block_map
+ .custom_blocks
+ .insert(block_ix, new_block.clone());
+ self.block_map.custom_blocks_by_id.insert(id, new_block);
edits = edits.compose([Edit {
old: start_row..end_row,
@@ -1140,18 +1482,30 @@ impl BlockMapWriter<'_> {
}]);
}
- self.0.sync(wrap_snapshot, edits);
+ let default_patch = Patch::default();
+ let (companion_snapshot, companion) = self
+ .companion
+ .as_ref()
+ .map(|companion| {
+ (
+ (companion.snapshot, &default_patch),
+ (companion.companion, companion.entity),
+ )
+ })
+ .unzip();
+ self.block_map
+ .sync(wrap_snapshot, edits, companion_snapshot, companion);
ids
}
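The `.unzip()` used here (and again in `resize`, `remove`, and the fold path below) is `Option::unzip`, which splits an optional pair into a pair of optionals so the snapshot half and the companion half can be passed to `sync` independently:

```rust
// Option::unzip turns Option<(A, B)> into (Option<A>, Option<B>).
let pair: Option<(u32, &str)> = Some((1, "companion"));
let (left, right) = pair.unzip();
assert_eq!((left, right), (Some(1), Some("companion")));
```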
#[ztracing::instrument(skip_all)]
pub fn resize(&mut self, mut heights: HashMap<CustomBlockId, u32>) {
- let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
+ let wrap_snapshot = &*self.block_map.wrap_snapshot.borrow();
let buffer = wrap_snapshot.buffer_snapshot();
let mut edits = Patch::default();
let mut last_block_buffer_row = None;
- for block in &mut self.0.custom_blocks {
+ for block in &mut self.block_map.custom_blocks {
if let Some(new_height) = heights.remove(&block.id) {
if let BlockPlacement::Replace(_) = &block.placement {
debug_assert!(new_height > 0);
@@ -1168,7 +1522,9 @@ impl BlockMapWriter<'_> {
};
let new_block = Arc::new(new_block);
*block = new_block.clone();
- self.0.custom_blocks_by_id.insert(block.id, new_block);
+ self.block_map
+ .custom_blocks_by_id
+ .insert(block.id, new_block);
let start_row = block.placement.start().to_point(buffer).row;
let end_row = block.placement.end().to_point(buffer).row;
@@ -1194,17 +1550,29 @@ impl BlockMapWriter<'_> {
}
}
- self.0.sync(wrap_snapshot, edits);
+ let default_patch = Patch::default();
+ let (companion_snapshot, companion) = self
+ .companion
+ .as_ref()
+ .map(|companion| {
+ (
+ (companion.snapshot, &default_patch),
+ (companion.companion, companion.entity),
+ )
+ })
+ .unzip();
+ self.block_map
+ .sync(wrap_snapshot, edits, companion_snapshot, companion);
}
#[ztracing::instrument(skip_all)]
pub fn remove(&mut self, block_ids: HashSet<CustomBlockId>) {
- let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
+ let wrap_snapshot = &*self.block_map.wrap_snapshot.borrow();
let buffer = wrap_snapshot.buffer_snapshot();
let mut edits = Patch::default();
let mut last_block_buffer_row = None;
let mut previous_wrap_row_range: Option<Range<WrapRow>> = None;
- self.0.custom_blocks.retain(|block| {
+ self.block_map.custom_blocks.retain(|block| {
if block_ids.contains(&block.id) {
let start = block.placement.start().to_point(buffer);
let end = block.placement.end().to_point(buffer);
@@ -1237,10 +1605,23 @@ impl BlockMapWriter<'_> {
true
}
});
- self.0
+ self.block_map
.custom_blocks_by_id
.retain(|id, _| !block_ids.contains(id));
- self.0.sync(wrap_snapshot, edits);
+ let default_patch = Patch::default();
+ let (companion_snapshot, companion) = self
+ .companion
+ .as_ref()
+ .map(|companion| {
+ (
+ (companion.snapshot, &default_patch),
+ (companion.companion, companion.entity),
+ )
+ })
+ .unzip();
+
+ self.block_map
+ .sync(wrap_snapshot, edits, companion_snapshot, companion);
}
#[ztracing::instrument(skip_all)]
@@ -1249,7 +1630,7 @@ impl BlockMapWriter<'_> {
ranges: impl IntoIterator<Item = Range<MultiBufferOffset>>,
inclusive: bool,
) {
- let wrap_snapshot = self.0.wrap_snapshot.borrow();
+ let wrap_snapshot = self.block_map.wrap_snapshot.borrow();
let mut blocks_to_remove = HashSet::default();
for range in ranges {
for block in self.blocks_intersecting_buffer_range(range, inclusive) {
@@ -1263,7 +1644,9 @@ impl BlockMapWriter<'_> {
}
pub fn disable_header_for_buffer(&mut self, buffer_id: BufferId) {
- self.0.buffers_with_disabled_headers.insert(buffer_id);
+ self.block_map
+ .buffers_with_disabled_headers
+ .insert(buffer_id);
}
#[ztracing::instrument(skip_all)]
@@ -1297,16 +1680,16 @@ impl BlockMapWriter<'_> {
let mut ranges = Vec::new();
for buffer_id in buffer_ids {
if fold {
- self.0.folded_buffers.insert(buffer_id);
+ self.block_map.folded_buffers.insert(buffer_id);
} else {
- self.0.folded_buffers.remove(&buffer_id);
+ self.block_map.folded_buffers.remove(&buffer_id);
}
ranges.extend(multi_buffer.excerpt_ranges_for_buffer(buffer_id, cx));
}
ranges.sort_unstable_by_key(|range| range.start);
let mut edits = Patch::default();
- let wrap_snapshot = self.0.wrap_snapshot.borrow().clone();
+ let wrap_snapshot = self.block_map.wrap_snapshot.borrow().clone();
for range in ranges {
let last_edit_row = cmp::min(
wrap_snapshot.make_wrap_point(range.end, Bias::Right).row() + WrapRow(1),
@@ -1319,7 +1702,19 @@ impl BlockMapWriter<'_> {
});
}
- self.0.sync(&wrap_snapshot, edits);
+ let default_patch = Patch::default();
+ let (companion_snapshot, companion) = self
+ .companion
+ .as_ref()
+ .map(|companion| {
+ (
+ (companion.snapshot, &default_patch),
+ (companion.companion, companion.entity),
+ )
+ })
+ .unzip();
+ self.block_map
+ .sync(&wrap_snapshot, edits, companion_snapshot, companion);
}
#[ztracing::instrument(skip_all)]
@@ -1331,27 +1726,28 @@ impl BlockMapWriter<'_> {
if range.is_empty() && !inclusive {
return &[];
}
- let wrap_snapshot = self.0.wrap_snapshot.borrow();
+ let wrap_snapshot = self.block_map.wrap_snapshot.borrow();
let buffer = wrap_snapshot.buffer_snapshot();
- let start_block_ix = match self.0.custom_blocks.binary_search_by(|block| {
+ let start_block_ix = match self.block_map.custom_blocks.binary_search_by(|block| {
let block_end = block.end().to_offset(buffer);
block_end.cmp(&range.start).then(Ordering::Greater)
}) {
Ok(ix) | Err(ix) => ix,
};
- let end_block_ix = match self.0.custom_blocks[start_block_ix..].binary_search_by(|block| {
- let block_start = block.start().to_offset(buffer);
- block_start.cmp(&range.end).then(if inclusive {
- Ordering::Less
- } else {
- Ordering::Greater
- })
- }) {
- Ok(ix) | Err(ix) => ix,
- };
+ let end_block_ix =
+ match self.block_map.custom_blocks[start_block_ix..].binary_search_by(|block| {
+ let block_start = block.start().to_offset(buffer);
+ block_start.cmp(&range.end).then(if inclusive {
+ Ordering::Less
+ } else {
+ Ordering::Greater
+ })
+ }) {
+ Ok(ix) | Err(ix) => ix,
+ };
- &self.0.custom_blocks[start_block_ix..][..end_block_ix]
+ &self.block_map.custom_blocks[start_block_ix..][..end_block_ix]
}
}
@@ -1507,6 +1903,7 @@ impl BlockSnapshot {
BlockId::FoldedBuffer(excerpt_id) => self
.wrap_snapshot
.make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left),
+ BlockId::Spacer(_) => return None,
};
let wrap_row = wrap_point.row();
@@ -2064,9 +2461,13 @@ fn offset_for_row(s: &str, target: RowDelta) -> (RowDelta, usize) {
mod tests {
use super::*;
use crate::{
- display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap},
+ display_map::{
+ Companion, fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap, wrap_map::WrapMap,
+ },
+ split::{convert_lhs_rows_to_rhs, convert_rhs_rows_to_lhs},
test::test_font,
};
+ use buffer_diff::BufferDiff;
use gpui::{App, AppContext as _, Element, div, font, px};
use itertools::Itertools;
use language::{Buffer, Capability};
@@ -2118,7 +2519,7 @@ mod tests {
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
- let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut writer = block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let block_ids = writer.insert(vec![
BlockProperties {
style: BlockStyle::Fixed,
@@ -2143,7 +2544,7 @@ mod tests {
},
]);
- let snapshot = block_map.read(wraps_snapshot, Default::default());
+ let snapshot = block_map.read(wraps_snapshot, Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
let blocks = snapshot
@@ -2268,7 +2669,7 @@ mod tests {
let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
- let snapshot = block_map.read(wraps_snapshot, wrap_edits);
+ let snapshot = block_map.read(wraps_snapshot, wrap_edits, None, None);
assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
}
@@ -2321,7 +2722,7 @@ mod tests {
let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font, font_size, Some(wrap_width), cx);
let block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
- let snapshot = block_map.read(wraps_snapshot, Default::default());
+ let snapshot = block_map.read(wraps_snapshot, Default::default(), None, None);
// Each excerpt has a header above and footer below. Excerpts are also *separated* by a newline.
assert_eq!(snapshot.text(), "\nBuff\ner 1\n\nBuff\ner 2\n\nBuff\ner 3");
@@ -2356,7 +2757,7 @@ mod tests {
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
- let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut writer = block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let block_ids = writer.insert(vec![
BlockProperties {
style: BlockStyle::Fixed,
@@ -2382,59 +2783,64 @@ mod tests {
]);
{
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot.clone(), Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
- let mut block_map_writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut block_map_writer =
+ block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let mut new_heights = HashMap::default();
new_heights.insert(block_ids[0], 2);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot.clone(), Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\n\n\nbbb\nccc\nddd\n\n\n");
}
{
- let mut block_map_writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut block_map_writer =
+ block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let mut new_heights = HashMap::default();
new_heights.insert(block_ids[0], 1);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot.clone(), Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
}
{
- let mut block_map_writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut block_map_writer =
+ block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let mut new_heights = HashMap::default();
new_heights.insert(block_ids[0], 0);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot.clone(), Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\nbbb\nccc\nddd\n\n\n");
}
{
- let mut block_map_writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut block_map_writer =
+ block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let mut new_heights = HashMap::default();
new_heights.insert(block_ids[0], 3);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot.clone(), Default::default());
+ let snapshot = block_map.read(wraps_snapshot.clone(), Default::default(), None, None);
assert_eq!(snapshot.text(), "aaa\n\n\n\n\n\nbbb\nccc\nddd\n\n\n");
}
{
- let mut block_map_writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let mut block_map_writer =
+ block_map.write(wraps_snapshot.clone(), Default::default(), None, None);
let mut new_heights = HashMap::default();
new_heights.insert(block_ids[0], 3);
block_map_writer.resize(new_heights);
- let snapshot = block_map.read(wraps_snapshot, Default::default());
+ let snapshot = block_map.read(wraps_snapshot, Default::default(), None, None);
// Same height as before, should remain the same
assert_eq!(snapshot.text(), "aaa\n\n\n\n\n\nbbb\nccc\nddd\n\n\n");
}
@@ -37,6 +37,7 @@ mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
mod split;
+pub mod split_editor_view;
pub mod tasks;
#[cfg(test)]
@@ -57,8 +58,8 @@ pub use editor_settings::{
HideMouseMode, ScrollBeyondLastLine, ScrollbarAxes, SearchSettings, ShowMinimap,
};
pub use element::{
- CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition,
- render_breadcrumb_text,
+ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, OverlayPainter,
+ OverlayPainterData, PointForPosition, render_breadcrumb_text,
};
pub use git::blame::BlameRenderer;
pub use hover_popover::hover_markdown_style;
@@ -71,7 +72,8 @@ pub use multi_buffer::{
MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset,
ToPoint,
};
-pub use split::SplittableEditor;
+pub use split::{SplittableEditor, ToggleLockedCursors, ToggleSplitDiff};
+pub use split_editor_view::SplitEditorView;
pub use text::Bias;
use ::git::{Restore, blame::BlameEntry, commit::ParsedCommitMessage, status::FileStatus};
@@ -1310,6 +1312,10 @@ pub struct Editor {
folding_newlines: Task<()>,
select_next_is_case_sensitive: Option<bool>,
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
+ scroll_companion: Option<WeakEntity<Editor>>,
+ on_local_selections_changed:
+ Option<Box<dyn Fn(Point, &mut Window, &mut Context<Self>) + 'static>>,
+ suppress_selection_callback: bool,
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
accent_data: Option<AccentData>,
fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
@@ -1808,7 +1814,7 @@ pub(crate) struct FocusedBlock {
}
#[derive(Clone, Debug)]
-enum JumpData {
+pub enum JumpData {
MultiBufferRow {
row: MultiBufferRow,
line_offset_from_top: u32,
@@ -2515,6 +2521,9 @@ impl Editor {
folding_newlines: Task::ready(()),
lookup_key: None,
select_next_is_case_sensitive: None,
+ scroll_companion: None,
+ on_local_selections_changed: None,
+ suppress_selection_callback: false,
applicable_language_settings: HashMap::default(),
accent_data: None,
fetched_tree_sitter_chunks: HashMap::default(),
@@ -3522,6 +3531,14 @@ impl Editor {
}
self.blink_manager.update(cx, BlinkManager::pause_blinking);
+
+ if local && !self.suppress_selection_callback {
+ if let Some(callback) = self.on_local_selections_changed.as_ref() {
+ let cursor_position = self.selections.newest::<Point>(&display_map).head();
+ callback(cursor_position, window, cx);
+ }
+ }
+
cx.emit(EditorEvent::SelectionsChanged { local });
let selections = &self.selections.disjoint_anchors_arc();
@@ -5544,7 +5561,7 @@ impl Editor {
Bias::Left,
);
multi_buffer_snapshot
- .range_to_buffer_ranges(multi_buffer_visible_start..multi_buffer_visible_end)
+ .range_to_buffer_ranges(multi_buffer_visible_start..=multi_buffer_visible_end)
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
.filter_map(|(buffer, excerpt_visible_range, excerpt_id)| {
@@ -7346,7 +7363,9 @@ impl Editor {
}
let match_task = cx.background_spawn(async move {
let buffer_ranges = multi_buffer_snapshot
- .range_to_buffer_ranges(multi_buffer_range_to_query)
+ .range_to_buffer_ranges(
+ multi_buffer_range_to_query.start..=multi_buffer_range_to_query.end,
+ )
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty());
let mut match_ranges = Vec::new();
@@ -8522,7 +8541,7 @@ impl Editor {
..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right);
for (buffer_snapshot, range, excerpt_id) in
- multi_buffer_snapshot.range_to_buffer_ranges(range)
+ multi_buffer_snapshot.range_to_buffer_ranges(range.start..=range.end)
{
let Some(buffer) = project
.read(cx)
@@ -18743,7 +18762,7 @@ impl Editor {
BTreeMap::new();
for selection_range in selection_ranges {
for (buffer, buffer_range, _) in
- snapshot.range_to_buffer_ranges(selection_range)
+ snapshot.range_to_buffer_ranges(selection_range.start..=selection_range.end)
{
let buffer_id = buffer.remote_id();
let start = buffer.anchor_before(buffer_range.start);
@@ -20779,7 +20798,6 @@ impl Editor {
pub fn set_soft_wrap_mode(
&mut self,
mode: language_settings::SoftWrap,
-
cx: &mut Context<Self>,
) {
self.soft_wrap_mode_override = Some(mode);
@@ -21021,6 +21039,25 @@ impl Editor {
self.delegate_expand_excerpts = delegate;
}
+ pub fn set_scroll_companion(&mut self, companion: Option<WeakEntity<Editor>>) {
+ self.scroll_companion = companion;
+ }
+
+ pub fn scroll_companion(&self) -> Option<&WeakEntity<Editor>> {
+ self.scroll_companion.as_ref()
+ }
+
+ pub fn set_on_local_selections_changed(
+ &mut self,
+ callback: Option<Box<dyn Fn(Point, &mut Window, &mut Context<Self>) + 'static>>,
+ ) {
+ self.on_local_selections_changed = callback;
+ }
+
+ pub fn set_suppress_selection_callback(&mut self, suppress: bool) {
+ self.suppress_selection_callback = suppress;
+ }
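These hooks are presumably how the split view keeps its two editors in lock-step; a hedged sketch of the wiring (the `left`/`right` entities and the mirroring body are assumptions, not taken from this change):

```rust
left.update(cx, |left_editor, _cx| {
    left_editor.set_scroll_companion(Some(right.downgrade()));
    let right = right.downgrade();
    left_editor.set_on_local_selections_changed(Some(Box::new(move |cursor: Point, _window, cx| {
        if let Some(right) = right.upgrade() {
            right.update(cx, |right_editor, _cx| {
                // Suppress the mirrored side's callback to avoid ping-ponging.
                right_editor.set_suppress_selection_callback(true);
                // ... move the right-hand selection to the row matching `cursor` ...
                right_editor.set_suppress_selection_callback(false);
            });
        }
    })));
});
```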
+
pub fn set_show_git_diff_gutter(&mut self, show_git_diff_gutter: bool, cx: &mut Context<Self>) {
self.show_git_diff_gutter = Some(show_git_diff_gutter);
cx.notify();
@@ -22524,7 +22561,8 @@ impl Editor {
let multi_buffer = self.buffer().read(cx);
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
- let buffer_ranges = multi_buffer_snapshot.range_to_buffer_ranges(selection_range);
+ let buffer_ranges = multi_buffer_snapshot
+ .range_to_buffer_ranges(selection_range.start..=selection_range.end);
let (buffer, range, _) = if selection.reversed {
buffer_ranges.first()
@@ -22542,15 +22580,19 @@ impl Editor {
};
let buffer_diff_snapshot = buffer_diff.read(cx).snapshot(cx);
+ let (mut translated, _, _) = buffer_diff_snapshot.points_to_base_text_points(
+ [
+ Point::new(start_row_in_buffer, 0),
+ Point::new(end_row_in_buffer, 0),
+ ],
+ buffer,
+ );
+ let start_row = translated.next().unwrap().start.row;
+ let end_row = translated.next().unwrap().end.row;
Some((
multi_buffer.buffer(buffer.remote_id()).unwrap(),
- buffer_diff_snapshot.row_to_base_text_row(start_row_in_buffer, Bias::Left, buffer)
- ..buffer_diff_snapshot.row_to_base_text_row(
- end_row_in_buffer,
- Bias::Left,
- buffer,
- ),
+ start_row..end_row,
))
});
@@ -23839,7 +23881,7 @@ impl Editor {
self.open_excerpts_common(None, false, window, cx)
}
- fn open_excerpts_common(
+ pub(crate) fn open_excerpts_common(
&mut self,
jump_data: Option<JumpData>,
split: bool,
@@ -24331,6 +24373,7 @@ impl Editor {
modifiers: window.modifiers(),
},
&position_map,
+ None,
window,
cx,
);
@@ -24794,7 +24837,7 @@ impl Editor {
self.active_diagnostics == ActiveDiagnostic::All || !self.mode().is_full()
}
- fn create_style(&self, cx: &App) -> EditorStyle {
+ pub(crate) fn create_style(&self, cx: &App) -> EditorStyle {
let settings = ThemeSettings::get_global(cx);
let mut text_style = match self.mode {
@@ -25480,7 +25523,10 @@ impl NewlineConfig {
buffer: &MultiBufferSnapshot,
range: Range<MultiBufferOffset>,
) -> bool {
- let (buffer, range) = match buffer.range_to_buffer_ranges(range).as_slice() {
+ let (buffer, range) = match buffer
+ .range_to_buffer_ranges(range.start..=range.end)
+ .as_slice()
+ {
[(buffer, range, _)] => (*buffer, range.clone()),
_ => return false,
};
@@ -49,7 +49,8 @@ use gpui::{
Pixels, PressureStage, ScrollDelta, ScrollHandle, ScrollWheelEvent, ShapedLine, SharedString,
Size, StatefulInteractiveElement, Style, Styled, StyledText, TextAlign, TextRun,
TextStyleRefinement, WeakEntity, Window, anchored, deferred, div, fill, linear_color_stop,
- linear_gradient, outline, point, px, quad, relative, size, solid_background, transparent_black,
+ linear_gradient, outline, pattern_slash, point, px, quad, relative, rgba, size,
+ solid_background, transparent_black,
};
use itertools::Itertools;
use language::{IndentGuideSettings, language_settings::ShowWhitespaceSetting};
@@ -191,9 +192,29 @@ struct RenderBlocksOutput {
resized_blocks: Option<HashMap<CustomBlockId, u32>>,
}
+/// Data passed to overlay painters during the paint phase.
+pub struct OverlayPainterData<'a> {
+ pub editor: &'a Entity<Editor>,
+ pub snapshot: &'a EditorSnapshot,
+ pub scroll_position: gpui::Point<ScrollOffset>,
+ pub line_height: Pixels,
+ pub visible_row_range: Range<DisplayRow>,
+ pub hitbox: &'a Hitbox,
+}
+
+pub type OverlayPainter = Box<dyn FnOnce(OverlayPainterData<'_>, &mut Window, &mut App)>;
+
pub struct EditorElement {
editor: Entity<Editor>,
style: EditorStyle,
+ split_side: Option<SplitSide>,
+ overlay_painter: Option<OverlayPainter>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum SplitSide {
+ Left,
+ Right,
}
impl EditorElement {
@@ -203,9 +224,23 @@ impl EditorElement {
Self {
editor: editor.clone(),
style,
+ split_side: None,
+ overlay_painter: None,
}
}
+ pub fn set_split_side(&mut self, side: SplitSide) {
+ self.split_side = Some(side);
+ }
+
+ pub fn set_overlay_painter(&mut self, painter: OverlayPainter) {
+ self.overlay_painter = Some(painter);
+ }
+
+ fn should_show_buffer_headers(&self) -> bool {
+ self.split_side.is_none()
+ }
+
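A sketch of how a split view might configure the element with these new hooks (the construction call and the painter body are illustrative assumptions):

```rust
let mut element = EditorElement::new(&editor, style);
element.set_split_side(SplitSide::Left);
element.set_overlay_painter(Box::new(|data: OverlayPainterData<'_>, _window, _cx| {
    // Paint connectors or shading across this side's visible rows.
    let _visible_rows = data.visible_row_range.clone();
    let _top = data.scroll_position.y;
}));
```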
fn register_actions(&self, window: &mut Window, cx: &mut App) {
let editor = &self.editor;
editor.update(cx, |editor, cx| {
@@ -1201,6 +1236,7 @@ impl EditorElement {
editor: &mut Editor,
event: &MouseMoveEvent,
position_map: &PositionMap,
+ split_side: Option<SplitSide>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -1328,7 +1364,10 @@ impl EditorElement {
indicator.is_active && indicator.display_row == valid_point.row()
});
- let breakpoint_indicator = if gutter_hovered && !is_on_diff_review_button_row {
+ let breakpoint_indicator = if gutter_hovered
+ && !is_on_diff_review_button_row
+ && split_side != Some(SplitSide::Left)
+ {
let buffer_anchor = position_map
.snapshot
.display_point_to_anchor(valid_point, Bias::Left);
@@ -1908,6 +1947,10 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Option<EditorScrollbars> {
+ if self.split_side == Some(SplitSide::Left) {
+ return None;
+ }
+
let show_scrollbars = self.editor.read(cx).show_scrollbars;
if (!show_scrollbars.horizontal && !show_scrollbars.vertical)
|| self.style.scrollbar_width.is_zero()
@@ -2466,6 +2509,11 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Option<AnyElement> {
+ // Don't show code actions in split diff view
+ if self.split_side.is_some() {
+ return None;
+ }
+
if !snapshot
.show_code_actions
.unwrap_or(EditorSettings::get_global(cx).inline_code_actions)
@@ -3045,6 +3093,10 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Vec<AnyElement> {
+ if self.split_side == Some(SplitSide::Left) {
+ return Vec::new();
+ }
+
self.editor.update(cx, |editor, cx| {
breakpoints
.into_iter()
@@ -3145,6 +3197,10 @@ impl EditorElement {
window: &mut Window,
cx: &mut App,
) -> Vec<AnyElement> {
+ if self.split_side == Some(SplitSide::Left) {
+ return Vec::new();
+ }
+
self.editor.update(cx, |editor, cx| {
let active_task_indicator_row =
// TODO: add edit button on the right side of each row in the context menu
@@ -3848,18 +3904,18 @@ impl EditorElement {
height,
..
} => {
- let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
- let result = v_flex().id(block_id).w_full().pr(editor_margins.right);
-
- let jump_data = header_jump_data(
- snapshot,
- block_row_start,
- *height,
- first_excerpt,
- latest_selection_anchors,
- );
- result
- .child(self.render_buffer_header(
+ let mut result = v_flex().id(block_id).w_full().pr(editor_margins.right);
+
+ if self.should_show_buffer_headers() {
+ let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
+ let jump_data = header_jump_data(
+ snapshot,
+ block_row_start,
+ *height,
+ first_excerpt,
+ latest_selection_anchors,
+ );
+ result = result.child(self.render_buffer_header(
first_excerpt,
true,
selected,
@@ -3867,8 +3923,13 @@ impl EditorElement {
jump_data,
window,
cx,
- ))
- .into_any_element()
+ ));
+ } else {
+ result =
+ result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
+ }
+
+ result.into_any_element()
}
Block::ExcerptBoundary { .. } => {
@@ -3892,22 +3953,27 @@ impl EditorElement {
Block::BufferHeader { excerpt, height } => {
let mut result = v_flex().id(block_id).w_full();
- let jump_data = header_jump_data(
- snapshot,
- block_row_start,
- *height,
- excerpt,
- latest_selection_anchors,
- );
+ if self.should_show_buffer_headers() {
+ let jump_data = header_jump_data(
+ snapshot,
+ block_row_start,
+ *height,
+ excerpt,
+ latest_selection_anchors,
+ );
- if sticky_header_excerpt_id != Some(excerpt.id) {
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ if sticky_header_excerpt_id != Some(excerpt.id) {
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
- result = result.child(div().pr(editor_margins.right).child(
- self.render_buffer_header(
- excerpt, false, selected, false, jump_data, window, cx,
- ),
- ));
+ result = result.child(div().pr(editor_margins.right).child(
+ self.render_buffer_header(
+ excerpt, false, selected, false, jump_data, window, cx,
+ ),
+ ));
+ } else {
+ result =
+ result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
+ }
} else {
result =
result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height()));
@@ -3915,6 +3981,13 @@ impl EditorElement {
result.into_any()
}
+
+ Block::Spacer { height, .. } => div()
+ .id(block_id)
+ .w_full()
+ .h((*height as f32) * line_height)
+ .bg(pattern_slash(rgba(0xFFFFFF10), 8.0, 8.0))
+ .into_any(),
};
// Discover the element's content height, then round up to the nearest multiple of line height.
@@ -6613,7 +6686,7 @@ impl EditorElement {
GitGutterSetting::TrackedFiles
)
});
- if show_git_gutter {
+ if show_git_gutter && self.split_side.is_none() {
Self::paint_gutter_diff_hunks(layout, window, cx)
}
@@ -7949,6 +8022,7 @@ impl EditorElement {
window.on_mouse_event({
let position_map = layout.position_map.clone();
let editor = self.editor.clone();
+ let split_side = self.split_side;
move |event: &MouseMoveEvent, phase, window, cx| {
if phase == DispatchPhase::Bubble {
@@ -7962,7 +8036,7 @@ impl EditorElement {
Self::mouse_dragged(editor, event, &position_map, window, cx)
}
- Self::mouse_moved(editor, event, &position_map, window, cx)
+ Self::mouse_moved(editor, event, &position_map, split_side, window, cx)
});
}
}
@@ -8013,7 +8087,7 @@ impl EditorElement {
}
let buffer_snapshot = &display_snapshot.buffer_snapshot();
for (buffer, buffer_range, excerpt_id) in
- buffer_snapshot.range_to_buffer_ranges(anchor_range)
+ buffer_snapshot.range_to_buffer_ranges(anchor_range.start..=anchor_range.end)
{
let buffer_range =
buffer.anchor_after(buffer_range.start)..buffer.anchor_before(buffer_range.end);
@@ -8264,7 +8338,7 @@ fn file_status_label_color(file_status: Option<FileStatus>) -> Color {
})
}
-fn header_jump_data(
+pub(crate) fn header_jump_data(
editor_snapshot: &EditorSnapshot,
block_row_start: DisplayRow,
height: u32,
@@ -9528,6 +9602,38 @@ impl Element for EditorElement {
}
};
+ // When jumping from one side of a side-by-side diff to the
+ // other, we autoscroll to keep the target range in view.
+ //
+ // If our scroll companion has a pending autoscroll request, process it
+ // first so that both editors render with synchronized scroll positions.
+ // This is important for split diff views where one editor may prepaint
+ // before the other.
+ if let Some(companion) = self
+ .editor
+ .read(cx)
+ .scroll_companion()
+ .and_then(|c| c.upgrade())
+ {
+ if companion.read(cx).scroll_manager.has_autoscroll_request() {
+ companion.update(cx, |companion_editor, cx| {
+ let companion_autoscroll_request =
+ companion_editor.scroll_manager.take_autoscroll_request();
+ companion_editor.autoscroll_vertically(
+ bounds,
+ line_height,
+ max_scroll_top,
+ companion_autoscroll_request,
+ window,
+ cx,
+ );
+ });
+ snapshot = self
+ .editor
+ .update(cx, |editor, cx| editor.snapshot(window, cx));
+ }
+ }
+
let (
autoscroll_request,
autoscroll_containing_element,
@@ -10070,23 +10176,27 @@ impl Element for EditorElement {
}
}
- let sticky_buffer_header = sticky_header_excerpt.map(|sticky_header_excerpt| {
- window.with_element_namespace("blocks", |window| {
- self.layout_sticky_buffer_header(
- sticky_header_excerpt,
- scroll_position,
- line_height,
- right_margin,
- &snapshot,
- &hitbox,
- &selected_buffer_ids,
- &blocks,
- &latest_selection_anchors,
- window,
- cx,
- )
+ let sticky_buffer_header = if self.should_show_buffer_headers() {
+ sticky_header_excerpt.map(|sticky_header_excerpt| {
+ window.with_element_namespace("blocks", |window| {
+ self.layout_sticky_buffer_header(
+ sticky_header_excerpt,
+ scroll_position,
+ line_height,
+ right_margin,
+ &snapshot,
+ &hitbox,
+ &selected_buffer_ids,
+ &blocks,
+ &latest_selection_anchors,
+ window,
+ cx,
+ )
+ })
})
- });
+ } else {
+ None
+ };
let start_buffer_row =
MultiBufferRow(start_anchor.to_point(&snapshot.buffer_snapshot()).row);
@@ -10753,6 +10863,18 @@ impl Element for EditorElement {
self.paint_scrollbars(layout, window, cx);
self.paint_edit_prediction_popover(layout, window, cx);
self.paint_mouse_context_menu(layout, window, cx);
+
+ if let Some(overlay_painter) = self.overlay_painter.take() {
+ let data = OverlayPainterData {
+ editor: &self.editor,
+ snapshot: &layout.position_map.snapshot,
+ scroll_position: layout.position_map.snapshot.scroll_position(),
+ line_height: layout.position_map.line_height,
+ visible_row_range: layout.visible_display_row_range.clone(),
+ hitbox: &layout.hitbox,
+ };
+ overlay_painter(data, window, cx);
+ }
});
})
})
@@ -11563,15 +11685,15 @@ impl PositionMap {
}
}
-struct BlockLayout {
- id: BlockId,
- x_offset: Pixels,
- row: Option<DisplayRow>,
- element: AnyElement,
- available_space: Size<AvailableSpace>,
- style: BlockStyle,
- overlaps_gutter: bool,
- is_buffer_header: bool,
+pub(crate) struct BlockLayout {
+ pub(crate) id: BlockId,
+ pub(crate) x_offset: Pixels,
+ pub(crate) row: Option<DisplayRow>,
+ pub(crate) element: AnyElement,
+ pub(crate) available_space: Size<AvailableSpace>,
+ pub(crate) style: BlockStyle,
+ pub(crate) overlaps_gutter: bool,
+ pub(crate) is_buffer_header: bool,
}
pub fn layout_line(
@@ -365,6 +365,10 @@ impl ScrollManager {
self.show_scrollbars
}
+ pub fn has_autoscroll_request(&self) -> bool {
+ self.autoscroll_request.is_some()
+ }
+
pub fn take_autoscroll_request(&mut self) -> Option<(Autoscroll, bool)> {
self.autoscroll_request.take()
}
@@ -588,14 +592,30 @@ impl Editor {
cx: &mut Context<Self>,
) -> WasScrolled {
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
- self.set_scroll_position_taking_display_map(
+ let was_scrolled = self.set_scroll_position_taking_display_map(
scroll_position,
local,
autoscroll,
map,
window,
cx,
- )
+ );
+
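+ // Propagate local scrolls to the scroll companion (the other side of a split view), applying the companion's update as non-local so it doesn't echo back.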
+ if local && was_scrolled.0 {
+ if let Some(companion) = self.scroll_companion.as_ref().and_then(|c| c.upgrade()) {
+ companion.update(cx, |companion_editor, cx| {
+ companion_editor.set_scroll_position_internal(
+ scroll_position,
+ false,
+ false,
+ window,
+ cx,
+ );
+ });
+ }
+ }
+
+ was_scrolled
}
fn set_scroll_position_taking_display_map(
@@ -1,25 +1,241 @@
-use std::ops::Range;
+use std::ops::{Bound, Range};
-use buffer_diff::BufferDiff;
+use buffer_diff::{BufferDiff, BufferDiffSnapshot};
use collections::HashMap;
use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
use gpui::{
Action, AppContext as _, Entity, EventEmitter, Focusable, NoAction, Subscription, WeakEntity,
};
use language::{Buffer, Capability};
-use multi_buffer::{Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, PathKey};
+use multi_buffer::{
+ Anchor, BufferOffset, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
+ MultiBufferPoint, MultiBufferSnapshot, PathKey,
+};
use project::Project;
use rope::Point;
-use text::{Bias, OffsetRangeExt as _};
+use text::{OffsetRangeExt as _, ToPoint as _};
use ui::{
App, Context, InteractiveElement as _, IntoElement as _, ParentElement as _, Render,
Styled as _, Window, div,
};
+
+use crate::{
+ display_map::MultiBufferRowMapping,
+ split_editor_view::{SplitEditorState, SplitEditorView},
+};
use workspace::{
- ActivePaneDecorator, Item, ItemHandle, Pane, PaneGroup, SplitDirection, Workspace,
+ ActivatePaneLeft, ActivatePaneRight, Item, ItemHandle, Pane, PaneGroup, SplitDirection,
+ Workspace,
+};
+
+use crate::{
+ Autoscroll, DisplayMap, Editor, EditorEvent, ToggleCodeActions, ToggleSoftWrap,
+ actions::{DisableBreakpoint, EditLogBreakpoint, EnableBreakpoint, ToggleBreakpoint},
+ display_map::Companion,
};
+use zed_actions::assistant::InlineAssist;
+
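+/// Translates row boundaries from the left-hand (base text) multibuffer into the corresponding row ranges of the right-hand multibuffer, using each buffer's diff and the LHS-to-RHS excerpt mapping.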
+pub(crate) fn convert_lhs_rows_to_rhs(
+ lhs_excerpt_to_rhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
+ rhs_snapshot: &MultiBufferSnapshot,
+ lhs_snapshot: &MultiBufferSnapshot,
+ lhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+) -> Vec<MultiBufferRowMapping> {
+ convert_rows(
+ lhs_excerpt_to_rhs_excerpt,
+ lhs_snapshot,
+ rhs_snapshot,
+ lhs_bounds,
+ |diff, points, buffer| {
+ let (points, first_group, prev_boundary) =
+ diff.base_text_points_to_points(points, buffer);
+ (points.collect(), first_group, prev_boundary)
+ },
+ )
+}
+
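+/// Translates row boundaries from the right-hand (current text) multibuffer into the corresponding row ranges of the left-hand (base text) multibuffer.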
+pub(crate) fn convert_rhs_rows_to_lhs(
+ rhs_excerpt_to_lhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
+ lhs_snapshot: &MultiBufferSnapshot,
+ rhs_snapshot: &MultiBufferSnapshot,
+ rhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+) -> Vec<MultiBufferRowMapping> {
+ convert_rows(
+ rhs_excerpt_to_lhs_excerpt,
+ rhs_snapshot,
+ lhs_snapshot,
+ rhs_bounds,
+ |diff, points, buffer| {
+ let (points, first_group, prev_boundary) =
+ diff.points_to_base_text_points(points, buffer);
+ (points.collect(), first_group, prev_boundary)
+ },
+ )
+}
-use crate::{Editor, EditorEvent};
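+/// Shared implementation of the two conversions above: walks every excerpt covered by `source_bounds` in the source snapshot and translates its rows into the target snapshot via `translate_fn`, producing one `MultiBufferRowMapping` per excerpt.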
+fn convert_rows<F>(
+ excerpt_map: &HashMap<ExcerptId, ExcerptId>,
+ source_snapshot: &MultiBufferSnapshot,
+ target_snapshot: &MultiBufferSnapshot,
+ source_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ translate_fn: F,
+) -> Vec<MultiBufferRowMapping>
+where
+ F: Fn(
+ &BufferDiffSnapshot,
+ Vec<Point>,
+ &text::BufferSnapshot,
+ ) -> (
+ Vec<Range<Point>>,
+ Option<Range<Point>>,
+ Option<(Point, Range<Point>)>,
+ ),
+{
+ let mut result = Vec::new();
+
+ for (buffer, buffer_offset_range, source_excerpt_id) in
+ source_snapshot.range_to_buffer_ranges(source_bounds)
+ {
+ if let Some(translation) = convert_excerpt_rows(
+ excerpt_map,
+ source_snapshot,
+ target_snapshot,
+ source_excerpt_id,
+ buffer,
+ buffer_offset_range,
+ &translate_fn,
+ ) {
+ result.push(translation);
+ }
+ }
+
+ result
+}
+
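+/// Translates the rows of a single source excerpt: looks up the paired target excerpt and the buffer's diff, maps the source points through the diff, clamps the results to the target excerpt's context range, and converts everything into multibuffer coordinates.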
+fn convert_excerpt_rows<F>(
+ excerpt_map: &HashMap<ExcerptId, ExcerptId>,
+ source_snapshot: &MultiBufferSnapshot,
+ target_snapshot: &MultiBufferSnapshot,
+ source_excerpt_id: ExcerptId,
+ source_buffer: &text::BufferSnapshot,
+ source_buffer_range: Range<BufferOffset>,
+ translate_fn: F,
+) -> Option<MultiBufferRowMapping>
+where
+ F: Fn(
+ &BufferDiffSnapshot,
+ Vec<Point>,
+ &text::BufferSnapshot,
+ ) -> (
+ Vec<Range<Point>>,
+ Option<Range<Point>>,
+ Option<(Point, Range<Point>)>,
+ ),
+{
+ let target_excerpt_id = excerpt_map.get(&source_excerpt_id).copied()?;
+ let target_buffer = target_snapshot.buffer_for_excerpt(target_excerpt_id)?;
+
+ let diff = source_snapshot.diff_for_buffer_id(source_buffer.remote_id())?;
+ let rhs_buffer = if source_buffer.remote_id() == diff.base_text().remote_id() {
+ &target_buffer
+ } else {
+ source_buffer
+ };
+
+ let local_start = source_buffer.offset_to_point(source_buffer_range.start.0);
+ let local_end = source_buffer.offset_to_point(source_buffer_range.end.0);
+
+ let mut input_points: Vec<Point> = (local_start.row..=local_end.row)
+ .map(|row| Point::new(row, 0))
+ .collect();
+ if local_end.column > 0 {
+ input_points.push(local_end);
+ }
+
+ let (translated_ranges, first_group, prev_boundary) =
+ translate_fn(&diff, input_points.clone(), rhs_buffer);
+
+ let source_multibuffer_range = source_snapshot.range_for_excerpt(source_excerpt_id)?;
+ let source_excerpt_start_in_multibuffer = source_multibuffer_range.start;
+ let source_context_range = source_snapshot.context_range_for_excerpt(source_excerpt_id)?;
+ let source_excerpt_start_in_buffer = source_context_range.start.to_point(&source_buffer);
+ let source_excerpt_end_in_buffer = source_context_range.end.to_point(&source_buffer);
+ let target_multibuffer_range = target_snapshot.range_for_excerpt(target_excerpt_id)?;
+ let target_excerpt_start_in_multibuffer = target_multibuffer_range.start;
+ let target_context_range = target_snapshot.context_range_for_excerpt(target_excerpt_id)?;
+ let target_excerpt_start_in_buffer = target_context_range.start.to_point(&target_buffer);
+ let target_excerpt_end_in_buffer = target_context_range.end.to_point(&target_buffer);
+
+ let boundaries: Vec<_> = input_points
+ .into_iter()
+ .zip(translated_ranges)
+ .map(|(source_buffer_point, target_range)| {
+ let source_multibuffer_point = source_excerpt_start_in_multibuffer
+ + (source_buffer_point - source_excerpt_start_in_buffer.min(source_buffer_point));
+
+ let clamped_target_start = target_range
+ .start
+ .max(target_excerpt_start_in_buffer)
+ .min(target_excerpt_end_in_buffer);
+ let clamped_target_end = target_range
+ .end
+ .max(target_excerpt_start_in_buffer)
+ .min(target_excerpt_end_in_buffer);
+
+ let target_multibuffer_start = target_excerpt_start_in_multibuffer
+ + (clamped_target_start - target_excerpt_start_in_buffer);
+
+ let target_multibuffer_end = target_excerpt_start_in_multibuffer
+ + (clamped_target_end - target_excerpt_start_in_buffer);
+
+ (
+ source_multibuffer_point,
+ target_multibuffer_start..target_multibuffer_end,
+ )
+ })
+ .collect();
+ let first_group = first_group.map(|first_group| {
+ let start = source_excerpt_start_in_multibuffer
+ + (first_group.start - source_excerpt_start_in_buffer.min(first_group.start));
+ let end = source_excerpt_start_in_multibuffer
+ + (first_group.end - source_excerpt_start_in_buffer.min(first_group.end));
+ start..end
+ });
+
+ let prev_boundary = prev_boundary.map(|(source_buffer_point, target_range)| {
+ let source_multibuffer_point = source_excerpt_start_in_multibuffer
+ + (source_buffer_point - source_excerpt_start_in_buffer.min(source_buffer_point));
+
+ let clamped_target_start = target_range
+ .start
+ .max(target_excerpt_start_in_buffer)
+ .min(target_excerpt_end_in_buffer);
+ let clamped_target_end = target_range
+ .end
+ .max(target_excerpt_start_in_buffer)
+ .min(target_excerpt_end_in_buffer);
+
+ let target_multibuffer_start = target_excerpt_start_in_multibuffer
+ + (clamped_target_start - target_excerpt_start_in_buffer);
+ let target_multibuffer_end = target_excerpt_start_in_multibuffer
+ + (clamped_target_end - target_excerpt_start_in_buffer);
+
+ (
+ source_multibuffer_point,
+ target_multibuffer_start..target_multibuffer_end,
+ )
+ });
+
+ Some(MultiBufferRowMapping {
+ boundaries,
+ first_group,
+ prev_boundary,
+ source_excerpt_end: source_excerpt_start_in_multibuffer
+ + (source_excerpt_end_in_buffer - source_excerpt_start_in_buffer),
+ target_excerpt_end: target_excerpt_start_in_multibuffer
+ + (target_excerpt_end_in_buffer - target_excerpt_start_in_buffer),
+ })
+}
struct SplitDiffFeatureFlag;
@@ -39,12 +255,28 @@ struct SplitDiff;
#[action(namespace = editor)]
struct UnsplitDiff;
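+
+/// Toggles between the unified diff view and the side-by-side (split) diff view.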
+#[derive(Clone, Copy, PartialEq, Eq, Action, Default)]
+#[action(namespace = editor)]
+pub struct ToggleSplitDiff;
+
+#[derive(Clone, Copy, PartialEq, Eq, Action, Default)]
+#[action(namespace = editor)]
+struct JumpToCorrespondingRow;
+
+/// When locked cursors mode is enabled, cursor movements in one editor will
+/// update the cursor position in the other editor to the corresponding row.
+#[derive(Clone, Copy, PartialEq, Eq, Action, Default)]
+#[action(namespace = editor)]
+pub struct ToggleLockedCursors;
+
pub struct SplittableEditor {
primary_multibuffer: Entity<MultiBuffer>,
primary_editor: Entity<Editor>,
secondary: Option<SecondaryEditor>,
panes: PaneGroup,
workspace: WeakEntity<Workspace>,
+ split_state: Entity<SplitEditorState>,
+ locked_cursors: bool,
_subscriptions: Vec<Subscription>,
}
@@ -53,8 +285,6 @@ struct SecondaryEditor {
editor: Entity<Editor>,
pane: Entity<Pane>,
has_latest_selection: bool,
- primary_to_secondary: HashMap<ExcerptId, ExcerptId>,
- secondary_to_primary: HashMap<ExcerptId, ExcerptId>,
_subscriptions: Vec<Subscription>,
}
@@ -63,6 +293,14 @@ impl SplittableEditor {
&self.primary_editor
}
+ pub fn secondary_editor(&self) -> Option<&Entity<Editor>> {
+ self.secondary.as_ref().map(|s| &s.editor)
+ }
+
+ pub fn is_split(&self) -> bool {
+ self.secondary.is_some()
+ }
+
pub fn last_selected_editor(&self) -> &Entity<Editor> {
if let Some(secondary) = &self.secondary
&& secondary.has_latest_selection
@@ -140,12 +378,15 @@ impl SplittableEditor {
.ok();
}
});
+ let split_state = cx.new(|cx| SplitEditorState::new(cx));
Self {
primary_editor,
primary_multibuffer,
secondary: None,
panes,
workspace: workspace.downgrade(),
+ split_state,
+ locked_cursors: false,
_subscriptions: subscriptions,
}
}
@@ -209,10 +450,13 @@ impl SplittableEditor {
lines,
direction,
} => {
- if let Some(secondary) = &this.secondary {
+ if this.secondary.is_some() {
+ let primary_display_map = this.primary_editor.read(cx).display_map.read(cx);
let primary_ids: Vec<_> = excerpt_ids
.iter()
- .filter_map(|id| secondary.secondary_to_primary.get(id).copied())
+ .filter_map(|id| {
+ primary_display_map.companion_excerpt_to_my_excerpt(*id, cx)
+ })
.collect();
this.expand_excerpts(primary_ids.into_iter(), *lines, *direction, cx);
}
@@ -231,27 +475,116 @@ impl SplittableEditor {
multibuffer: secondary_multibuffer,
pane: secondary_pane.clone(),
has_latest_selection: false,
- primary_to_secondary: HashMap::default(),
- secondary_to_primary: HashMap::default(),
_subscriptions: subscriptions,
};
+ let primary_display_map = self.primary_editor.read(cx).display_map.clone();
+ let secondary_display_map = secondary.editor.read(cx).display_map.clone();
+ let rhs_display_map_id = primary_display_map.entity_id();
+
self.primary_editor.update(cx, |editor, cx| {
editor.set_delegate_expand_excerpts(true);
editor.buffer().update(cx, |primary_multibuffer, cx| {
primary_multibuffer.set_show_deleted_hunks(false, cx);
- let paths = primary_multibuffer.paths().cloned().collect::<Vec<_>>();
- for path in paths {
- let Some(excerpt_id) = primary_multibuffer.excerpts_for_path(&path).next()
- else {
- continue;
- };
- let snapshot = primary_multibuffer.snapshot(cx);
- let buffer = snapshot.buffer_for_excerpt(excerpt_id).unwrap();
- let diff = primary_multibuffer.diff_for(buffer.remote_id()).unwrap();
- secondary.sync_path_excerpts(path.clone(), primary_multibuffer, diff, cx);
- }
+ primary_multibuffer.set_use_extended_diff_range(true, cx);
})
});
+
+ let path_diffs: Vec<_> = {
+ let primary_multibuffer = self.primary_multibuffer.read(cx);
+ primary_multibuffer
+ .paths()
+ .filter_map(|path| {
+ let excerpt_id = primary_multibuffer.excerpts_for_path(path).next()?;
+ let snapshot = primary_multibuffer.snapshot(cx);
+ let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
+ let diff = primary_multibuffer.diff_for(buffer.remote_id())?;
+ Some((path.clone(), diff))
+ })
+ .collect()
+ };
+
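+ // Build the companion mapping: record the LHS-to-RHS excerpt pairs and the base-text-to-buffer pairs so each display map can translate rows to the other side.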
+ let mut companion = Companion::new(
+ rhs_display_map_id,
+ convert_rhs_rows_to_lhs,
+ convert_lhs_rows_to_rhs,
+ );
+
+ for (path, diff) in path_diffs {
+ for (lhs, rhs) in secondary.update_path_excerpts_from_primary(
+ path,
+ &self.primary_multibuffer,
+ diff.clone(),
+ cx,
+ ) {
+ companion.add_excerpt_mapping(lhs, rhs);
+ }
+ companion.add_buffer_mapping(
+ diff.read(cx).base_text(cx).remote_id(),
+ diff.read(cx).buffer_id,
+ );
+ }
+
+ let companion = cx.new(|_| companion);
+
+ primary_display_map.update(cx, |dm, cx| {
+ dm.set_companion(
+ Some((secondary_display_map.downgrade(), companion.clone())),
+ cx,
+ );
+ });
+ secondary_display_map.update(cx, |dm, cx| {
+ dm.set_companion(Some((primary_display_map.downgrade(), companion)), cx);
+ });
+
+ let primary_weak = self.primary_editor.downgrade();
+ let secondary_weak = secondary.editor.downgrade();
+
+ let this = cx.entity().downgrade();
+ self.primary_editor.update(cx, |editor, _cx| {
+ editor.set_scroll_companion(Some(secondary_weak));
+ let this = this.clone();
+ editor.set_on_local_selections_changed(Some(Box::new(
+ move |cursor_position, window, cx| {
+ if let Some(this) = this.upgrade() {
+ this.update(cx, |this, cx| {
+ if this.locked_cursors {
+ this.sync_cursor_to_other_side(true, cursor_position, window, cx);
+ }
+ });
+ }
+ },
+ )));
+ });
+ secondary.editor.update(cx, |editor, _cx| {
+ editor.set_scroll_companion(Some(primary_weak));
+ let this = this.clone();
+ editor.set_on_local_selections_changed(Some(Box::new(
+ move |cursor_position, window, cx| {
+ if let Some(this) = this.upgrade() {
+ this.update(cx, |this, cx| {
+ if this.locked_cursors {
+ this.sync_cursor_to_other_side(false, cursor_position, window, cx);
+ }
+ });
+ }
+ },
+ )));
+ });
+
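+ // Start the secondary (left) editor at the primary editor's current scroll position.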
+ let primary_scroll_position = self
+ .primary_editor
+ .update(cx, |editor, cx| editor.scroll_position(cx));
+ secondary.editor.update(cx, |editor, cx| {
+ editor.set_scroll_position_internal(primary_scroll_position, false, false, window, cx);
+ });
+
+ // Copy soft wrap state from primary (source of truth) to secondary
+ let primary_soft_wrap_override = self.primary_editor.read(cx).soft_wrap_mode_override;
+ secondary.editor.update(cx, |editor, cx| {
+ editor.soft_wrap_mode_override = primary_soft_wrap_override;
+ cx.notify();
+ });
+
self.secondary = Some(secondary);
let primary_pane = self.panes.first_pane();
@@ -261,16 +594,282 @@ impl SplittableEditor {
cx.notify();
}
+ fn activate_pane_left(
+ &mut self,
+ _: &ActivatePaneLeft,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(secondary) = &mut self.secondary {
+ if !secondary.has_latest_selection {
+ secondary.editor.read(cx).focus_handle(cx).focus(window, cx);
+ secondary.editor.update(cx, |editor, cx| {
+ editor.request_autoscroll(Autoscroll::fit(), cx);
+ });
+ secondary.has_latest_selection = true;
+ cx.notify();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn activate_pane_right(
+ &mut self,
+ _: &ActivatePaneRight,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(secondary) = &mut self.secondary {
+ if secondary.has_latest_selection {
+ self.primary_editor
+ .read(cx)
+ .focus_handle(cx)
+ .focus(window, cx);
+ self.primary_editor.update(cx, |editor, cx| {
+ editor.request_autoscroll(Autoscroll::fit(), cx);
+ });
+ secondary.has_latest_selection = false;
+ cx.notify();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn toggle_locked_cursors(
+ &mut self,
+ _: &ToggleLockedCursors,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.locked_cursors = !self.locked_cursors;
+ cx.notify();
+ }
+
+ pub fn locked_cursors(&self) -> bool {
+ self.locked_cursors
+ }
+
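+ /// Translates `source_point` through the companion row mapping and moves the other editor's cursor to the resulting position, suppressing that editor's selection callback so the sync doesn't trigger another sync.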
+ fn sync_cursor_to_other_side(
+ &mut self,
+ from_primary: bool,
+ source_point: Point,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(secondary) = &self.secondary else {
+ return;
+ };
+
+ let target_editor = if from_primary {
+ &secondary.editor
+ } else {
+ &self.primary_editor
+ };
+
+ let (source_multibuffer, target_multibuffer) = if from_primary {
+ (&self.primary_multibuffer, &secondary.multibuffer)
+ } else {
+ (&secondary.multibuffer, &self.primary_multibuffer)
+ };
+
+ let source_snapshot = source_multibuffer.read(cx).snapshot(cx);
+ let target_snapshot = target_multibuffer.read(cx).snapshot(cx);
+
+ let target_point = target_editor.update(cx, |target_editor, cx| {
+ target_editor.display_map.update(cx, |display_map, cx| {
+ let display_map_id = cx.entity_id();
+ display_map.companion().unwrap().update(cx, |companion, _| {
+ companion
+ .convert_rows_from_companion(
+ display_map_id,
+ &target_snapshot,
+ &source_snapshot,
+ (Bound::Included(source_point), Bound::Included(source_point)),
+ )
+ .first()
+ .unwrap()
+ .boundaries
+ .first()
+ .unwrap()
+ .1
+ .start
+ })
+ })
+ });
+
+ target_editor.update(cx, |editor, cx| {
+ editor.set_suppress_selection_callback(true);
+ editor.change_selections(crate::SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([target_point..target_point]);
+ });
+ editor.set_suppress_selection_callback(false);
+ });
+ }
+
+ fn toggle_split(&mut self, _: &ToggleSplitDiff, window: &mut Window, cx: &mut Context<Self>) {
+ if self.secondary.is_some() {
+ self.unsplit(&UnsplitDiff, window, cx);
+ } else {
+ self.split(&SplitDiff, window, cx);
+ }
+ }
+
+ fn intercept_toggle_code_actions(
+ &mut self,
+ _: &ToggleCodeActions,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.secondary.is_some() {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn intercept_toggle_breakpoint(
+ &mut self,
+ _: &ToggleBreakpoint,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Only block breakpoint actions when the left (secondary) editor has focus
+ if let Some(secondary) = &self.secondary {
+ if secondary.has_latest_selection {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn intercept_enable_breakpoint(
+ &mut self,
+ _: &EnableBreakpoint,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Only block breakpoint actions when the left (secondary) editor has focus
+ if let Some(secondary) = &self.secondary {
+ if secondary.has_latest_selection {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn intercept_disable_breakpoint(
+ &mut self,
+ _: &DisableBreakpoint,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Only block breakpoint actions when the left (secondary) editor has focus
+ if let Some(secondary) = &self.secondary {
+ if secondary.has_latest_selection {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn intercept_edit_log_breakpoint(
+ &mut self,
+ _: &EditLogBreakpoint,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Only block breakpoint actions when the left (secondary) editor has focus
+ if let Some(secondary) = &self.secondary {
+ if secondary.has_latest_selection {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn intercept_inline_assist(
+ &mut self,
+ _: &InlineAssist,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.secondary.is_some() {
+ cx.stop_propagation();
+ } else {
+ cx.propagate();
+ }
+ }
+
+ fn toggle_soft_wrap(
+ &mut self,
+ _: &ToggleSoftWrap,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(secondary) = &self.secondary {
+ cx.stop_propagation();
+
+ let is_secondary_focused = secondary.has_latest_selection;
+ let (focused_editor, other_editor) = if is_secondary_focused {
+ (&secondary.editor, &self.primary_editor)
+ } else {
+ (&self.primary_editor, &secondary.editor)
+ };
+
+ // Toggle the focused editor
+ focused_editor.update(cx, |editor, cx| {
+ editor.toggle_soft_wrap(&ToggleSoftWrap, window, cx);
+ });
+
+ // Copy the soft wrap state from the focused editor to the other editor
+ let soft_wrap_override = focused_editor.read(cx).soft_wrap_mode_override;
+ other_editor.update(cx, |editor, cx| {
+ editor.soft_wrap_mode_override = soft_wrap_override;
+ cx.notify();
+ });
+ } else {
+ cx.propagate();
+ }
+ }
+
fn unsplit(&mut self, _: &UnsplitDiff, _: &mut Window, cx: &mut Context<Self>) {
let Some(secondary) = self.secondary.take() else {
return;
};
self.panes.remove(&secondary.pane, cx).unwrap();
self.primary_editor.update(cx, |primary, cx| {
+ primary.set_on_local_selections_changed(None);
+ primary.set_scroll_companion(None);
primary.set_delegate_expand_excerpts(false);
primary.buffer().update(cx, |buffer, cx| {
buffer.set_show_deleted_hunks(true, cx);
+ buffer.set_use_extended_diff_range(false, cx);
});
+ primary.display_map.update(cx, |dm, cx| {
+ dm.set_companion(None, cx);
+ });
+ });
+ secondary.editor.update(cx, |editor, _cx| {
+ editor.set_on_local_selections_changed(None);
+ editor.set_scroll_companion(None);
});
cx.notify();
}
@@ -301,27 +900,46 @@ impl SplittableEditor {
diff: Entity<BufferDiff>,
cx: &mut Context<Self>,
) -> (Vec<Range<Anchor>>, bool) {
- self.primary_multibuffer
- .update(cx, |primary_multibuffer, cx| {
- let (anchors, added_a_new_excerpt) = primary_multibuffer.set_excerpts_for_path(
- path.clone(),
- buffer.clone(),
- ranges,
- context_line_count,
+ let primary_display_map = self.primary_editor.read(cx).display_map.clone();
+ let secondary_display_map = self
+ .secondary
+ .as_ref()
+ .map(|s| s.editor.read(cx).display_map.clone());
+
+ let (anchors, added_a_new_excerpt) =
+ self.primary_multibuffer
+ .update(cx, |primary_multibuffer, cx| {
+ let (anchors, added_a_new_excerpt) = primary_multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ ranges,
+ context_line_count,
+ cx,
+ );
+ if !anchors.is_empty()
+ && primary_multibuffer
+ .diff_for(buffer.read(cx).remote_id())
+ .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
+ {
+ primary_multibuffer.add_diff(diff.clone(), cx);
+ }
+ (anchors, added_a_new_excerpt)
+ });
+
+ if let Some(secondary) = &mut self.secondary {
+ if let Some(secondary_display_map) = &secondary_display_map {
+ secondary.sync_path_excerpts(
+ path,
+ &self.primary_multibuffer,
+ diff,
+ &primary_display_map,
+ secondary_display_map,
cx,
);
- if !anchors.is_empty()
- && primary_multibuffer
- .diff_for(buffer.read(cx).remote_id())
- .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
- {
- primary_multibuffer.add_diff(diff.clone(), cx);
- }
- if let Some(secondary) = &mut self.secondary {
- secondary.sync_path_excerpts(path, primary_multibuffer, diff, cx);
- }
- (anchors, added_a_new_excerpt)
- })
+ }
+ }
+
+ (anchors, added_a_new_excerpt)
}
fn expand_excerpts(
@@ -349,11 +967,18 @@ impl SplittableEditor {
});
if let Some(secondary) = &mut self.secondary {
- self.primary_multibuffer.update(cx, |multibuffer, cx| {
- for (path, diff) in corresponding_paths {
- secondary.sync_path_excerpts(path, multibuffer, diff, cx);
- }
- })
+ let primary_display_map = self.primary_editor.read(cx).display_map.clone();
+ let secondary_display_map = secondary.editor.read(cx).display_map.clone();
+ for (path, diff) in corresponding_paths {
+ secondary.sync_path_excerpts(
+ path,
+ &self.primary_multibuffer,
+ diff,
+ &primary_display_map,
+ &secondary_display_map,
+ cx,
+ );
+ }
}
}
@@ -361,8 +986,16 @@ impl SplittableEditor {
self.primary_multibuffer.update(cx, |buffer, cx| {
buffer.remove_excerpts_for_path(path.clone(), cx)
});
- if let Some(secondary) = &mut self.secondary {
- secondary.remove_mappings_for_path(&path, cx);
+ if let Some(secondary) = &self.secondary {
+ let primary_display_map = self.primary_editor.read(cx).display_map.clone();
+ let secondary_display_map = secondary.editor.read(cx).display_map.clone();
+ secondary.remove_mappings_for_path(
+ &path,
+ &self.primary_multibuffer,
+ &primary_display_map,
+ &secondary_display_map,
+ cx,
+ );
secondary
.multibuffer
.update(cx, |buffer, cx| buffer.remove_excerpts_for_path(path, cx))
@@ -372,144 +1005,400 @@ impl SplittableEditor {
#[cfg(test)]
impl SplittableEditor {
- fn check_invariants(&self, quiesced: bool, cx: &App) {
- use buffer_diff::DiffHunkStatusKind;
- use collections::HashSet;
- use multi_buffer::MultiBufferOffset;
+ fn check_invariants(&self, quiesced: bool, cx: &mut App) {
use multi_buffer::MultiBufferRow;
- use multi_buffer::MultiBufferSnapshot;
-
- fn format_diff(snapshot: &MultiBufferSnapshot) -> String {
- let text = snapshot.text();
- let row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>();
- let boundary_rows = snapshot
- .excerpt_boundaries_in_range(MultiBufferOffset(0)..)
- .map(|b| b.row)
- .collect::<HashSet<_>>();
-
- text.split('\n')
- .enumerate()
- .zip(row_infos)
- .map(|((ix, line), info)| {
- let marker = match info.diff_status.map(|status| status.kind) {
- Some(DiffHunkStatusKind::Added) => "+ ",
- Some(DiffHunkStatusKind::Deleted) => "- ",
- Some(DiffHunkStatusKind::Modified) => unreachable!(),
- None => {
- if !line.is_empty() {
- " "
- } else {
- ""
- }
- }
- };
- let boundary_row = if boundary_rows.contains(&MultiBufferRow(ix as u32)) {
- " ----------\n"
- } else {
- ""
- };
- let expand = info
- .expand_info
- .map(|expand_info| match expand_info.direction {
- ExpandExcerptDirection::Up => " [↑]",
- ExpandExcerptDirection::Down => " [↓]",
- ExpandExcerptDirection::UpAndDown => " [↕]",
- })
- .unwrap_or_default();
-
- format!("{boundary_row}{marker}{line}{expand}")
- })
- .collect::<Vec<_>>()
- .join("\n")
- }
-
- let Some(secondary) = &self.secondary else {
- return;
- };
+ use text::Bias;
- log::info!(
- "primary:\n\n{}",
- format_diff(&self.primary_multibuffer.read(cx).snapshot(cx))
- );
+ use crate::display_map::Block;
+ use crate::display_map::DisplayRow;
- log::info!(
- "secondary:\n\n{}",
- format_diff(&secondary.multibuffer.read(cx).snapshot(cx))
- );
+ self.debug_print(cx);
+ let secondary = self.secondary.as_ref().unwrap();
let primary_excerpts = self.primary_multibuffer.read(cx).excerpt_ids();
let secondary_excerpts = secondary.multibuffer.read(cx).excerpt_ids();
- assert_eq!(primary_excerpts.len(), secondary_excerpts.len());
-
assert_eq!(
- secondary.primary_to_secondary.len(),
- primary_excerpts.len(),
- "primary_to_secondary mapping count should match excerpt count"
- );
- assert_eq!(
- secondary.secondary_to_primary.len(),
secondary_excerpts.len(),
- "secondary_to_primary mapping count should match excerpt count"
+ primary_excerpts.len(),
+ "mismatch in excerpt count"
);
- for primary_id in &primary_excerpts {
- assert!(
- secondary.primary_to_secondary.contains_key(primary_id),
- "primary excerpt {:?} should have a mapping to secondary",
- primary_id
- );
- }
- for secondary_id in &secondary_excerpts {
- assert!(
- secondary.secondary_to_primary.contains_key(secondary_id),
- "secondary excerpt {:?} should have a mapping to primary",
- secondary_id
- );
- }
-
- for (primary_id, secondary_id) in &secondary.primary_to_secondary {
- assert_eq!(
- secondary.secondary_to_primary.get(secondary_id),
- Some(primary_id),
- "mappings should be bijective"
- );
- }
-
if quiesced {
- let primary_snapshot = self.primary_multibuffer.read(cx).snapshot(cx);
- let secondary_snapshot = secondary.multibuffer.read(cx).snapshot(cx);
- let primary_diff_hunks = primary_snapshot
- .diff_hunks()
- .map(|hunk| hunk.diff_base_byte_range)
+ let rhs_snapshot = secondary
+ .editor
+ .update(cx, |editor, cx| editor.display_snapshot(cx));
+ let lhs_snapshot = self
+ .primary_editor
+ .update(cx, |editor, cx| editor.display_snapshot(cx));
+
+ let lhs_max_row = lhs_snapshot.max_point().row();
+ let rhs_max_row = rhs_snapshot.max_point().row();
+ assert_eq!(lhs_max_row, rhs_max_row, "mismatch in display row count");
+
+ let lhs_excerpt_block_rows = lhs_snapshot
+ .blocks_in_range(DisplayRow(0)..lhs_max_row + 1)
+ .filter(|(_, block)| {
+ matches!(
+ block,
+ Block::BufferHeader { .. } | Block::ExcerptBoundary { .. }
+ )
+ })
+ .map(|(row, _)| row)
.collect::<Vec<_>>();
- let secondary_diff_hunks = secondary_snapshot
- .diff_hunks()
- .map(|hunk| hunk.diff_base_byte_range)
+ let rhs_excerpt_block_rows = rhs_snapshot
+ .blocks_in_range(DisplayRow(0)..rhs_max_row + 1)
+ .filter(|(_, block)| {
+ matches!(
+ block,
+ Block::BufferHeader { .. } | Block::ExcerptBoundary { .. }
+ )
+ })
+ .map(|(row, _)| row)
.collect::<Vec<_>>();
- pretty_assertions::assert_eq!(primary_diff_hunks, secondary_diff_hunks);
+ assert_eq!(lhs_excerpt_block_rows, rhs_excerpt_block_rows);
- // Filtering out empty lines is a bit of a hack, to work around a case where
- // the base text has a trailing newline but the current text doesn't, or vice versa.
+ for (lhs_hunk, rhs_hunk) in lhs_snapshot.diff_hunks().zip(rhs_snapshot.diff_hunks()) {
+ assert_eq!(
+ lhs_hunk.diff_base_byte_range, rhs_hunk.diff_base_byte_range,
+ "mismatch in hunks"
+ );
+ assert_eq!(
+ lhs_hunk.status, rhs_hunk.status,
+ "mismatch in hunk statuses"
+ );
+
+ let (lhs_point, rhs_point) =
+ if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() {
+ (
+ Point::new(lhs_hunk.row_range.end.0, 0),
+ Point::new(rhs_hunk.row_range.end.0, 0),
+ )
+ } else {
+ (
+ Point::new(lhs_hunk.row_range.start.0, 0),
+ Point::new(rhs_hunk.row_range.start.0, 0),
+ )
+ };
+ let lhs_point = lhs_snapshot.point_to_display_point(lhs_point, Bias::Left);
+ let rhs_point = rhs_snapshot.point_to_display_point(rhs_point, Bias::Left);
+ assert_eq!(
+ lhs_point.row(),
+ rhs_point.row(),
+ "mismatch in hunk position"
+ );
+ }
+
+ // Filtering out empty lines is a bit of a hack, to work around a case where
+ // the base text has a trailing newline but the current text doesn't, or vice versa.
// In this case, we get the additional newline on one side, but that line is not
// marked as added/deleted by rowinfos.
- let primary_unmodified_rows = primary_snapshot
- .text()
- .split("\n")
- .zip(primary_snapshot.row_infos(MultiBufferRow(0)))
- .filter(|(line, row_info)| !line.is_empty() && row_info.diff_status.is_none())
- .map(|(line, _)| line.to_owned())
- .collect::<Vec<_>>();
- let secondary_unmodified_rows = secondary_snapshot
- .text()
- .split("\n")
- .zip(secondary_snapshot.row_infos(MultiBufferRow(0)))
- .filter(|(line, row_info)| !line.is_empty() && row_info.diff_status.is_none())
- .map(|(line, _)| line.to_owned())
- .collect::<Vec<_>>();
- pretty_assertions::assert_eq!(primary_unmodified_rows, secondary_unmodified_rows);
+ self.check_sides_match(cx, |snapshot| {
+ snapshot
+ .buffer_snapshot()
+ .text()
+ .split("\n")
+ .zip(snapshot.buffer_snapshot().row_infos(MultiBufferRow(0)))
+ .filter(|(line, row_info)| !line.is_empty() && row_info.diff_status.is_none())
+ .map(|(line, _)| line.to_owned())
+ .collect::<Vec<_>>()
+ });
}
}
+ #[track_caller]
+ fn check_sides_match<T: std::fmt::Debug + PartialEq>(
+ &self,
+ cx: &mut App,
+ mut extract: impl FnMut(&crate::DisplaySnapshot) -> T,
+ ) {
+ let secondary = self.secondary.as_ref().expect("requires split");
+ let primary_snapshot = self.primary_editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |map, cx| map.snapshot(cx))
+ });
+ let secondary_snapshot = secondary.editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |map, cx| map.snapshot(cx))
+ });
+
+ let primary_t = extract(&primary_snapshot);
+ let secondary_t = extract(&secondary_snapshot);
+
+ if primary_t != secondary_t {
+ self.debug_print(cx);
+ pretty_assertions::assert_eq!(primary_t, secondary_t);
+ }
+ }
+
+ fn debug_print(&self, cx: &mut App) {
+ use crate::DisplayRow;
+ use crate::display_map::Block;
+ use buffer_diff::DiffHunkStatusKind;
+
+ assert!(
+ self.secondary.is_some(),
+ "debug_print is only useful when secondary editor exists"
+ );
+
+ let secondary = self.secondary.as_ref().unwrap();
+
+ // Get terminal width, default to 80 if unavailable
+ let terminal_width = std::env::var("COLUMNS")
+ .ok()
+ .and_then(|s| s.parse::<usize>().ok())
+ .unwrap_or(80);
+
+ // Each side gets half the terminal width minus the separator
+ let separator = " โ ";
+ let separator = " │ ";
+
+ // Get display snapshots for both editors
+ let secondary_snapshot = secondary.editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |map, cx| map.snapshot(cx))
+ });
+ let primary_snapshot = self.primary_editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |map, cx| map.snapshot(cx))
+ });
+
+ let secondary_max_row = secondary_snapshot.max_point().row().0;
+ let primary_max_row = primary_snapshot.max_point().row().0;
+ let max_row = secondary_max_row.max(primary_max_row);
+
+ // Build a map from display row -> block type string
+ // Each row of a multi-row block gets an entry with the same block type
+ // For spacers, the ID is included in brackets
+ fn build_block_map(
+ snapshot: &crate::DisplaySnapshot,
+ max_row: u32,
+ ) -> std::collections::HashMap<u32, String> {
+ let mut block_map = std::collections::HashMap::new();
+ for (start_row, block) in
+ snapshot.blocks_in_range(DisplayRow(0)..DisplayRow(max_row + 1))
+ {
+ let (block_type, height) = match block {
+ Block::Spacer {
+ id,
+ height,
+ is_below: _,
+ } => (format!("SPACER[{}]", id.0), *height),
+ Block::ExcerptBoundary { height, .. } => {
+ ("EXCERPT_BOUNDARY".to_string(), *height)
+ }
+ Block::BufferHeader { height, .. } => ("BUFFER_HEADER".to_string(), *height),
+ Block::FoldedBuffer { height, .. } => ("FOLDED_BUFFER".to_string(), *height),
+ Block::Custom(custom) => {
+ ("CUSTOM_BLOCK".to_string(), custom.height.unwrap_or(1))
+ }
+ };
+ for offset in 0..height {
+ block_map.insert(start_row.0 + offset, block_type.clone());
+ }
+ }
+ block_map
+ }
+
+ let secondary_blocks = build_block_map(&secondary_snapshot, secondary_max_row);
+ let primary_blocks = build_block_map(&primary_snapshot, primary_max_row);
+
+ fn display_width(s: &str) -> usize {
+ unicode_width::UnicodeWidthStr::width(s)
+ }
+
+ fn truncate_line(line: &str, max_width: usize) -> String {
+ let line_width = display_width(line);
+ if line_width <= max_width {
+ return line.to_string();
+ }
+ if max_width < 9 {
+ let mut result = String::new();
+ let mut width = 0;
+ for c in line.chars() {
+ let c_width = unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
+ if width + c_width > max_width {
+ break;
+ }
+ result.push(c);
+ width += c_width;
+ }
+ return result;
+ }
+ let ellipsis = "...";
+ let target_prefix_width = 3;
+ let target_suffix_width = 3;
+
+ let mut prefix = String::new();
+ let mut prefix_width = 0;
+ for c in line.chars() {
+ let c_width = unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
+ if prefix_width + c_width > target_prefix_width {
+ break;
+ }
+ prefix.push(c);
+ prefix_width += c_width;
+ }
+
+ let mut suffix_chars: Vec<char> = Vec::new();
+ let mut suffix_width = 0;
+ for c in line.chars().rev() {
+ let c_width = unicode_width::UnicodeWidthChar::width(c).unwrap_or(0);
+ if suffix_width + c_width > target_suffix_width {
+ break;
+ }
+ suffix_chars.push(c);
+ suffix_width += c_width;
+ }
+ suffix_chars.reverse();
+ let suffix: String = suffix_chars.into_iter().collect();
+
+ format!("{}{}{}", prefix, ellipsis, suffix)
+ }
+
+ fn pad_to_width(s: &str, target_width: usize) -> String {
+ let current_width = display_width(s);
+ if current_width >= target_width {
+ s.to_string()
+ } else {
+ format!("{}{}", s, " ".repeat(target_width - current_width))
+ }
+ }
+
+ // Helper to format a single row for one side
+ // Format: "ln# diff bytes(cumul) text" or block info
+ // Line numbers come from buffer_row in RowInfo (1-indexed for display)
+ fn format_row(
+ row: u32,
+ max_row: u32,
+ snapshot: &crate::DisplaySnapshot,
+ blocks: &std::collections::HashMap<u32, String>,
+ row_infos: &[multi_buffer::RowInfo],
+ cumulative_bytes: &[usize],
+ side_width: usize,
+ ) -> String {
+ // Get row info if available
+ let row_info = row_infos.get(row as usize);
+
+ // Line number prefix (3 chars + space)
+ // Use buffer_row from RowInfo, which is None for block rows
+ let line_prefix = if row > max_row {
+ " ".to_string()
+ } else if let Some(buffer_row) = row_info.and_then(|info| info.buffer_row) {
+ format!("{:>3} ", buffer_row + 1) // 1-indexed for display
+ } else {
+ " ".to_string() // block rows have no line number
+ };
+ let content_width = side_width.saturating_sub(line_prefix.len());
+
+ if row > max_row {
+ return format!("{}{}", line_prefix, " ".repeat(content_width));
+ }
+
+ // Check if this row is a block row
+ if let Some(block_type) = blocks.get(&row) {
+ let block_str = format!("~~~[{}]~~~", block_type);
+ let formatted = format!("{:^width$}", block_str, width = content_width);
+ return format!(
+ "{}{}",
+ line_prefix,
+ truncate_line(&formatted, content_width)
+ );
+ }
+
+ // Get line text
+ let line_text = snapshot.line(DisplayRow(row));
+ let line_bytes = line_text.len();
+
+ // Diff status marker
+ let diff_marker = match row_info.and_then(|info| info.diff_status.as_ref()) {
+ Some(status) => match status.kind {
+ DiffHunkStatusKind::Added => "+",
+ DiffHunkStatusKind::Deleted => "-",
+ DiffHunkStatusKind::Modified => "~",
+ },
+ None => " ",
+ };
+
+ // Cumulative bytes
+ let cumulative = cumulative_bytes.get(row as usize).copied().unwrap_or(0);
+
+ // Format: "diff bytes(cumul) text" - use 3 digits for bytes, 4 for cumulative
+ let info_prefix = format!("{}{:>3}({:>4}) ", diff_marker, line_bytes, cumulative);
+ let text_width = content_width.saturating_sub(info_prefix.len());
+ let truncated_text = truncate_line(&line_text, text_width);
+
+ let text_part = pad_to_width(&truncated_text, text_width);
+ format!("{}{}{}", line_prefix, info_prefix, text_part)
+ }
+
+ // Collect row infos for both sides
+ let secondary_row_infos: Vec<_> = secondary_snapshot
+ .row_infos(DisplayRow(0))
+ .take((secondary_max_row + 1) as usize)
+ .collect();
+ let primary_row_infos: Vec<_> = primary_snapshot
+ .row_infos(DisplayRow(0))
+ .take((primary_max_row + 1) as usize)
+ .collect();
+
+ // Calculate cumulative bytes for each side (only counting non-block rows)
+ let mut secondary_cumulative = Vec::with_capacity((secondary_max_row + 1) as usize);
+ let mut cumulative = 0usize;
+ for row in 0..=secondary_max_row {
+ if !secondary_blocks.contains_key(&row) {
+ cumulative += secondary_snapshot.line(DisplayRow(row)).len() + 1; // +1 for newline
+ }
+ secondary_cumulative.push(cumulative);
+ }
+
+ let mut primary_cumulative = Vec::with_capacity((primary_max_row + 1) as usize);
+ cumulative = 0;
+ for row in 0..=primary_max_row {
+ if !primary_blocks.contains_key(&row) {
+ cumulative += primary_snapshot.line(DisplayRow(row)).len() + 1;
+ }
+ primary_cumulative.push(cumulative);
+ }
+
+ // Print header
+ eprintln!();
+ eprintln!("{}", "─".repeat(terminal_width));
+ let header_left = format!("{:^width$}", "SECONDARY (LEFT)", width = side_width);
+ let header_right = format!("{:^width$}", "PRIMARY (RIGHT)", width = side_width);
+ eprintln!("{}{}{}", header_left, separator, header_right);
+ eprintln!(
+ "{:^width$}{}{:^width$}",
+ "ln# diff len(cum) text",
+ separator,
+ "ln# diff len(cum) text",
+ width = side_width
+ );
+ eprintln!("{}", "─".repeat(terminal_width));
+
+ // Print each row
+ for row in 0..=max_row {
+ let left = format_row(
+ row,
+ secondary_max_row,
+ &secondary_snapshot,
+ &secondary_blocks,
+ &secondary_row_infos,
+ &secondary_cumulative,
+ side_width,
+ );
+ let right = format_row(
+ row,
+ primary_max_row,
+ &primary_snapshot,
+ &primary_blocks,
+ &primary_row_infos,
+ &primary_cumulative,
+ side_width,
+ );
+ eprintln!("{}{}{}", left, separator, right);
+ }
+
+ eprintln!("{}", "─".repeat(terminal_width));
+ eprintln!("Legend: + added, - deleted, ~ modified, ~~~ block/spacer row");
+ eprintln!();
+ }
+
fn randomly_edit_excerpts(
&mut self,
rng: &mut impl rand::Rng,
@@ -0,0 +1,1086 @@
+use std::{cmp, collections::HashMap, path, path::Path};
+
+use collections::HashSet;
+use file_icons::FileIcons;
+use git::status::FileStatus;
+use gpui::{
+ AbsoluteLength, Action, AnyElement, App, AvailableSpace, Bounds, ClickEvent, ClipboardItem,
+ Context, DragMoveEvent, Element, Entity, Focusable, GlobalElementId, Hsla, InspectorElementId,
+ IntoElement, LayoutId, Length, Modifiers, MouseButton, ParentElement, Pixels,
+ StatefulInteractiveElement, Styled, TextStyleRefinement, Window, div, linear_color_stop,
+ linear_gradient, point, px, size,
+};
+use multi_buffer::{Anchor, ExcerptId, ExcerptInfo};
+use project::Entry;
+use settings::Settings;
+use text::BufferId;
+use theme::ActiveTheme;
+use ui::scrollbars::ShowScrollbar;
+use ui::{
+ Button, ButtonLike, ButtonStyle, ContextMenu, Icon, IconName, Indicator, KeyBinding, Label,
+ Tooltip, h_flex, prelude::*, right_click_menu, text_for_keystroke, v_flex,
+};
+use workspace::{ItemSettings, OpenInTerminal, OpenTerminal, RevealInProjectPanel};
+
+use crate::{
+ DisplayRow, Editor, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, JumpData,
+ MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, RowExt, StickyHeaderExcerpt, ToggleFold,
+ ToggleFoldAll,
+ display_map::Block,
+ element::{EditorElement, SplitSide},
+ scroll::ScrollOffset,
+ split::SplittableEditor,
+};
+
+const RESIZE_HANDLE_WIDTH: f32 = 8.0;
+
+#[derive(Debug, Clone)]
+struct DraggedSplitHandle;
+
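+/// Tracks the draggable divider between the two sides of a split editor view: `left_ratio` is the committed split, `visible_left_ratio` is the ratio shown while a drag is in progress, and `cached_width` remembers the last non-zero container width.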
+pub struct SplitEditorState {
+ left_ratio: f32,
+ visible_left_ratio: f32,
+ cached_width: Pixels,
+}
+
+impl SplitEditorState {
+ pub fn new(_cx: &mut App) -> Self {
+ Self {
+ left_ratio: 0.5,
+ visible_left_ratio: 0.5,
+ cached_width: px(0.),
+ }
+ }
+
+ #[allow(clippy::misnamed_getters)]
+ pub fn left_ratio(&self) -> f32 {
+ self.visible_left_ratio
+ }
+
+ pub fn right_ratio(&self) -> f32 {
+ 1.0 - self.visible_left_ratio
+ }
+
+ fn on_drag_move(
+ &mut self,
+ drag_event: &DragMoveEvent<DraggedSplitHandle>,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) {
+ let drag_position = drag_event.event.position;
+ let bounds = drag_event.bounds;
+ let bounds_width = bounds.right() - bounds.left();
+
+ if bounds_width > px(0.) {
+ self.cached_width = bounds_width;
+ }
+
+ let min_ratio = 0.1;
+ let max_ratio = 0.9;
+
+ let new_ratio = (drag_position.x - bounds.left()) / bounds_width;
+ self.visible_left_ratio = new_ratio.clamp(min_ratio, max_ratio);
+ }
+
+ fn commit_ratio(&mut self) {
+ self.left_ratio = self.visible_left_ratio;
+ }
+
+ fn on_double_click(&mut self) {
+ self.left_ratio = 0.5;
+ self.visible_left_ratio = 0.5;
+ }
+}
+
+#[derive(IntoElement)]
+pub struct SplitEditorView {
+ splittable_editor: Entity<SplittableEditor>,
+ style: EditorStyle,
+ split_state: Entity<SplitEditorState>,
+}
+
+impl SplitEditorView {
+ pub fn new(
+ splittable_editor: Entity<SplittableEditor>,
+ style: EditorStyle,
+ split_state: Entity<SplitEditorState>,
+ ) -> Self {
+ Self {
+ splittable_editor,
+ style,
+ split_state,
+ }
+ }
+}
+
+fn render_resize_handle(
+ state: &Entity<SplitEditorState>,
+ separator_color: Hsla,
+ _window: &mut Window,
+ _cx: &mut App,
+) -> AnyElement {
+ let state_for_click = state.clone();
+
+ div()
+ .id("split-resize-container")
+ .relative()
+ .h_full()
+ .flex_shrink_0()
+ .w(px(1.))
+ .bg(separator_color)
+ .child(
+ div()
+ .id("split-resize-handle")
+ .absolute()
+ .left(px(-RESIZE_HANDLE_WIDTH / 2.0))
+ .w(px(RESIZE_HANDLE_WIDTH))
+ .h_full()
+ .cursor_col_resize()
+ .block_mouse_except_scroll()
+ .on_click(move |event, _, cx| {
+ if event.click_count() >= 2 {
+ state_for_click.update(cx, |state, _| {
+ state.on_double_click();
+ });
+ }
+ cx.stop_propagation();
+ })
+ .on_drag(DraggedSplitHandle, |_, _, _, cx| cx.new(|_| gpui::Empty)),
+ )
+ .into_any_element()
+}
+
+impl RenderOnce for SplitEditorView {
+ fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let splittable_editor = self.splittable_editor.read(cx);
+
+ assert!(
+ splittable_editor.secondary_editor().is_some(),
+ "`SplitEditorView` requires `SplittableEditor` to be in split mode"
+ );
+
+ let lhs_editor = splittable_editor.secondary_editor().unwrap().clone();
+ let rhs_editor = splittable_editor.primary_editor().clone();
+
+ let mut lhs = EditorElement::new(&lhs_editor, self.style.clone());
+ let mut rhs = EditorElement::new(&rhs_editor, self.style.clone());
+
+ lhs.set_split_side(SplitSide::Left);
+ rhs.set_split_side(SplitSide::Right);
+
+ let left_ratio = self.split_state.read(cx).left_ratio();
+ let right_ratio = self.split_state.read(cx).right_ratio();
+
+ let separator_color = cx.theme().colors().border_variant;
+
+ let resize_handle = render_resize_handle(&self.split_state, separator_color, window, cx);
+
+ let state_for_drag = self.split_state.downgrade();
+ let state_for_drop = self.split_state.downgrade();
+
+ let buffer_headers = SplitBufferHeadersElement::new(rhs_editor, self.style.clone());
+
+ div()
+ .id("split-editor-view-container")
+ .size_full()
+ .relative()
+ .child(
+ h_flex()
+ .id("split-editor-view")
+ .size_full()
+ .on_drag_move::<DraggedSplitHandle>(move |event, window, cx| {
+ state_for_drag
+ .update(cx, |state, cx| {
+ state.on_drag_move(event, window, cx);
+ })
+ .ok();
+ })
+ .on_drop::<DraggedSplitHandle>(move |_, _, cx| {
+ state_for_drop
+ .update(cx, |state, _| {
+ state.commit_ratio();
+ })
+ .ok();
+ })
+ .child(
+ div()
+ .id("split-editor-left")
+ .flex_shrink()
+ .min_w_0()
+ .h_full()
+ .flex_basis(DefiniteLength::Fraction(left_ratio))
+ .overflow_hidden()
+ .child(lhs),
+ )
+ .child(resize_handle)
+ .child(
+ div()
+ .id("split-editor-right")
+ .flex_shrink()
+ .min_w_0()
+ .h_full()
+ .flex_basis(DefiniteLength::Fraction(right_ratio))
+ .overflow_hidden()
+ .child(rhs),
+ ),
+ )
+ .child(buffer_headers)
+ }
+}
+
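+/// Absolutely positioned overlay spanning the whole split view that lays out and paints the buffer headers (sticky and non-sticky) from the primary editor's snapshot across the full width of the container.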
+struct SplitBufferHeadersElement {
+ editor: Entity<Editor>,
+ style: EditorStyle,
+}
+
+impl SplitBufferHeadersElement {
+ fn new(editor: Entity<Editor>, style: EditorStyle) -> Self {
+ Self { editor, style }
+ }
+}
+
+struct BufferHeaderLayout {
+ element: AnyElement,
+}
+
+struct SplitBufferHeadersPrepaintState {
+ sticky_header: Option<AnyElement>,
+ non_sticky_headers: Vec<BufferHeaderLayout>,
+}
+
+impl IntoElement for SplitBufferHeadersElement {
+ type Element = Self;
+
+ fn into_element(self) -> Self::Element {
+ self
+ }
+}
+
+impl Element for SplitBufferHeadersElement {
+ type RequestLayoutState = ();
+ type PrepaintState = SplitBufferHeadersPrepaintState;
+
+ fn id(&self) -> Option<gpui::ElementId> {
+ Some("split-buffer-headers".into())
+ }
+
+ fn source_location(&self) -> Option<&'static core::panic::Location<'static>> {
+ None
+ }
+
+ fn request_layout(
+ &mut self,
+ _id: Option<&GlobalElementId>,
+ _inspector_id: Option<&InspectorElementId>,
+ window: &mut Window,
+ _cx: &mut App,
+ ) -> (LayoutId, Self::RequestLayoutState) {
+ let mut style = gpui::Style::default();
+ style.position = gpui::Position::Absolute;
+ style.inset.top = DefiniteLength::Fraction(0.0).into();
+ style.inset.left = DefiniteLength::Fraction(0.0).into();
+ style.size.width = Length::Definite(DefiniteLength::Fraction(1.0));
+ style.size.height = Length::Definite(DefiniteLength::Fraction(1.0));
+ let layout_id = window.request_layout(style, [], _cx);
+ (layout_id, ())
+ }
+
+ fn prepaint(
+ &mut self,
+ _id: Option<&GlobalElementId>,
+ _inspector_id: Option<&InspectorElementId>,
+ bounds: Bounds<Pixels>,
+ _request_layout: &mut Self::RequestLayoutState,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Self::PrepaintState {
+ if bounds.size.width <= px(0.) || bounds.size.height <= px(0.) {
+ return SplitBufferHeadersPrepaintState {
+ sticky_header: None,
+ non_sticky_headers: Vec::new(),
+ };
+ }
+
+ let rem_size = self.rem_size();
+ let text_style = TextStyleRefinement {
+ font_size: Some(self.style.text.font_size),
+ line_height: Some(self.style.text.line_height),
+ ..Default::default()
+ };
+
+ window.with_rem_size(rem_size, |window| {
+ window.with_text_style(Some(text_style), |window| {
+ Self::prepaint_inner(self, bounds, window, cx)
+ })
+ })
+ }
+
+ fn paint(
+ &mut self,
+ _id: Option<&GlobalElementId>,
+ _inspector_id: Option<&InspectorElementId>,
+ _bounds: Bounds<Pixels>,
+ _request_layout: &mut Self::RequestLayoutState,
+ prepaint: &mut Self::PrepaintState,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let rem_size = self.rem_size();
+ let text_style = TextStyleRefinement {
+ font_size: Some(self.style.text.font_size),
+ line_height: Some(self.style.text.line_height),
+ ..Default::default()
+ };
+
+ window.with_rem_size(rem_size, |window| {
+ window.with_text_style(Some(text_style), |window| {
+ for header_layout in &mut prepaint.non_sticky_headers {
+ header_layout.element.paint(window, cx);
+ }
+
+ if let Some(mut sticky_header) = prepaint.sticky_header.take() {
+ sticky_header.paint(window, cx);
+ }
+ });
+ });
+ }
+}
+
+impl SplitBufferHeadersElement {
+ fn rem_size(&self) -> Option<Pixels> {
+ match self.style.text.font_size {
+ AbsoluteLength::Pixels(pixels) => {
+ let rem_size_scale = {
+ let default_font_size_scale = 14. / ui::BASE_REM_SIZE_IN_PX;
+ let default_font_size_delta = 1. - default_font_size_scale;
+ 1. + default_font_size_delta
+ };
+
+ Some(pixels * rem_size_scale)
+ }
+ AbsoluteLength::Rems(rems) => Some(rems.to_pixels(ui::BASE_REM_SIZE_IN_PX.into())),
+ }
+ }
+
+ fn prepaint_inner(
+ &mut self,
+ bounds: Bounds<Pixels>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> SplitBufferHeadersPrepaintState {
+ let line_height = window.line_height();
+
+ let snapshot = self
+ .editor
+ .update(cx, |editor, cx| editor.snapshot(window, cx));
+ let scroll_position = snapshot.scroll_position();
+
+ // Compute right margin to avoid overlapping the scrollbar
+ let settings = EditorSettings::get_global(cx);
+ let scrollbars_shown = settings.scrollbar.show != ShowScrollbar::Never;
+ let vertical_scrollbar_width = (scrollbars_shown
+ && settings.scrollbar.axes.vertical
+ && self.editor.read(cx).show_scrollbars.vertical)
+ .then_some(EditorElement::SCROLLBAR_WIDTH)
+ .unwrap_or_default();
+ let available_width = bounds.size.width - vertical_scrollbar_width;
+
+ let visible_height_in_lines = bounds.size.height / line_height;
+ let max_row = snapshot.max_point().row();
+ let start_row = cmp::min(DisplayRow(scroll_position.y.floor() as u32), max_row);
+ let end_row = cmp::min(
+ (scroll_position.y + visible_height_in_lines as f64).ceil() as u32,
+ max_row.next_row().0,
+ );
+ let end_row = DisplayRow(end_row);
+
+ let (selected_buffer_ids, latest_selection_anchors) =
+ self.compute_selection_info(&snapshot, cx);
+
+ let sticky_header = if snapshot.buffer_snapshot().show_headers() {
+ snapshot
+ .sticky_header_excerpt(scroll_position.y)
+ .map(|sticky_excerpt| {
+ self.build_sticky_header(
+ sticky_excerpt,
+ &snapshot,
+ scroll_position,
+ bounds,
+ available_width,
+ line_height,
+ &selected_buffer_ids,
+ &latest_selection_anchors,
+ start_row,
+ end_row,
+ window,
+ cx,
+ )
+ })
+ } else {
+ None
+ };
+
+ let sticky_header_excerpt_id = snapshot
+ .sticky_header_excerpt(scroll_position.y)
+ .map(|e| e.excerpt.id);
+
+ let non_sticky_headers = self.build_non_sticky_headers(
+ &snapshot,
+ scroll_position,
+ bounds,
+ available_width,
+ line_height,
+ start_row,
+ end_row,
+ &selected_buffer_ids,
+ &latest_selection_anchors,
+ sticky_header_excerpt_id,
+ window,
+ cx,
+ );
+
+ SplitBufferHeadersPrepaintState {
+ sticky_header,
+ non_sticky_headers,
+ }
+ }
+
+ fn compute_selection_info(
+ &self,
+ snapshot: &EditorSnapshot,
+ cx: &App,
+ ) -> (HashSet<BufferId>, HashMap<BufferId, Anchor>) {
+ let editor = self.editor.read(cx);
+ let all_selections = editor
+ .selections
+ .all::<crate::Point>(&snapshot.display_snapshot);
+ let all_anchor_selections = editor.selections.all_anchors(&snapshot.display_snapshot);
+
+ let mut selected_buffer_ids = HashSet::default();
+ for selection in &all_selections {
+ for buffer_id in snapshot
+ .buffer_snapshot()
+ .buffer_ids_for_range(selection.range())
+ {
+ selected_buffer_ids.insert(buffer_id);
+ }
+ }
+
+ let mut anchors_by_buffer: HashMap<BufferId, (usize, Anchor)> = HashMap::default();
+ for selection in all_anchor_selections.iter() {
+ let head = selection.head();
+ if let Some(buffer_id) = head.text_anchor.buffer_id {
+ anchors_by_buffer
+ .entry(buffer_id)
+ .and_modify(|(latest_id, latest_anchor)| {
+ if selection.id > *latest_id {
+ *latest_id = selection.id;
+ *latest_anchor = head;
+ }
+ })
+ .or_insert((selection.id, head));
+ }
+ }
+ let latest_selection_anchors = anchors_by_buffer
+ .into_iter()
+ .map(|(buffer_id, (_, anchor))| (buffer_id, anchor))
+ .collect();
+
+ (selected_buffer_ids, latest_selection_anchors)
+ }
+
+ fn build_sticky_header(
+ &self,
+ StickyHeaderExcerpt { excerpt }: StickyHeaderExcerpt<'_>,
+ snapshot: &EditorSnapshot,
+ scroll_position: gpui::Point<ScrollOffset>,
+ bounds: Bounds<Pixels>,
+ available_width: Pixels,
+ line_height: Pixels,
+ selected_buffer_ids: &HashSet<BufferId>,
+ latest_selection_anchors: &HashMap<BufferId, Anchor>,
+ start_row: DisplayRow,
+ end_row: DisplayRow,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> AnyElement {
+ let jump_data = header_jump_data(
+ snapshot,
+ DisplayRow(scroll_position.y as u32),
+ FILE_HEADER_HEIGHT + MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
+ excerpt,
+ latest_selection_anchors,
+ );
+
+ let editor_bg_color = cx.theme().colors().editor_background;
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+
+ let mut header = v_flex()
+ .id("sticky-buffer-header")
+ .w(available_width)
+ .relative()
+ .child(
+ div()
+ .w(available_width)
+ .h(FILE_HEADER_HEIGHT as f32 * line_height)
+ .bg(linear_gradient(
+ 0.,
+ linear_color_stop(editor_bg_color.opacity(0.), 0.),
+ linear_color_stop(editor_bg_color, 0.6),
+ ))
+ .absolute()
+ .top_0(),
+ )
+ .child(
+ self.render_buffer_header(excerpt, false, selected, true, jump_data, window, cx)
+ .into_any_element(),
+ )
+ .into_any_element();
+
+ let mut origin = bounds.origin;
+
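+ // As the next buffer header scrolls within FILE_HEADER_HEIGHT rows of the top, push the
+ // sticky header upward by the same amount so it slides out of the way.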
+ for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) {
+ if !block.is_buffer_header() {
+ continue;
+ }
+
+ if block_row.0 <= scroll_position.y as u32 {
+ continue;
+ }
+
+ let max_row = block_row.0.saturating_sub(FILE_HEADER_HEIGHT);
+ let offset = scroll_position.y - max_row as f64;
+
+ if offset > 0.0 {
+ origin.y -= Pixels::from(offset * f64::from(line_height));
+ }
+ break;
+ }
+
+ let available_size = size(
+ AvailableSpace::Definite(available_width),
+ AvailableSpace::MinContent,
+ );
+
+ header.prepaint_as_root(origin, available_size, window, cx);
+
+ header
+ }
+
+ fn build_non_sticky_headers(
+ &self,
+ snapshot: &EditorSnapshot,
+ scroll_position: gpui::Point<ScrollOffset>,
+ bounds: Bounds<Pixels>,
+ available_width: Pixels,
+ line_height: Pixels,
+ start_row: DisplayRow,
+ end_row: DisplayRow,
+ selected_buffer_ids: &HashSet<BufferId>,
+ latest_selection_anchors: &HashMap<BufferId, Anchor>,
+ sticky_header_excerpt_id: Option<ExcerptId>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Vec<BufferHeaderLayout> {
+ let mut headers = Vec::new();
+
+ for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) {
+ let (excerpt, is_folded) = match block {
+ Block::BufferHeader { excerpt, .. } => {
+ if sticky_header_excerpt_id == Some(excerpt.id) {
+ continue;
+ }
+ (excerpt, false)
+ }
+ Block::FoldedBuffer { first_excerpt, .. } => (first_excerpt, true),
+ // ExcerptBoundary is just a separator line, not a buffer header
+ Block::ExcerptBoundary { .. } | Block::Custom(_) | Block::Spacer { .. } => continue,
+ };
+
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ let jump_data = header_jump_data(
+ snapshot,
+ block_row,
+ block.height(),
+ excerpt,
+ latest_selection_anchors,
+ );
+
+ let mut header = self
+ .render_buffer_header(excerpt, is_folded, selected, false, jump_data, window, cx)
+ .into_any_element();
+
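+ // Position the header at its block row, relative to the current scroll offset.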
+ let y_offset = (block_row.0 as f64 - scroll_position.y) * f64::from(line_height);
+ let origin = point(bounds.origin.x, bounds.origin.y + Pixels::from(y_offset));
+
+ let available_size = size(
+ AvailableSpace::Definite(available_width),
+ AvailableSpace::MinContent,
+ );
+
+ header.prepaint_as_root(origin, available_size, window, cx);
+
+ headers.push(BufferHeaderLayout { element: header });
+ }
+
+ headers
+ }
+
+ fn render_buffer_header(
+ &self,
+ for_excerpt: &ExcerptInfo,
+ is_folded: bool,
+ is_selected: bool,
+ is_sticky: bool,
+ jump_data: JumpData,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> impl IntoElement {
+ let editor = self.editor.read(cx);
+ let multi_buffer = editor.buffer.read(cx);
+ let is_read_only = self.editor.read(cx).read_only(cx);
+
+ let file_status = multi_buffer
+ .all_diff_hunks_expanded()
+ .then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx))
+ .flatten();
+ let indicator = multi_buffer
+ .buffer(for_excerpt.buffer_id)
+ .and_then(|buffer| {
+ let buffer = buffer.read(cx);
+ let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) {
+ (true, _) => Some(Color::Warning),
+ (_, true) => Some(Color::Accent),
+ (false, false) => None,
+ };
+ indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color))
+ });
+
+ let include_root = editor
+ .project
+ .as_ref()
+ .map(|project| project.read(cx).visible_worktrees(cx).count() > 1)
+ .unwrap_or_default();
+ let file = for_excerpt.buffer.file();
+ let can_open_excerpts = file.is_none_or(|file| file.can_open());
+ let path_style = file.map(|file| file.path_style(cx));
+ let relative_path = for_excerpt.buffer.resolve_file_path(include_root, cx);
+ let (parent_path, filename) = if let Some(path) = &relative_path {
+ if let Some(path_style) = path_style {
+ let (dir, file_name) = path_style.split(path);
+ (dir.map(|dir| dir.to_owned()), Some(file_name.to_owned()))
+ } else {
+ (None, Some(path.clone()))
+ }
+ } else {
+ (None, None)
+ };
+ let focus_handle = self.editor.read(cx).focus_handle(cx);
+ let colors = cx.theme().colors();
+
+ let header = div()
+ .p_1()
+ .w_full()
+ .h(FILE_HEADER_HEIGHT as f32 * window.line_height())
+ .child(
+ h_flex()
+ .size_full()
+ .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667)))
+ .pl_1()
+ .pr_2()
+ .rounded_sm()
+ .gap_1p5()
+ .when(is_sticky, |el| el.shadow_md())
+ .border_1()
+ .map(|border| {
+ let border_color = if !is_sticky
+ && is_selected
+ && is_folded
+ && focus_handle.contains_focused(window, cx)
+ {
+ colors.border_focused
+ } else {
+ colors.border
+ };
+ border.border_color(border_color)
+ })
+ .bg(colors.editor_subheader_background)
+ .hover(|style| style.bg(colors.element_hover))
+ .map(|header| {
+ let editor = self.editor.clone();
+ let buffer_id = for_excerpt.buffer_id;
+ let toggle_chevron_icon =
+ FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path);
+ let button_size = rems_from_px(28.);
+
+ header.child(
+ div()
+ .hover(|style| style.bg(colors.element_selected))
+ .rounded_xs()
+ .child(
+ ButtonLike::new("toggle-buffer-fold")
+ .style(ButtonStyle::Transparent)
+ .height(button_size.into())
+ .width(button_size)
+ .children(toggle_chevron_icon)
+ .tooltip({
+ let focus_handle = focus_handle.clone();
+ let is_folded_for_tooltip = is_folded;
+ move |_window, cx| {
+ Tooltip::with_meta_in(
+ if is_folded_for_tooltip {
+ "Unfold Excerpt"
+ } else {
+ "Fold Excerpt"
+ },
+ Some(&ToggleFold),
+ format!(
+ "{} to toggle all",
+ text_for_keystroke(
+ &Modifiers::alt(),
+ "click",
+ cx
+ )
+ ),
+ &focus_handle,
+ cx,
+ )
+ }
+ })
+ .on_click(move |event, window, cx| {
+ if event.modifiers().alt {
+ editor.update(cx, |editor, cx| {
+ editor.toggle_fold_all(
+ &ToggleFoldAll,
+ window,
+ cx,
+ );
+ });
+ } else if is_folded {
+ editor.update(cx, |editor, cx| {
+ editor.unfold_buffer(buffer_id, cx);
+ });
+ } else {
+ editor.update(cx, |editor, cx| {
+ editor.fold_buffer(buffer_id, cx);
+ });
+ }
+ }),
+ ),
+ )
+ })
+ .children(
+ editor
+ .addons
+ .values()
+ .filter_map(|addon| {
+ addon.render_buffer_header_controls(for_excerpt, window, cx)
+ })
+ .take(1),
+ )
+ .when(!is_read_only, |this| {
+ this.child(
+ h_flex()
+ .size_3()
+ .justify_center()
+ .flex_shrink_0()
+ .children(indicator),
+ )
+ })
+ .child(
+ h_flex()
+ .cursor_pointer()
+ .id("path_header_block")
+ .min_w_0()
+ .size_full()
+ .justify_between()
+ .overflow_hidden()
+ .child(h_flex().min_w_0().flex_1().gap_0p5().map(|path_header| {
+ let filename = filename
+ .map(SharedString::from)
+ .unwrap_or_else(|| "untitled".into());
+
+ path_header
+ .when(ItemSettings::get_global(cx).file_icons, |el| {
+ let path = path::Path::new(filename.as_str());
+ let icon =
+ FileIcons::get_icon(path, cx).unwrap_or_default();
+
+ el.child(Icon::from_path(icon).color(Color::Muted))
+ })
+ .child(
+ ButtonLike::new("filename-button")
+ .child(
+ Label::new(filename)
+ .single_line()
+ .color(file_status_label_color(file_status))
+ .when(
+ file_status.is_some_and(|s| s.is_deleted()),
+ |label| label.strikethrough(),
+ ),
+ )
+ .on_click(window.listener_for(&self.editor, {
+ let jump_data = jump_data.clone();
+ move |editor, e: &ClickEvent, window, cx| {
+ editor.open_excerpts_common(
+ Some(jump_data.clone()),
+ e.modifiers().secondary(),
+ window,
+ cx,
+ );
+ }
+ })),
+ )
+ .when(!for_excerpt.buffer.capability.editable(), |el| {
+ el.child(Icon::new(IconName::FileLock).color(Color::Muted))
+ })
+ .when_some(parent_path, |then, path| {
+ then.child(Label::new(path).truncate().color(
+ if file_status.is_some_and(FileStatus::is_deleted) {
+ Color::Custom(colors.text_disabled)
+ } else {
+ Color::Custom(colors.text_muted)
+ },
+ ))
+ })
+ }))
+ .when(
+ can_open_excerpts && is_selected && relative_path.is_some(),
+ |el| {
+ el.child(
+ Button::new("open-file-button", "Open File")
+ .style(ButtonStyle::OutlinedGhost)
+ .key_binding(KeyBinding::for_action_in(
+ &OpenExcerpts,
+ &focus_handle,
+ cx,
+ ))
+ .on_click(window.listener_for(&self.editor, {
+ let jump_data = jump_data.clone();
+ move |editor, e: &ClickEvent, window, cx| {
+ editor.open_excerpts_common(
+ Some(jump_data.clone()),
+ e.modifiers().secondary(),
+ window,
+ cx,
+ );
+ }
+ })),
+ )
+ },
+ )
+ .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
+ .on_click(window.listener_for(&self.editor, {
+ let buffer_id = for_excerpt.buffer_id;
+ move |editor, e: &ClickEvent, window, cx| {
+ if e.modifiers().alt {
+ editor.open_excerpts_common(
+ Some(jump_data.clone()),
+ e.modifiers().secondary(),
+ window,
+ cx,
+ );
+ return;
+ }
+
+ if is_folded {
+ editor.unfold_buffer(buffer_id, cx);
+ } else {
+ editor.fold_buffer(buffer_id, cx);
+ }
+ }
+ })),
+ ),
+ );
+
+ let file = for_excerpt.buffer.file().cloned();
+ let editor = self.editor.clone();
+
+ right_click_menu("buffer-header-context-menu")
+ .trigger(move |_, _, _| header)
+ .menu(move |window, cx| {
+ let menu_context = focus_handle.clone();
+ let editor = editor.clone();
+ let file = file.clone();
+ ContextMenu::build(window, cx, move |mut menu, window, cx| {
+ if let Some(file) = file
+ && let Some(project) = editor.read(cx).project()
+ && let Some(worktree) =
+ project.read(cx).worktree_for_id(file.worktree_id(cx), cx)
+ {
+ let path_style = file.path_style(cx);
+ let worktree = worktree.read(cx);
+ let relative_path = file.path();
+ let entry_for_path = worktree.entry_for_path(relative_path);
+ let abs_path = entry_for_path.map(|e| {
+ e.canonical_path.as_deref().map_or_else(
+ || worktree.absolutize(relative_path),
+ Path::to_path_buf,
+ )
+ });
+ let has_relative_path = worktree.root_entry().is_some_and(Entry::is_dir);
+
+ let parent_abs_path = abs_path
+ .as_ref()
+ .and_then(|abs_path| Some(abs_path.parent()?.to_path_buf()));
+ let relative_path = has_relative_path
+ .then_some(relative_path)
+ .map(ToOwned::to_owned);
+
+ let visible_in_project_panel =
+ relative_path.is_some() && worktree.is_visible();
+ let reveal_in_project_panel = entry_for_path
+ .filter(|_| visible_in_project_panel)
+ .map(|entry| entry.id);
+ menu = menu
+ .when_some(abs_path, |menu, abs_path| {
+ menu.entry(
+ "Copy Path",
+ Some(Box::new(zed_actions::workspace::CopyPath)),
+ window.handler_for(&editor, move |_, _, cx| {
+ cx.write_to_clipboard(ClipboardItem::new_string(
+ abs_path.to_string_lossy().into_owned(),
+ ));
+ }),
+ )
+ })
+ .when_some(relative_path, |menu, relative_path| {
+ menu.entry(
+ "Copy Relative Path",
+ Some(Box::new(zed_actions::workspace::CopyRelativePath)),
+ window.handler_for(&editor, move |_, _, cx| {
+ cx.write_to_clipboard(ClipboardItem::new_string(
+ relative_path.display(path_style).to_string(),
+ ));
+ }),
+ )
+ })
+ .when(
+ reveal_in_project_panel.is_some() || parent_abs_path.is_some(),
+ |menu| menu.separator(),
+ )
+ .when_some(reveal_in_project_panel, |menu, entry_id| {
+ menu.entry(
+ "Reveal In Project Panel",
+ Some(Box::new(RevealInProjectPanel::default())),
+ window.handler_for(&editor, move |editor, _, cx| {
+ if let Some(project) = &mut editor.project {
+ project.update(cx, |_, cx| {
+ cx.emit(project::Event::RevealInProjectPanel(
+ entry_id,
+ ))
+ });
+ }
+ }),
+ )
+ })
+ .when_some(parent_abs_path, |menu, parent_abs_path| {
+ menu.entry(
+ "Open in Terminal",
+ Some(Box::new(OpenInTerminal)),
+ window.handler_for(&editor, move |_, window, cx| {
+ window.dispatch_action(
+ OpenTerminal {
+ working_directory: parent_abs_path.clone(),
+ local: false,
+ }
+ .boxed_clone(),
+ cx,
+ );
+ }),
+ )
+ });
+ }
+
+ menu.context(menu_context)
+ })
+ })
+ }
+}
+
+fn header_jump_data(
+ editor_snapshot: &EditorSnapshot,
+ block_row_start: DisplayRow,
+ height: u32,
+ first_excerpt: &ExcerptInfo,
+ latest_selection_anchors: &HashMap<BufferId, Anchor>,
+) -> JumpData {
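+ // Jump to the most recent selection in this buffer when there is one; otherwise fall back
+ // to the start of the excerpt's primary range.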
+ let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id)
+ && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id)
+ && let Some(buffer) = editor_snapshot
+ .buffer_snapshot()
+ .buffer_for_excerpt(anchor.excerpt_id)
+ {
+ JumpTargetInExcerptInput {
+ id: anchor.excerpt_id,
+ buffer,
+ excerpt_start_anchor: range.start,
+ jump_anchor: anchor.text_anchor,
+ }
+ } else {
+ JumpTargetInExcerptInput {
+ id: first_excerpt.id,
+ buffer: &first_excerpt.buffer,
+ excerpt_start_anchor: first_excerpt.range.context.start,
+ jump_anchor: first_excerpt.range.primary.start,
+ }
+ };
+ header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target)
+}
+
+struct JumpTargetInExcerptInput<'a> {
+ id: ExcerptId,
+ buffer: &'a language::BufferSnapshot,
+ excerpt_start_anchor: text::Anchor,
+ jump_anchor: text::Anchor,
+}
+
+fn header_jump_data_inner(
+ snapshot: &EditorSnapshot,
+ block_row_start: DisplayRow,
+ height: u32,
+ for_excerpt: &JumpTargetInExcerptInput,
+) -> JumpData {
+ let buffer = &for_excerpt.buffer;
+ let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer);
+ let excerpt_start = for_excerpt.excerpt_start_anchor;
+ let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start {
+ 0
+ } else {
+ let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer);
+ jump_position.row.saturating_sub(excerpt_start_point.row)
+ };
+
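+ // Number of rows between the top of the viewport and the jump target's line.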
+ let line_offset_from_top = (block_row_start.0 + height + rows_from_excerpt_start)
+ .saturating_sub(
+ snapshot
+ .scroll_anchor
+ .scroll_position(&snapshot.display_snapshot)
+ .y as u32,
+ );
+
+ JumpData::MultiBufferPoint {
+ excerpt_id: for_excerpt.id,
+ anchor: for_excerpt.jump_anchor,
+ position: jump_position,
+ line_offset_from_top,
+ }
+}
+
+fn file_status_label_color(file_status: Option<FileStatus>) -> Color {
+ file_status.map_or(Color::Default, |status| {
+ if status.is_conflicted() {
+ Color::Conflict
+ } else if status.is_modified() {
+ Color::Modified
+ } else if status.is_deleted() {
+ Color::Disabled
+ } else if status.is_created() {
+ Color::Created
+ } else {
+ Color::Default
+ }
+ })
+}
@@ -176,7 +176,15 @@ pub fn block_content_for_tests(
}
pub fn editor_content_with_blocks(editor: &Entity<Editor>, cx: &mut VisualTestContext) -> String {
- let draw_size = size(px(3000.0), px(3000.0));
+ editor_content_with_blocks_and_width(editor, px(3000.), cx)
+}
+
+pub fn editor_content_with_blocks_and_width(
+ editor: &Entity<Editor>,
+ width: Pixels,
+ cx: &mut VisualTestContext,
+) -> String {
+ let draw_size = size(width, px(3000.0));
cx.simulate_resize(draw_size);
cx.draw(gpui::Point::default(), draw_size, |_, _| editor.clone());
let (snapshot, mut lines, blocks) = editor.update_in(cx, |editor, window, cx| {
@@ -224,7 +232,14 @@ pub fn editor_content_with_blocks(editor: &Entity<Editor>, cx: &mut VisualTestCo
height,
} => {
lines[row.0 as usize].push_str(&cx.update(|_, cx| {
- format!("ยง {}", first_excerpt.buffer.file().unwrap().file_name(cx))
+ format!(
+ "ยง {}",
+ first_excerpt
+ .buffer
+ .file()
+ .map(|file| file.file_name(cx))
+ .unwrap_or("<no file>")
+ )
}));
for row in row.0 + 1..row.0 + height {
lines[row as usize].push_str("ยง -----");
@@ -236,15 +251,28 @@ pub fn editor_content_with_blocks(editor: &Entity<Editor>, cx: &mut VisualTestCo
}
}
Block::BufferHeader { excerpt, height } => {
- lines[row.0 as usize].push_str(
- &cx.update(|_, cx| {
- format!("ยง {}", excerpt.buffer.file().unwrap().file_name(cx))
- }),
- );
+ lines[row.0 as usize].push_str(&cx.update(|_, cx| {
+ format!(
+ "ยง {}",
+ excerpt
+ .buffer
+ .file()
+ .map(|file| file.file_name(cx))
+ .unwrap_or("<no file>")
+ )
+ }));
for row in row.0 + 1..row.0 + height {
lines[row as usize].push_str("§ -----");
}
}
+ Block::Spacer { height, .. } => {
+ for row in row.0..row.0 + height {
+ while lines.len() <= row as usize {
+ lines.push(String::new());
+ }
+ lines[row as usize].push_str("§ spacer");
+ }
+ }
}
}
lines.join("\n")
@@ -884,7 +884,7 @@ async fn build_buffer_diff(
diff.update_diff(
buffer.text.clone(),
old_text.map(|old_text| Arc::from(old_text.as_str())),
- true,
+ Some(true),
language.clone(),
cx,
)
@@ -178,7 +178,7 @@ async fn build_buffer_diff(
diff.update_diff(
new_buffer_snapshot.text.clone(),
Some(old_buffer_snapshot.text().into()),
- true,
+ Some(true),
new_buffer_snapshot.language().cloned(),
cx,
)
@@ -8,7 +8,7 @@ use anyhow::{Context as _, Result, anyhow};
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
use collections::{HashMap, HashSet};
use editor::{
- Addon, Editor, EditorEvent, SelectionEffects, SplittableEditor,
+ Addon, Editor, EditorEvent, SelectionEffects, SplittableEditor, ToggleSplitDiff,
actions::{GoToHunk, GoToPreviousHunk, SendReviewToAgent},
multibuffer_context_lines,
scroll::Autoscroll,
@@ -477,6 +477,7 @@ impl ProjectDiff {
}
fn button_states(&self, cx: &App) -> ButtonStates {
+ let is_split = self.editor.read(cx).is_split();
let editor = self.editor.read(cx).primary_editor().read(cx);
let snapshot = self.multibuffer.read(cx).snapshot(cx);
let prev_next = snapshot.diff_hunks().nth(1).is_some();
@@ -537,6 +538,7 @@ impl ProjectDiff {
selection,
stage_all,
unstage_all,
+ is_split,
}
}
@@ -1293,6 +1295,7 @@ struct ButtonStates {
selection: bool,
stage_all: bool,
unstage_all: bool,
+ is_split: bool,
}
impl Render for ProjectDiffToolbar {
@@ -1432,6 +1435,24 @@ impl Render for ProjectDiffToolbar {
)
},
)
+ .child(
+ Button::new(
+ "toggle-split",
+ if button_states.is_split {
+ "Stacked View"
+ } else {
+ "Split View"
+ },
+ )
+ .tooltip(Tooltip::for_action_title_in(
+ "Toggle Split View",
+ &ToggleSplitDiff,
+ &focus_handle,
+ ))
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.dispatch_action(&ToggleSplitDiff, window, cx);
+ })),
+ )
.child(
Button::new("commit", "Commit")
.tooltip(Tooltip::for_action_title_in(
@@ -268,7 +268,7 @@ async fn update_diff_buffer(
diff.update_diff(
source_buffer_snapshot.text.clone(),
Some(Arc::from(base_text.as_str())),
- true,
+ Some(true),
language.clone(),
cx,
)
@@ -734,14 +734,14 @@ impl Default for Background {
}
/// Creates a hash pattern background
-pub fn pattern_slash(color: Hsla, width: f32, interval: f32) -> Background {
+pub fn pattern_slash(color: impl Into<Hsla>, width: f32, interval: f32) -> Background {
let width_scaled = (width * 255.0) as u32;
let interval_scaled = (interval * 255.0) as u32;
let height = ((width_scaled * 0xFFFF) + interval_scaled) as f32;
Background {
tag: BackgroundTag::PatternSlash,
- solid: color,
+ solid: color.into(),
gradient_angle_or_pattern_height: height,
..Default::default()
}
@@ -586,7 +586,7 @@ pub struct Chunk<'a> {
}
/// A set of edits to a given version of a buffer, computed asynchronously.
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct Diff {
pub base_version: clock::Global,
pub line_ending: LineEnding,
@@ -2148,11 +2148,15 @@ impl Buffer {
/// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
/// and the given new text.
- pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
+ pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
+ where
+ T: AsRef<str> + Send + 'static,
+ {
let old_text = self.as_rope().clone();
let base_version = self.version();
cx.background_spawn(async move {
let old_text = old_text.to_string();
+ let mut new_text = new_text.as_ref().to_owned();
let line_ending = LineEnding::detect(&new_text);
LineEnding::normalize(&mut new_text);
let edits = text_diff(&old_text, &new_text);
@@ -259,7 +259,7 @@ impl SyntaxTreeView {
let multi_buffer = editor.buffer().read(cx);
let (buffer, range, excerpt_id) = snapshot
.buffer_snapshot()
- .range_to_buffer_ranges(selection_range)
+ .range_to_buffer_ranges(selection_range.start..=selection_range.end)
.pop()?;
let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap();
Some((buffer, range, excerpt_id))
@@ -10,8 +10,8 @@ pub use anchor::{Anchor, AnchorRangeExt};
use anyhow::{Result, anyhow};
use buffer_diff::{
- BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffHunk, DiffHunkSecondaryStatus,
- DiffHunkStatus, DiffHunkStatusKind,
+ BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunk,
+ DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
};
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
@@ -537,12 +537,19 @@ impl DiffState {
fn new(diff: Entity<BufferDiff>, cx: &mut Context<MultiBuffer>) -> Self {
DiffState {
_subscription: cx.subscribe(&diff, |this, diff, event, cx| match event {
- BufferDiffEvent::DiffChanged {
+ BufferDiffEvent::DiffChanged(DiffChanged {
changed_range,
base_text_changed_range: _,
- } => {
- if let Some(changed_range) = changed_range.clone() {
- this.buffer_diff_changed(diff, changed_range, cx)
+ extended_range,
+ }) => {
+ let use_extended = this.snapshot.borrow().use_extended_diff_range;
+ let range = if use_extended {
+ extended_range.clone()
+ } else {
+ changed_range.clone()
+ };
+ if let Some(range) = range {
+ this.buffer_diff_changed(diff, range, cx)
}
cx.emit(Event::BufferDiffChanged);
}
@@ -564,10 +571,11 @@ impl DiffState {
_subscription: cx.subscribe(&diff, {
let main_buffer = main_buffer.clone();
move |this, diff, event, cx| match event {
- BufferDiffEvent::DiffChanged {
+ BufferDiffEvent::DiffChanged(DiffChanged {
changed_range: _,
base_text_changed_range,
- } => {
+ extended_range: _,
+ }) => {
if let Some(base_text_changed_range) = base_text_changed_range.clone() {
this.inverted_buffer_diff_changed(
diff,
@@ -609,6 +617,7 @@ pub struct MultiBufferSnapshot {
trailing_excerpt_update_count: usize,
all_diff_hunks_expanded: bool,
show_deleted_hunks: bool,
+ use_extended_diff_range: bool,
show_headers: bool,
}
@@ -1903,6 +1912,7 @@ impl MultiBuffer {
trailing_excerpt_update_count,
all_diff_hunks_expanded: _,
show_deleted_hunks: _,
+ use_extended_diff_range: _,
show_headers: _,
} = self.snapshot.get_mut();
let start = ExcerptDimension(MultiBufferOffset::ZERO);
@@ -2367,7 +2377,6 @@ impl MultiBuffer {
.get(&buffer_id)
.is_none_or(|old_diff| !new_diff.base_texts_definitely_eq(old_diff));
snapshot.diffs.insert_or_replace(buffer_id, new_diff);
- self.buffer_changed_since_sync.replace(true);
let buffer = buffer_state.buffer.read(cx);
let diff_change_range = range.to_offset(buffer);
@@ -2648,6 +2657,10 @@ impl MultiBuffer {
self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx);
}
+ pub fn set_use_extended_diff_range(&mut self, use_extended: bool, _cx: &mut Context<Self>) {
+ self.snapshot.get_mut().use_extended_diff_range = use_extended;
+ }
+
pub fn has_multiple_hunks(&self, cx: &App) -> bool {
self.read(cx)
.diff_hunks_in_range(Anchor::min()..Anchor::max())
@@ -2988,6 +3001,7 @@ impl MultiBuffer {
trailing_excerpt_update_count: _,
all_diff_hunks_expanded: _,
show_deleted_hunks: _,
+ use_extended_diff_range: _,
show_headers: _,
} = snapshot;
*is_dirty = false;
@@ -3117,6 +3131,9 @@ impl MultiBuffer {
&& let Some((old_diff, old_main_buffer, new_main_buffer)) =
inverted_diff_touch_info.get(locator)
{
+ // TODO(split-diff) this iterates over all excerpts for all edited buffers;
+ // it would be nice to skip excerpts that weren't edited, using
+ // new_main_buffer.has_edits_since_in_range.
let excerpt_buffer_start = old_excerpt
.range
.context
@@ -3124,13 +3141,27 @@ impl MultiBuffer {
.to_offset(&old_excerpt.buffer);
let excerpt_buffer_end = excerpt_buffer_start + old_excerpt.text_summary.len;
- for hunk in old_diff.hunks_intersecting_base_text_range(
- excerpt_buffer_start..excerpt_buffer_end,
- old_main_buffer,
- ) {
- if hunk.buffer_range.start.is_valid(new_main_buffer) {
- continue;
- }
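+ // Collect hunks whose start anchor is valid in exactly one of the old and new main buffers
+ // (i.e. hunks added or removed by this edit), ordered by their position in the base text.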
+ let mut hunks = old_diff
+ .hunks_intersecting_base_text_range(
+ excerpt_buffer_start..excerpt_buffer_end,
+ old_main_buffer,
+ )
+ .chain(old_diff.hunks_intersecting_base_text_range(
+ excerpt_buffer_start..excerpt_buffer_end,
+ new_main_buffer,
+ ))
+ .filter(|hunk| {
+ hunk.buffer_range.start.is_valid(&old_main_buffer)
+ != hunk.buffer_range.start.is_valid(&new_main_buffer)
+ })
+ .collect::<Vec<_>>();
+ hunks.sort_by(|l, r| {
+ l.diff_base_byte_range
+ .start
+ .cmp(&r.diff_base_byte_range.start)
+ });
+
+ for hunk in hunks {
let hunk_buffer_start = hunk.diff_base_byte_range.start;
if hunk_buffer_start >= excerpt_buffer_start
&& hunk_buffer_start <= excerpt_buffer_end
@@ -3406,6 +3437,7 @@ impl MultiBuffer {
edit_buffer_start..edit_buffer_end,
main_buffer,
) {
+ did_expand_hunks = true;
let hunk_buffer_range = hunk.diff_base_byte_range.clone();
if hunk_buffer_range.start < excerpt_buffer_start {
log::trace!("skipping hunk that starts before excerpt");
@@ -4081,27 +4113,52 @@ impl MultiBufferSnapshot {
&self,
ranges: impl Iterator<Item = Range<T>>,
) -> impl Iterator<Item = (&BufferSnapshot, Range<BufferOffset>, ExcerptId)> {
- ranges.flat_map(|range| self.range_to_buffer_ranges(range).into_iter())
+ ranges.flat_map(|range| {
+ self.range_to_buffer_ranges((Bound::Included(range.start), Bound::Included(range.end)))
+ .into_iter()
+ })
}
- pub fn range_to_buffer_ranges<T: ToOffset>(
+ pub fn range_to_buffer_ranges<R, T>(
&self,
- range: Range<T>,
- ) -> Vec<(&BufferSnapshot, Range<BufferOffset>, ExcerptId)> {
- let start = range.start.to_offset(self);
- let end = range.end.to_offset(self);
+ range: R,
+ ) -> Vec<(&BufferSnapshot, Range<BufferOffset>, ExcerptId)>
+ where
+ R: RangeBounds<T>,
+ T: ToOffset,
+ {
+ let start = match range.start_bound() {
+ Bound::Included(start) => start.to_offset(self),
+ Bound::Excluded(_) => panic!("excluded start bound not supported"),
+ Bound::Unbounded => MultiBufferOffset::ZERO,
+ };
+ let end_bound = match range.end_bound() {
+ Bound::Included(end) => Bound::Included(end.to_offset(self)),
+ Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)),
+ Bound::Unbounded => Bound::Unbounded,
+ };
+ let bounds = (Bound::Included(start), end_bound);
let mut cursor = self.cursor::<MultiBufferOffset, BufferOffset>();
cursor.seek(&start);
let mut result: Vec<(&BufferSnapshot, Range<BufferOffset>, ExcerptId)> = Vec::new();
while let Some(region) = cursor.region() {
- if region.range.start > end {
+ let dominated_by_end_bound = match end_bound {
+ Bound::Included(end) => region.range.start > end,
+ Bound::Excluded(end) => region.range.start >= end,
+ Bound::Unbounded => false,
+ };
+ if dominated_by_end_bound {
break;
}
if region.is_main_buffer {
let start_overshoot = start.saturating_sub(region.range.start);
- let end_overshoot = end.saturating_sub(region.range.start);
+ let end_offset = match end_bound {
+ Bound::Included(end) | Bound::Excluded(end) => end,
+ Bound::Unbounded => region.range.end,
+ };
+ let end_overshoot = end_offset.saturating_sub(region.range.start);
let start = region
.buffer_range
.end
@@ -4120,6 +4177,20 @@ impl MultiBufferSnapshot {
}
cursor.next();
}
+
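+ // The region loop above can miss a trailing empty excerpt, since it contributes no text.
+ // Include it explicitly when the requested bounds reach the end of the multibuffer.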
+ if let Some(excerpt) = cursor.excerpt() {
+ let dominated_by_prev_excerpt =
+ result.last().is_some_and(|(_, _, id)| *id == excerpt.id);
+ if !dominated_by_prev_excerpt && excerpt.text_summary.len == 0 {
+ let excerpt_position = self.len();
+ if bounds.contains(&excerpt_position) {
+ let buffer_offset =
+ BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer));
+ result.push((&excerpt.buffer, buffer_offset..buffer_offset, excerpt.id));
+ }
+ }
+ }
+
result
}
@@ -6740,6 +6811,15 @@ impl MultiBufferSnapshot {
self.diffs.get(&buffer_id).map(|diff| &diff.diff)
}
+ /// For inverted diffs (used in side-by-side diff view), returns the main buffer
+ /// snapshot that the diff's anchors refer to. Returns `None` if the diff is not
+ /// inverted or if there's no diff for the given buffer ID.
+ pub fn inverted_diff_main_buffer(&self, buffer_id: BufferId) -> Option<&text::BufferSnapshot> {
+ self.diffs
+ .get(&buffer_id)
+ .and_then(|diff| diff.main_buffer.as_ref())
+ }
+
/// Visually annotates a position or range with the `Debug` representation of a value. The
/// callsite of this function is used as a key - previous annotations will be removed.
#[cfg(debug_assertions)]
@@ -6767,11 +6847,11 @@ impl MultiBufferSnapshot {
.to_multi_buffer_debug_ranges(self)
.into_iter()
.flat_map(|range| {
- self.range_to_buffer_ranges(range).into_iter().map(
- |(buffer, range, _excerpt_id)| {
+ self.range_to_buffer_ranges(range.start..=range.end)
+ .into_iter()
+ .map(|(buffer, range, _excerpt_id)| {
buffer.anchor_after(range.start)..buffer.anchor_before(range.end)
- },
- )
+ })
})
.collect();
text::debug::GlobalDebugRanges::with_locked(|debug_ranges| {
@@ -4012,7 +4012,7 @@ async fn test_singleton_with_inverted_diff(cx: &mut TestAppContext) {
diff.update_diff(
buffer.read(cx).text_snapshot(),
Some(base_text.into()),
- false,
+ None,
None,
cx,
)
@@ -4050,7 +4050,7 @@ async fn test_singleton_with_inverted_diff(cx: &mut TestAppContext) {
diff.update_diff(
buffer.read(cx).text_snapshot(),
Some(base_text.into()),
- false,
+ None,
None,
cx,
)
@@ -4890,3 +4890,122 @@ fn test_excerpts_containment_functions(cx: &mut App) {
"excerpt_containing should return None for ranges spanning multiple excerpts"
);
}
+
+#[gpui::test]
+fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) {
+ use std::ops::Bound;
+
+ let buffer_1 = cx.new(|cx| Buffer::local("aaa\nbbb", cx));
+ let buffer_2 = cx.new(|cx| Buffer::local("ccc", cx));
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| {
+ let excerpt_1_id = multibuffer.push_excerpts(
+ buffer_1.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+ cx,
+ )[0];
+
+ let excerpt_2_id = multibuffer.push_excerpts(
+ buffer_2.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 3))],
+ cx,
+ )[0];
+
+ (excerpt_1_id, excerpt_2_id)
+ });
+
+ let snapshot = multibuffer.read(cx).snapshot(cx);
+ assert_eq!(snapshot.text(), "aaa\nbbb\nccc");
+
+ let excerpt_2_start = Point::new(2, 0);
+
+ let ranges_half_open = snapshot.range_to_buffer_ranges(Point::zero()..excerpt_2_start);
+ assert_eq!(
+ ranges_half_open.len(),
+ 1,
+ "Half-open range ending at excerpt start should EXCLUDE that excerpt"
+ );
+ assert_eq!(ranges_half_open[0].2, excerpt_1_id);
+
+ let ranges_inclusive = snapshot.range_to_buffer_ranges(Point::zero()..=excerpt_2_start);
+ assert_eq!(
+ ranges_inclusive.len(),
+ 2,
+ "Inclusive range ending at excerpt start should INCLUDE that excerpt"
+ );
+ assert_eq!(ranges_inclusive[0].2, excerpt_1_id);
+ assert_eq!(ranges_inclusive[1].2, excerpt_2_id);
+
+ let ranges_unbounded =
+ snapshot.range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded));
+ assert_eq!(
+ ranges_unbounded.len(),
+ 2,
+ "Unbounded end should include all excerpts"
+ );
+ assert_eq!(ranges_unbounded[0].2, excerpt_1_id);
+ assert_eq!(ranges_unbounded[1].2, excerpt_2_id);
+
+ let ranges_excluded_end = snapshot.range_to_buffer_ranges((
+ Bound::Included(Point::zero()),
+ Bound::Excluded(excerpt_2_start),
+ ));
+ assert_eq!(
+ ranges_excluded_end.len(),
+ 1,
+ "Excluded end bound should exclude excerpt starting at that point"
+ );
+ assert_eq!(ranges_excluded_end[0].2, excerpt_1_id);
+
+ let buffer_empty = cx.new(|cx| Buffer::local("", cx));
+ let multibuffer_trailing_empty = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ let (te_excerpt_1_id, te_excerpt_2_id) =
+ multibuffer_trailing_empty.update(cx, |multibuffer, cx| {
+ let excerpt_1_id = multibuffer.push_excerpts(
+ buffer_1.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 3))],
+ cx,
+ )[0];
+
+ let excerpt_2_id = multibuffer.push_excerpts(
+ buffer_empty.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(0, 0))],
+ cx,
+ )[0];
+
+ (excerpt_1_id, excerpt_2_id)
+ });
+
+ let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx);
+ assert_eq!(snapshot_trailing.text(), "aaa\nbbb\n");
+
+ let max_point = snapshot_trailing.max_point();
+
+ let ranges_half_open_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..max_point);
+ assert_eq!(
+ ranges_half_open_max.len(),
+ 1,
+ "Half-open range to max_point should EXCLUDE trailing empty excerpt at max_point"
+ );
+ assert_eq!(ranges_half_open_max[0].2, te_excerpt_1_id);
+
+ let ranges_inclusive_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..=max_point);
+ assert_eq!(
+ ranges_inclusive_max.len(),
+ 2,
+ "Inclusive range to max_point should INCLUDE trailing empty excerpt"
+ );
+ assert_eq!(ranges_inclusive_max[0].2, te_excerpt_1_id);
+ assert_eq!(ranges_inclusive_max[1].2, te_excerpt_2_id);
+
+ let ranges_unbounded_trailing = snapshot_trailing
+ .range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded));
+ assert_eq!(
+ ranges_unbounded_trailing.len(),
+ 2,
+ "Unbounded end should include trailing empty excerpt"
+ );
+ assert_eq!(ranges_unbounded_trailing[0].2, te_excerpt_1_id);
+ assert_eq!(ranges_unbounded_trailing[1].2, te_excerpt_2_id);
+}
@@ -3115,7 +3115,7 @@ impl BufferGitState {
unstaged_diff.read(cx).update_diff(
buffer.clone(),
index,
- index_changed,
+ index_changed.then_some(false),
language.clone(),
cx,
)
@@ -3138,7 +3138,7 @@ impl BufferGitState {
uncommitted_diff.read(cx).update_diff(
buffer.clone(),
head,
- head_changed,
+ head_changed.then_some(true),
language.clone(),
cx,
)
@@ -9,7 +9,8 @@ use crate::{
};
use async_trait::async_trait;
use buffer_diff::{
- BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
+ BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
+ assert_hunks,
};
use fs::FakeFs;
use futures::{StreamExt, future};
@@ -8159,10 +8160,11 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
BufferDiffEvent::HunksStagedOrUnstaged(_)
));
let event = diff_events.next().await.unwrap();
- if let BufferDiffEvent::DiffChanged {
+ if let BufferDiffEvent::DiffChanged(DiffChanged {
changed_range: Some(changed_range),
base_text_changed_range: _,
- } = event
+ extended_range: _,
+ }) = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
@@ -8202,10 +8204,11 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
// The diff emits a change event for the changed index text.
let event = diff_events.next().await.unwrap();
- if let BufferDiffEvent::DiffChanged {
+ if let BufferDiffEvent::DiffChanged(DiffChanged {
changed_range: Some(changed_range),
base_text_changed_range: _,
- } = event
+ extended_range: _,
+ }) = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
@@ -8260,10 +8263,11 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
BufferDiffEvent::HunksStagedOrUnstaged(_)
));
let event = diff_events.next().await.unwrap();
- if let BufferDiffEvent::DiffChanged {
+ if let BufferDiffEvent::DiffChanged(DiffChanged {
changed_range: Some(changed_range),
base_text_changed_range: _,
- } = event
+ extended_range: _,
+ }) = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
@@ -8302,10 +8306,11 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
});
let event = diff_events.next().await.unwrap();
- if let BufferDiffEvent::DiffChanged {
+ if let BufferDiffEvent::DiffChanged(DiffChanged {
changed_range: Some(changed_range),
base_text_changed_range: _,
- } = event
+ extended_range: _,
+ }) = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
@@ -72,7 +72,7 @@ where
}
}
- fn reset(&mut self) {
+ pub fn reset(&mut self) {
self.did_seek = false;
self.at_end = self.tree.is_empty();
self.stack.truncate(0);