Detailed changes
@@ -2437,7 +2437,7 @@ async fn test_git_diff_base_change(
buffer_local_a.read_with(cx_a, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@@ -2457,7 +2457,7 @@ async fn test_git_diff_base_change(
buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@@ -2481,7 +2481,7 @@ async fn test_git_diff_base_change(
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@@ -2492,7 +2492,7 @@ async fn test_git_diff_base_change(
buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@@ -2535,7 +2535,7 @@ async fn test_git_diff_base_change(
buffer_local_b.read_with(cx_a, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@@ -2555,7 +2555,7 @@ async fn test_git_diff_base_change(
buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@@ -2583,12 +2583,12 @@ async fn test_git_diff_base_change(
"{:?}",
buffer
.snapshot()
- .git_diff_hunks_in_row_range(0..4, false)
+ .git_diff_hunks_in_row_range(0..4)
.collect::<Vec<_>>()
);
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@@ -2599,7 +2599,7 @@ async fn test_git_diff_base_change(
buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
+ buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@@ -20,6 +20,7 @@ mod editor_tests;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
+use ::git::diff::DiffHunk;
use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Result};
use blink_manager::BlinkManager;
@@ -527,7 +528,7 @@ pub struct EditorSnapshot {
impl EditorSnapshot {
fn has_scrollbar_info(&self) -> bool {
self.buffer_snapshot
- .git_diff_hunks_in_range(0..self.max_point().row(), false)
+ .git_diff_hunks_in_range(0..self.max_point().row())
.next()
.is_some()
}
@@ -5569,68 +5570,91 @@ impl Editor {
}
fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
- self.go_to_hunk_impl(Direction::Next, cx)
- }
+ let snapshot = self
+ .display_map
+ .update(cx, |display_map, cx| display_map.snapshot(cx));
+ let selection = self.selections.newest::<Point>(cx);
- fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext<Self>) {
- self.go_to_hunk_impl(Direction::Prev, cx)
+ if !self.seek_in_direction(
+ &snapshot,
+ selection.head(),
+ false,
+ snapshot
+ .buffer_snapshot
+ .git_diff_hunks_in_range((selection.head().row + 1)..u32::MAX),
+ cx,
+ ) {
+ let wrapped_point = Point::zero();
+ self.seek_in_direction(
+ &snapshot,
+ wrapped_point,
+ true,
+ snapshot
+ .buffer_snapshot
+ .git_diff_hunks_in_range((wrapped_point.row + 1)..u32::MAX),
+ cx,
+ );
+ }
}
- pub fn go_to_hunk_impl(&mut self, direction: Direction, cx: &mut ViewContext<Self>) {
+ fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext<Self>) {
let snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
let selection = self.selections.newest::<Point>(cx);
- fn seek_in_direction(
- this: &mut Editor,
- snapshot: &DisplaySnapshot,
- initial_point: Point,
- is_wrapped: bool,
- direction: Direction,
- cx: &mut ViewContext<Editor>,
- ) -> bool {
- let hunks = if direction == Direction::Next {
- snapshot
- .buffer_snapshot
- .git_diff_hunks_in_range(initial_point.row..u32::MAX, false)
- } else {
+ if !self.seek_in_direction(
+ &snapshot,
+ selection.head(),
+ false,
+ snapshot
+ .buffer_snapshot
+ .git_diff_hunks_in_range_rev(0..selection.head().row),
+ cx,
+ ) {
+ let wrapped_point = snapshot.buffer_snapshot.max_point();
+ self.seek_in_direction(
+ &snapshot,
+ wrapped_point,
+ true,
snapshot
.buffer_snapshot
- .git_diff_hunks_in_range(0..initial_point.row, true)
- };
-
- let display_point = initial_point.to_display_point(snapshot);
- let mut hunks = hunks
- .map(|hunk| diff_hunk_to_display(hunk, &snapshot))
- .skip_while(|hunk| {
- if is_wrapped {
- false
- } else {
- hunk.contains_display_row(display_point.row())
- }
- })
- .dedup();
+ .git_diff_hunks_in_range_rev(0..wrapped_point.row),
+ cx,
+ );
+ }
+ }
- if let Some(hunk) = hunks.next() {
- this.change_selections(Some(Autoscroll::fit()), cx, |s| {
- let row = hunk.start_display_row();
- let point = DisplayPoint::new(row, 0);
- s.select_display_ranges([point..point]);
- });
+ fn seek_in_direction(
+ &mut self,
+ snapshot: &DisplaySnapshot,
+ initial_point: Point,
+ is_wrapped: bool,
+ hunks: impl Iterator<Item = DiffHunk<u32>>,
+ cx: &mut ViewContext<Editor>,
+ ) -> bool {
+ let display_point = initial_point.to_display_point(snapshot);
+ let mut hunks = hunks
+ .map(|hunk| diff_hunk_to_display(hunk, &snapshot))
+ .skip_while(|hunk| {
+ if is_wrapped {
+ false
+ } else {
+ hunk.contains_display_row(display_point.row())
+ }
+ })
+ .dedup();
- true
- } else {
- false
- }
- }
+ if let Some(hunk) = hunks.next() {
+ self.change_selections(Some(Autoscroll::fit()), cx, |s| {
+ let row = hunk.start_display_row();
+ let point = DisplayPoint::new(row, 0);
+ s.select_display_ranges([point..point]);
+ });
- if !seek_in_direction(self, &snapshot, selection.head(), false, direction, cx) {
- let wrapped_point = match direction {
- Direction::Next => Point::zero(),
- Direction::Prev => snapshot.buffer_snapshot.max_point(),
- };
- seek_in_direction(self, &snapshot, wrapped_point, true, direction, cx);
+ true
+ } else {
+ false
}
}
@@ -50,6 +50,7 @@ use std::{
ops::Range,
sync::Arc,
};
+use text::Point;
use workspace::{item::Item, GitGutterSetting, WorkspaceSettings};
enum FoldMarkers {}
@@ -651,7 +652,7 @@ impl EditorElement {
//TODO: This rendering is entirely a horrible hack
DiffHunkStatus::Removed => {
- let row = *display_row_range.start();
+ let row = display_row_range.start;
let offset = line_height / 2.;
let start_y = row as f32 * line_height - offset - scroll_top;
@@ -673,11 +674,11 @@ impl EditorElement {
}
};
- let start_row = *display_row_range.start();
- let end_row = *display_row_range.end();
+ let start_row = display_row_range.start;
+ let end_row = display_row_range.end;
let start_y = start_row as f32 * line_height - scroll_top;
- let end_y = end_row as f32 * line_height - scroll_top + line_height;
+ let end_y = end_row as f32 * line_height - scroll_top;
let width = diff_style.width_em * line_height;
let highlight_origin = bounds.origin() + vec2f(-width, start_y);
@@ -1056,13 +1057,17 @@ impl EditorElement {
.position_map
.snapshot
.buffer_snapshot
- .git_diff_hunks_in_range(0..(max_row.floor() as u32), false)
+ .git_diff_hunks_in_range(0..(max_row.floor() as u32))
{
- let start_y = y_for_row(hunk.buffer_range.start as f32);
+ let start_display = Point::new(hunk.buffer_range.start, 0)
+ .to_display_point(&layout.position_map.snapshot.display_snapshot);
+ let end_display = Point::new(hunk.buffer_range.end, 0)
+ .to_display_point(&layout.position_map.snapshot.display_snapshot);
+ let start_y = y_for_row(start_display.row() as f32);
let mut end_y = if hunk.buffer_range.start == hunk.buffer_range.end {
- y_for_row((hunk.buffer_range.end + 1) as f32)
+ y_for_row((end_display.row() + 1) as f32)
} else {
- y_for_row((hunk.buffer_range.end) as f32)
+ y_for_row((end_display.row()) as f32)
};
if end_y - start_y < 1. {
@@ -1269,7 +1274,7 @@ impl EditorElement {
.row;
buffer_snapshot
- .git_diff_hunks_in_range(buffer_start_row..buffer_end_row, false)
+ .git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.map(|hunk| diff_hunk_to_display(hunk, snapshot))
.dedup()
.collect()
@@ -1,4 +1,4 @@
-use std::ops::RangeInclusive;
+use std::ops::Range;
use git::diff::{DiffHunk, DiffHunkStatus};
use language::Point;
@@ -15,7 +15,7 @@ pub enum DisplayDiffHunk {
},
Unfolded {
- display_row_range: RangeInclusive<u32>,
+ display_row_range: Range<u32>,
status: DiffHunkStatus,
},
}
@@ -26,7 +26,7 @@ impl DisplayDiffHunk {
&DisplayDiffHunk::Folded { display_row } => display_row,
DisplayDiffHunk::Unfolded {
display_row_range, ..
- } => *display_row_range.start(),
+ } => display_row_range.start,
}
}
@@ -36,7 +36,7 @@ impl DisplayDiffHunk {
DisplayDiffHunk::Unfolded {
display_row_range, ..
- } => display_row_range.clone(),
+ } => display_row_range.start..=display_row_range.end - 1,
};
range.contains(&display_row)
@@ -77,16 +77,12 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
} else {
let start = hunk_start_point.to_display_point(snapshot).row();
- let hunk_end_row_inclusive = hunk
- .buffer_range
- .end
- .saturating_sub(1)
- .max(hunk.buffer_range.start);
+ let hunk_end_row_inclusive = hunk.buffer_range.end.max(hunk.buffer_range.start);
let hunk_end_point = Point::new(hunk_end_row_inclusive, 0);
let end = hunk_end_point.to_display_point(snapshot).row();
DisplayDiffHunk::Unfolded {
- display_row_range: start..=end,
+ display_row_range: start..end,
status: hunk.status(),
}
}
@@ -2841,20 +2841,15 @@ impl MultiBufferSnapshot {
})
}
- pub fn git_diff_hunks_in_range<'a>(
+ pub fn git_diff_hunks_in_range_rev<'a>(
&'a self,
row_range: Range<u32>,
- reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let mut cursor = self.excerpts.cursor::<Point>();
- if reversed {
- cursor.seek(&Point::new(row_range.end, 0), Bias::Left, &());
- if cursor.item().is_none() {
- cursor.prev(&());
- }
- } else {
- cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &());
+ cursor.seek(&Point::new(row_range.end, 0), Bias::Left, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
}
std::iter::from_fn(move || {
@@ -2884,7 +2879,7 @@ impl MultiBufferSnapshot {
let buffer_hunks = excerpt
.buffer
- .git_diff_hunks_intersecting_range(buffer_start..buffer_end, reversed)
+ .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end)
.filter_map(move |hunk| {
let start = multibuffer_start.row
+ hunk
@@ -2904,12 +2899,70 @@ impl MultiBufferSnapshot {
})
});
- if reversed {
- cursor.prev(&());
- } else {
- cursor.next(&());
+ cursor.prev(&());
+
+ Some(buffer_hunks)
+ })
+ .flatten()
+ }
+
+ pub fn git_diff_hunks_in_range<'a>(
+ &'a self,
+ row_range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.excerpts.cursor::<Point>();
+
+ cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &());
+
+ std::iter::from_fn(move || {
+ let excerpt = cursor.item()?;
+ let multibuffer_start = *cursor.start();
+ let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
+ if multibuffer_start.row >= row_range.end {
+ return None;
}
+ let mut buffer_start = excerpt.range.context.start;
+ let mut buffer_end = excerpt.range.context.end;
+ let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
+ let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
+
+ if row_range.start > multibuffer_start.row {
+ let buffer_start_point =
+ excerpt_start_point + Point::new(row_range.start - multibuffer_start.row, 0);
+ buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
+ }
+
+ if row_range.end < multibuffer_end.row {
+ let buffer_end_point =
+ excerpt_start_point + Point::new(row_range.end - multibuffer_start.row, 0);
+ buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
+ }
+
+ let buffer_hunks = excerpt
+ .buffer
+ .git_diff_hunks_intersecting_range(buffer_start..buffer_end)
+ .filter_map(move |hunk| {
+ let start = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .start
+ .saturating_sub(excerpt_start_point.row);
+ let end = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .end
+ .min(excerpt_end_point.row + 1)
+ .saturating_sub(excerpt_start_point.row);
+
+ Some(DiffHunk {
+ buffer_range: start..end,
+ diff_base_byte_range: hunk.diff_base_byte_range.clone(),
+ })
+ });
+
+ cursor.next(&());
+
Some(buffer_hunks)
})
.flatten()
@@ -4647,7 +4700,7 @@ mod tests {
assert_eq!(
snapshot
- .git_diff_hunks_in_range(0..12, false)
+ .git_diff_hunks_in_range(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
&expected,
@@ -4655,7 +4708,7 @@ mod tests {
assert_eq!(
snapshot
- .git_diff_hunks_in_range(0..12, true)
+ .git_diff_hunks_in_range_rev(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
expected
@@ -204,6 +204,7 @@ impl<'a> EditorTestContext<'a> {
self.assert_selections(expected_selections, marked_text.to_string())
}
+ #[track_caller]
pub fn assert_editor_background_highlights<Tag: 'static>(&mut self, marked_text: &str) {
let expected_ranges = self.ranges(marked_text);
let actual_ranges: Vec<Range<usize>> = self.update_editor(|editor, cx| {
@@ -220,6 +221,7 @@ impl<'a> EditorTestContext<'a> {
assert_set_eq!(actual_ranges, expected_ranges);
}
+ #[track_caller]
pub fn assert_editor_text_highlights<Tag: ?Sized + 'static>(&mut self, marked_text: &str) {
let expected_ranges = self.ranges(marked_text);
let snapshot = self.update_editor(|editor, cx| editor.snapshot(cx));
@@ -233,12 +235,14 @@ impl<'a> EditorTestContext<'a> {
assert_set_eq!(actual_ranges, expected_ranges);
}
+ #[track_caller]
pub fn assert_editor_selections(&mut self, expected_selections: Vec<Range<usize>>) {
let expected_marked_text =
generate_marked_text(&self.buffer_text(), &expected_selections, true);
self.assert_selections(expected_selections, expected_marked_text)
}
+ #[track_caller]
fn assert_selections(
&mut self,
expected_selections: Vec<Range<usize>>,
@@ -1,4 +1,4 @@
-use std::ops::Range;
+use std::{iter, ops::Range};
use sum_tree::SumTree;
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
@@ -75,18 +75,17 @@ impl BufferDiff {
&'a self,
range: Range<u32>,
buffer: &'a BufferSnapshot,
- reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let start = buffer.anchor_before(Point::new(range.start, 0));
let end = buffer.anchor_after(Point::new(range.end, 0));
- self.hunks_intersecting_range(start..end, buffer, reversed)
+
+ self.hunks_intersecting_range(start..end, buffer)
}
pub fn hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
- reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
@@ -94,15 +93,51 @@ impl BufferDiff {
!before_start && !after_end
});
- std::iter::from_fn(move || {
- if reversed {
- cursor.prev(buffer);
+ let anchor_iter = std::iter::from_fn(move || {
+ cursor.next(buffer);
+ cursor.item()
+ })
+ .flat_map(move |hunk| {
+ [
+ (&hunk.buffer_range.start, hunk.diff_base_byte_range.start),
+ (&hunk.buffer_range.end, hunk.diff_base_byte_range.end),
+ ]
+ .into_iter()
+ });
+
+ let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
+ iter::from_fn(move || {
+ let (start_point, start_base) = summaries.next()?;
+ let (end_point, end_base) = summaries.next()?;
+
+ let end_row = if end_point.column > 0 {
+ end_point.row + 1
} else {
- cursor.next(buffer);
- }
+ end_point.row
+ };
- let hunk = cursor.item()?;
+ Some(DiffHunk {
+ buffer_range: start_point.row..end_row,
+ diff_base_byte_range: start_base..end_base,
+ })
+ })
+ }
+ pub fn hunks_intersecting_range_rev<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
+ let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
+ let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
+ !before_start && !after_end
+ });
+
+ std::iter::from_fn(move || {
+ cursor.prev(buffer);
+
+ let hunk = cursor.item()?;
let range = hunk.buffer_range.to_point(buffer);
let end_row = if range.end.column > 0 {
range.end.row + 1
@@ -151,7 +186,7 @@ impl BufferDiff {
fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let start = text.anchor_before(Point::new(0, 0));
let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
- self.hunks_intersecting_range(start..end, text, false)
+ self.hunks_intersecting_range(start..end, text)
}
fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
@@ -279,6 +314,8 @@ pub fn assert_hunks<Iter>(
#[cfg(test)]
mod tests {
+ use std::assert_eq;
+
use super::*;
use text::Buffer;
use unindent::Unindent as _;
@@ -365,7 +402,7 @@ mod tests {
assert_eq!(diff.hunks(&buffer).count(), 8);
assert_hunks(
- diff.hunks_in_row_range(7..12, &buffer, false),
+ diff.hunks_in_row_range(7..12, &buffer),
&buffer,
&diff_base,
&[
@@ -2509,18 +2509,22 @@ impl BufferSnapshot {
pub fn git_diff_hunks_in_row_range<'a>(
&'a self,
range: Range<u32>,
- reversed: bool,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
- self.git_diff.hunks_in_row_range(range, self, reversed)
+ self.git_diff.hunks_in_row_range(range, self)
}
pub fn git_diff_hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
- reversed: bool,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
- self.git_diff
- .hunks_intersecting_range(range, self, reversed)
+ self.git_diff.hunks_intersecting_range(range, self)
+ }
+
+ pub fn git_diff_hunks_intersecting_range_rev<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
+ self.git_diff.hunks_intersecting_range_rev(range, self)
}
pub fn diagnostics_in_range<'a, T, O>(
@@ -2870,10 +2870,8 @@ impl Project {
if let Some(LanguageServerState::Running { watched_paths, .. }) =
self.language_servers.get_mut(&language_server_id)
{
- eprintln!("change watch");
let mut builders = HashMap::default();
for watcher in params.watchers {
- eprintln!(" {}", watcher.glob_pattern);
for worktree in &self.worktrees {
if let Some(worktree) = worktree.upgrade(cx) {
let worktree = worktree.read(cx);
@@ -150,13 +150,6 @@ impl RepositoryEntry {
.map(|entry| RepositoryWorkDirectory(entry.path.clone()))
}
- pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
- self.work_directory
- .relativize(snapshot, path)
- .and_then(|repo_path| self.statuses.get(&repo_path))
- .cloned()
- }
-
pub fn status_for_path(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
self.work_directory
.relativize(snapshot, path)
@@ -182,6 +175,14 @@ impl RepositoryEntry {
})
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn status_for_file(&self, snapshot: &Snapshot, path: &Path) -> Option<GitFileStatus> {
+ self.work_directory
+ .relativize(snapshot, path)
+ .and_then(|repo_path| (&self.statuses).get(&repo_path))
+ .cloned()
+ }
+
pub fn build_update(&self, other: &Self) -> proto::RepositoryEntry {
let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
let mut removed_statuses: Vec<String> = Vec::new();
@@ -1638,6 +1639,13 @@ impl Snapshot {
.map(|(path, entry)| (&path.0, entry))
}
+ /// Get the repository whose work directory contains the given path.
+ pub fn repository_for_work_directory(&self, path: &Path) -> Option<RepositoryEntry> {
+ self.repository_entries
+ .get(&RepositoryWorkDirectory(path.into()))
+ .cloned()
+ }
+
/// Get the repository whose work directory contains the given path.
pub fn repository_for_path(&self, path: &Path) -> Option<RepositoryEntry> {
let mut max_len = 0;
@@ -1653,7 +1661,7 @@ impl Snapshot {
}
}
- current_candidate.map(|entry| entry.to_owned())
+ current_candidate.cloned()
}
/// Given an ordered iterator of entries, returns an iterator of those entries,
@@ -3105,6 +3113,17 @@ impl BackgroundScanner {
.any(|component| component.as_os_str() == *DOT_GIT)
{
let scan_id = snapshot.scan_id;
+
+ if let Some(repository) = snapshot.repository_for_work_directory(path) {
+ let entry = repository.work_directory.0;
+ snapshot.git_repositories.remove(&entry);
+ snapshot
+ .snapshot
+ .repository_entries
+ .remove(&RepositoryWorkDirectory(path.into()));
+ return Some(());
+ }
+
let repo = snapshot.repository_for_path(&path)?;
let repo_path = repo.work_directory.relativize(&snapshot, &path)?;
@@ -3975,6 +3994,8 @@ mod tests {
#[gpui::test]
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+ // .gitignores are handled explicitly by Zed and do not use the git
+ // machinery that the git_tests module checks
let parent_dir = temp_tree(json!({
".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
"tree": {
@@ -4052,402 +4073,6 @@ mod tests {
});
}
- #[gpui::test]
- async fn test_git_repository_for_path(cx: &mut TestAppContext) {
- let root = temp_tree(json!({
- "c.txt": "",
- "dir1": {
- ".git": {},
- "deps": {
- "dep1": {
- ".git": {},
- "src": {
- "a.txt": ""
- }
- }
- },
- "src": {
- "b.txt": ""
- }
- },
- }));
-
- let http_client = FakeHttpClient::with_404_response();
- let client = cx.read(|cx| Client::new(http_client, cx));
- let tree = Worktree::local(
- client,
- root.path(),
- true,
- Arc::new(RealFs),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let tree = tree.as_local().unwrap();
-
- assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
- let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
- assert_eq!(
- entry
- .work_directory(tree)
- .map(|directory| directory.as_ref().to_owned()),
- Some(Path::new("dir1").to_owned())
- );
-
- let entry = tree
- .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
- .unwrap();
- assert_eq!(
- entry
- .work_directory(tree)
- .map(|directory| directory.as_ref().to_owned()),
- Some(Path::new("dir1/deps/dep1").to_owned())
- );
-
- let entries = tree.files(false, 0);
-
- let paths_with_repos = tree
- .entries_with_repositories(entries)
- .map(|(entry, repo)| {
- (
- entry.path.as_ref(),
- repo.and_then(|repo| {
- repo.work_directory(&tree)
- .map(|work_directory| work_directory.0.to_path_buf())
- }),
- )
- })
- .collect::<Vec<_>>();
-
- assert_eq!(
- paths_with_repos,
- &[
- (Path::new("c.txt"), None),
- (
- Path::new("dir1/deps/dep1/src/a.txt"),
- Some(Path::new("dir1/deps/dep1").into())
- ),
- (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
- ]
- );
- });
-
- let repo_update_events = Arc::new(Mutex::new(vec![]));
- tree.update(cx, |_, cx| {
- let repo_update_events = repo_update_events.clone();
- cx.subscribe(&tree, move |_, _, event, _| {
- if let Event::UpdatedGitRepositories(update) = event {
- repo_update_events.lock().push(update.clone());
- }
- })
- .detach();
- });
-
- std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
- tree.flush_fs_events(cx).await;
-
- assert_eq!(
- repo_update_events.lock()[0]
- .keys()
- .cloned()
- .collect::<Vec<Arc<Path>>>(),
- vec![Path::new("dir1").into()]
- );
-
- std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let tree = tree.as_local().unwrap();
-
- assert!(tree
- .repository_for_path("dir1/src/b.txt".as_ref())
- .is_none());
- });
- }
-
- #[gpui::test]
- async fn test_git_status(cx: &mut TestAppContext) {
- #[track_caller]
- fn git_init(path: &Path) -> git2::Repository {
- git2::Repository::init(path).expect("Failed to initialize git repository")
- }
-
- #[track_caller]
- fn git_add(path: &Path, repo: &git2::Repository) {
- let mut index = repo.index().expect("Failed to get index");
- index.add_path(path).expect("Failed to add a.txt");
- index.write().expect("Failed to write index");
- }
-
- #[track_caller]
- fn git_remove_index(path: &Path, repo: &git2::Repository) {
- let mut index = repo.index().expect("Failed to get index");
- index.remove_path(path).expect("Failed to add a.txt");
- index.write().expect("Failed to write index");
- }
-
- #[track_caller]
- fn git_commit(msg: &'static str, repo: &git2::Repository) {
- use git2::Signature;
-
- let signature = Signature::now("test", "test@zed.dev").unwrap();
- let oid = repo.index().unwrap().write_tree().unwrap();
- let tree = repo.find_tree(oid).unwrap();
- if let Some(head) = repo.head().ok() {
- let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
-
- let parent_commit = parent_obj.as_commit().unwrap();
-
- repo.commit(
- Some("HEAD"),
- &signature,
- &signature,
- msg,
- &tree,
- &[parent_commit],
- )
- .expect("Failed to commit with parent");
- } else {
- repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
- .expect("Failed to commit");
- }
- }
-
- #[track_caller]
- fn git_stash(repo: &mut git2::Repository) {
- use git2::Signature;
-
- let signature = Signature::now("test", "test@zed.dev").unwrap();
- repo.stash_save(&signature, "N/A", None)
- .expect("Failed to stash");
- }
-
- #[track_caller]
- fn git_reset(offset: usize, repo: &git2::Repository) {
- let head = repo.head().expect("Couldn't get repo head");
- let object = head.peel(git2::ObjectType::Commit).unwrap();
- let commit = object.as_commit().unwrap();
- let new_head = commit
- .parents()
- .inspect(|parnet| {
- parnet.message();
- })
- .skip(offset)
- .next()
- .expect("Not enough history");
- repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
- .expect("Could not reset");
- }
-
- #[allow(dead_code)]
- #[track_caller]
- fn git_status(repo: &git2::Repository) -> HashMap<String, git2::Status> {
- repo.statuses(None)
- .unwrap()
- .iter()
- .map(|status| (status.path().unwrap().to_string(), status.status()))
- .collect()
- }
-
- const IGNORE_RULE: &'static str = "**/target";
-
- let root = temp_tree(json!({
- "project": {
- "a.txt": "a",
- "b.txt": "bb",
- "c": {
- "d": {
- "e.txt": "eee"
- }
- },
- "f.txt": "ffff",
- "target": {
- "build_file": "???"
- },
- ".gitignore": IGNORE_RULE
- },
-
- }));
-
- let http_client = FakeHttpClient::with_404_response();
- let client = cx.read(|cx| Client::new(http_client, cx));
- let tree = Worktree::local(
- client,
- root.path(),
- true,
- Arc::new(RealFs),
- Default::default(),
- &mut cx.to_async(),
- )
- .await
- .unwrap();
-
- cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
- .await;
-
- const A_TXT: &'static str = "a.txt";
- const B_TXT: &'static str = "b.txt";
- const E_TXT: &'static str = "c/d/e.txt";
- const F_TXT: &'static str = "f.txt";
- const DOTGITIGNORE: &'static str = ".gitignore";
- const BUILD_FILE: &'static str = "target/build_file";
-
- let work_dir = root.path().join("project");
- let mut repo = git_init(work_dir.as_path());
- repo.add_ignore_rule(IGNORE_RULE).unwrap();
- git_add(Path::new(A_TXT), &repo);
- git_add(Path::new(E_TXT), &repo);
- git_add(Path::new(DOTGITIGNORE), &repo);
- git_commit("Initial commit", &repo);
-
- std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
-
- tree.flush_fs_events(cx).await;
-
- // Check that the right git state is observed on startup
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- assert_eq!(snapshot.repository_entries.iter().count(), 1);
- let (dir, repo) = snapshot.repository_entries.iter().next().unwrap();
- assert_eq!(dir.0.as_ref(), Path::new("project"));
-
- assert_eq!(repo.statuses.iter().count(), 3);
- assert_eq!(
- repo.statuses.get(&Path::new(A_TXT).into()),
- Some(&GitFileStatus::Modified)
- );
- assert_eq!(
- repo.statuses.get(&Path::new(B_TXT).into()),
- Some(&GitFileStatus::Added)
- );
- assert_eq!(
- repo.statuses.get(&Path::new(F_TXT).into()),
- Some(&GitFileStatus::Added)
- );
- });
-
- git_add(Path::new(A_TXT), &repo);
- git_add(Path::new(B_TXT), &repo);
- git_commit("Committing modified and added", &repo);
- tree.flush_fs_events(cx).await;
-
- // Check that repo only changes are tracked
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
-
- assert_eq!(repo.statuses.iter().count(), 1);
- assert_eq!(
- repo.statuses.get(&Path::new(F_TXT).into()),
- Some(&GitFileStatus::Added)
- );
- });
-
- git_reset(0, &repo);
- git_remove_index(Path::new(B_TXT), &repo);
- git_stash(&mut repo);
- std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
- std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
- tree.flush_fs_events(cx).await;
-
- // Check that more complex repo changes are tracked
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
-
- assert_eq!(repo.statuses.iter().count(), 3);
- assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
- assert_eq!(
- repo.statuses.get(&Path::new(B_TXT).into()),
- Some(&GitFileStatus::Added)
- );
- assert_eq!(
- repo.statuses.get(&Path::new(E_TXT).into()),
- Some(&GitFileStatus::Modified)
- );
- assert_eq!(
- repo.statuses.get(&Path::new(F_TXT).into()),
- Some(&GitFileStatus::Added)
- );
- });
-
- std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
- std::fs::remove_dir_all(work_dir.join("c")).unwrap();
- std::fs::write(
- work_dir.join(DOTGITIGNORE),
- [IGNORE_RULE, "f.txt"].join("\n"),
- )
- .unwrap();
-
- git_add(Path::new(DOTGITIGNORE), &repo);
- git_commit("Committing modified git ignore", &repo);
-
- tree.flush_fs_events(cx).await;
-
- // Check that non-repo behavior is tracked
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
-
- assert_eq!(repo.statuses.iter().count(), 0);
- });
-
- let mut renamed_dir_name = "first_directory/second_directory";
- const RENAMED_FILE: &'static str = "rf.txt";
-
- std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
- std::fs::write(
- work_dir.join(renamed_dir_name).join(RENAMED_FILE),
- "new-contents",
- )
- .unwrap();
-
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
-
- assert_eq!(repo.statuses.iter().count(), 1);
- assert_eq!(
- repo.statuses
- .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
- Some(&GitFileStatus::Added)
- );
- });
-
- renamed_dir_name = "new_first_directory/second_directory";
-
- std::fs::rename(
- work_dir.join("first_directory"),
- work_dir.join("new_first_directory"),
- )
- .unwrap();
-
- tree.flush_fs_events(cx).await;
-
- tree.read_with(cx, |tree, _cx| {
- let snapshot = tree.snapshot();
- let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
-
- assert_eq!(repo.statuses.iter().count(), 1);
- assert_eq!(
- repo.statuses
- .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
- Some(&GitFileStatus::Added)
- );
- });
- }
-
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
let dir = temp_tree(json!({
@@ -5100,4 +4725,478 @@ mod tests {
paths
}
}
+
+ mod git_tests {
+ use super::*;
+ use pretty_assertions::assert_eq;
+
+ #[gpui::test]
+ async fn test_rename_work_directory(cx: &mut TestAppContext) {
+ let root = temp_tree(json!({
+ "projects": {
+ "project1": {
+ "a": "",
+ "b": "",
+ }
+ },
+
+ }));
+ let root_path = root.path();
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = cx.read(|cx| Client::new(http_client, cx));
+ let tree = Worktree::local(
+ client,
+ root_path,
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let repo = git_init(&root_path.join("projects/project1"));
+ git_add("a", &repo);
+ git_commit("init", &repo);
+ std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ tree.flush_fs_events(cx).await;
+
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ let (work_dir, repo) = tree.repositories().next().unwrap();
+ assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+ assert_eq!(
+ repo.status_for_file(tree, Path::new("projects/project1/a")),
+ Some(GitFileStatus::Modified)
+ );
+ assert_eq!(
+ repo.status_for_file(tree, Path::new("projects/project1/b")),
+ Some(GitFileStatus::Added)
+ );
+ });
+
+ std::fs::rename(
+ root_path.join("projects/project1"),
+ root_path.join("projects/project2"),
+ )
+ .ok();
+ tree.flush_fs_events(cx).await;
+
+ cx.read(|cx| {
+ let tree = tree.read(cx);
+ let (work_dir, repo) = tree.repositories().next().unwrap();
+ assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+ assert_eq!(
+ repo.status_for_file(tree, Path::new("projects/project2/a")),
+ Some(GitFileStatus::Modified)
+ );
+ assert_eq!(
+ repo.status_for_file(tree, Path::new("projects/project2/b")),
+ Some(GitFileStatus::Added)
+ );
+ });
+ }
+
+ #[gpui::test]
+ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+ let root = temp_tree(json!({
+ "c.txt": "",
+ "dir1": {
+ ".git": {},
+ "deps": {
+ "dep1": {
+ ".git": {},
+ "src": {
+ "a.txt": ""
+ }
+ }
+ },
+ "src": {
+ "b.txt": ""
+ }
+ },
+ }));
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = cx.read(|cx| Client::new(http_client, cx));
+ let tree = Worktree::local(
+ client,
+ root.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+ let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+ assert_eq!(
+ entry
+ .work_directory(tree)
+ .map(|directory| directory.as_ref().to_owned()),
+ Some(Path::new("dir1").to_owned())
+ );
+
+ let entry = tree
+ .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+ .unwrap();
+ assert_eq!(
+ entry
+ .work_directory(tree)
+ .map(|directory| directory.as_ref().to_owned()),
+ Some(Path::new("dir1/deps/dep1").to_owned())
+ );
+
+ let entries = tree.files(false, 0);
+
+ let paths_with_repos = tree
+ .entries_with_repositories(entries)
+ .map(|(entry, repo)| {
+ (
+ entry.path.as_ref(),
+ repo.and_then(|repo| {
+ repo.work_directory(&tree)
+ .map(|work_directory| work_directory.0.to_path_buf())
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ paths_with_repos,
+ &[
+ (Path::new("c.txt"), None),
+ (
+ Path::new("dir1/deps/dep1/src/a.txt"),
+ Some(Path::new("dir1/deps/dep1").into())
+ ),
+ (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+ ]
+ );
+ });
+
+ let repo_update_events = Arc::new(Mutex::new(vec![]));
+ tree.update(cx, |_, cx| {
+ let repo_update_events = repo_update_events.clone();
+ cx.subscribe(&tree, move |_, _, event, _| {
+ if let Event::UpdatedGitRepositories(update) = event {
+ repo_update_events.lock().push(update.clone());
+ }
+ })
+ .detach();
+ });
+
+ std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+ tree.flush_fs_events(cx).await;
+
+ assert_eq!(
+ repo_update_events.lock()[0]
+ .keys()
+ .cloned()
+ .collect::<Vec<Arc<Path>>>(),
+ vec![Path::new("dir1").into()]
+ );
+
+ std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree
+ .repository_for_path("dir1/src/b.txt".as_ref())
+ .is_none());
+ });
+ }
+
+ #[gpui::test]
+ async fn test_git_status(cx: &mut TestAppContext) {
+ const IGNORE_RULE: &'static str = "**/target";
+
+ let root = temp_tree(json!({
+ "project": {
+ "a.txt": "a",
+ "b.txt": "bb",
+ "c": {
+ "d": {
+ "e.txt": "eee"
+ }
+ },
+ "f.txt": "ffff",
+ "target": {
+ "build_file": "???"
+ },
+ ".gitignore": IGNORE_RULE
+ },
+
+ }));
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = cx.read(|cx| Client::new(http_client, cx));
+ let tree = Worktree::local(
+ client,
+ root.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+
+ const A_TXT: &'static str = "a.txt";
+ const B_TXT: &'static str = "b.txt";
+ const E_TXT: &'static str = "c/d/e.txt";
+ const F_TXT: &'static str = "f.txt";
+ const DOTGITIGNORE: &'static str = ".gitignore";
+ const BUILD_FILE: &'static str = "target/build_file";
+
+ let work_dir = root.path().join("project");
+ let mut repo = git_init(work_dir.as_path());
+ repo.add_ignore_rule(IGNORE_RULE).unwrap();
+ git_add(Path::new(A_TXT), &repo);
+ git_add(Path::new(E_TXT), &repo);
+ git_add(Path::new(DOTGITIGNORE), &repo);
+ git_commit("Initial commit", &repo);
+
+ std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+
+ tree.flush_fs_events(cx).await;
+
+ // Check that the right git state is observed on startup
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ assert_eq!(snapshot.repository_entries.iter().count(), 1);
+ let (dir, repo) = snapshot.repository_entries.iter().next().unwrap();
+ assert_eq!(dir.0.as_ref(), Path::new("project"));
+
+ assert_eq!(repo.statuses.iter().count(), 3);
+ assert_eq!(
+ repo.statuses.get(&Path::new(A_TXT).into()),
+ Some(&GitFileStatus::Modified)
+ );
+ assert_eq!(
+ repo.statuses.get(&Path::new(B_TXT).into()),
+ Some(&GitFileStatus::Added)
+ );
+ assert_eq!(
+ repo.statuses.get(&Path::new(F_TXT).into()),
+ Some(&GitFileStatus::Added)
+ );
+ });
+
+ git_add(Path::new(A_TXT), &repo);
+ git_add(Path::new(B_TXT), &repo);
+ git_commit("Committing modified and added", &repo);
+ tree.flush_fs_events(cx).await;
+
+ // Check that repo only changes are tracked
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
+
+ assert_eq!(repo.statuses.iter().count(), 1);
+ assert_eq!(
+ repo.statuses.get(&Path::new(F_TXT).into()),
+ Some(&GitFileStatus::Added)
+ );
+ });
+
+ git_reset(0, &repo);
+ git_remove_index(Path::new(B_TXT), &repo);
+ git_stash(&mut repo);
+ std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+ std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+ tree.flush_fs_events(cx).await;
+
+ // Check that more complex repo changes are tracked
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
+
+ assert_eq!(repo.statuses.iter().count(), 3);
+ assert_eq!(repo.statuses.get(&Path::new(A_TXT).into()), None);
+ assert_eq!(
+ repo.statuses.get(&Path::new(B_TXT).into()),
+ Some(&GitFileStatus::Added)
+ );
+ assert_eq!(
+ repo.statuses.get(&Path::new(E_TXT).into()),
+ Some(&GitFileStatus::Modified)
+ );
+ assert_eq!(
+ repo.statuses.get(&Path::new(F_TXT).into()),
+ Some(&GitFileStatus::Added)
+ );
+ });
+
+ std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+ std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+ std::fs::write(
+ work_dir.join(DOTGITIGNORE),
+ [IGNORE_RULE, "f.txt"].join("\n"),
+ )
+ .unwrap();
+
+ git_add(Path::new(DOTGITIGNORE), &repo);
+ git_commit("Committing modified git ignore", &repo);
+
+ tree.flush_fs_events(cx).await;
+
+ // Check that non-repo behavior is tracked
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
+
+ assert_eq!(repo.statuses.iter().count(), 0);
+ });
+
+ let mut renamed_dir_name = "first_directory/second_directory";
+ const RENAMED_FILE: &'static str = "rf.txt";
+
+ std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+ std::fs::write(
+ work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+ "new-contents",
+ )
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
+
+ assert_eq!(repo.statuses.iter().count(), 1);
+ assert_eq!(
+ repo.statuses
+ .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
+ Some(&GitFileStatus::Added)
+ );
+ });
+
+ renamed_dir_name = "new_first_directory/second_directory";
+
+ std::fs::rename(
+ work_dir.join("first_directory"),
+ work_dir.join("new_first_directory"),
+ )
+ .unwrap();
+
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let snapshot = tree.snapshot();
+ let (_, repo) = snapshot.repository_entries.iter().next().unwrap();
+
+ assert_eq!(repo.statuses.iter().count(), 1);
+ assert_eq!(
+ repo.statuses
+ .get(&Path::new(renamed_dir_name).join(RENAMED_FILE).into()),
+ Some(&GitFileStatus::Added)
+ );
+ });
+ }
+
+ #[track_caller]
+ fn git_init(path: &Path) -> git2::Repository {
+ git2::Repository::init(path).expect("Failed to initialize git repository")
+ }
+
+ #[track_caller]
+ fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+ let path = path.as_ref();
+ let mut index = repo.index().expect("Failed to get index");
+ index.add_path(path).expect("Failed to add file");
+ index.write().expect("Failed to write index");
+ }
+
+ #[track_caller]
+ fn git_remove_index(path: &Path, repo: &git2::Repository) {
+ let mut index = repo.index().expect("Failed to get index");
+ index.remove_path(path).expect("Failed to remove file");
+ index.write().expect("Failed to write index");
+ }
+
+ #[track_caller]
+ fn git_commit(msg: &'static str, repo: &git2::Repository) {
+ use git2::Signature;
+
+ let signature = Signature::now("test", "test@zed.dev").unwrap();
+ let oid = repo.index().unwrap().write_tree().unwrap();
+ let tree = repo.find_tree(oid).unwrap();
+ if let Some(head) = repo.head().ok() {
+ let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+ let parent_commit = parent_obj.as_commit().unwrap();
+
+ repo.commit(
+ Some("HEAD"),
+ &signature,
+ &signature,
+ msg,
+ &tree,
+ &[parent_commit],
+ )
+ .expect("Failed to commit with parent");
+ } else {
+ repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+ .expect("Failed to commit");
+ }
+ }
+
+ #[track_caller]
+ fn git_stash(repo: &mut git2::Repository) {
+ use git2::Signature;
+
+ let signature = Signature::now("test", "test@zed.dev").unwrap();
+ repo.stash_save(&signature, "N/A", None)
+ .expect("Failed to stash");
+ }
+
+ #[track_caller]
+ fn git_reset(offset: usize, repo: &git2::Repository) {
+ let head = repo.head().expect("Couldn't get repo head");
+ let object = head.peel(git2::ObjectType::Commit).unwrap();
+ let commit = object.as_commit().unwrap();
+ let new_head = commit
+ .parents()
+ .inspect(|parent| {
+ parent.message();
+ })
+ .skip(offset)
+ .next()
+ .expect("Not enough history");
+ repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+ .expect("Could not reset");
+ }
+
+ #[allow(dead_code)]
+ #[track_caller]
+ fn git_status(repo: &git2::Repository) -> HashMap<String, git2::Status> {
+ repo.statuses(None)
+ .unwrap()
+ .iter()
+ .map(|status| (status.path().unwrap().to_string(), status.status()))
+ .collect()
+ }
+ }
}
@@ -1783,6 +1783,19 @@ impl BufferSnapshot {
where
D: 'a + TextDimension,
A: 'a + IntoIterator<Item = &'a Anchor>,
+ {
+ let anchors = anchors.into_iter();
+ self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
+ .map(|d| d.0)
+ }
+
+ pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
+ &'a self,
+ anchors: A,
+ ) -> impl 'a + Iterator<Item = (D, T)>
+ where
+ D: 'a + TextDimension,
+ A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
{
let anchors = anchors.into_iter();
let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
@@ -1790,11 +1803,11 @@ impl BufferSnapshot {
let mut text_cursor = self.visible_text.cursor(0);
let mut position = D::default();
- anchors.map(move |anchor| {
+ anchors.map(move |(anchor, payload)| {
if *anchor == Anchor::MIN {
- return D::default();
+ return (D::default(), payload);
} else if *anchor == Anchor::MAX {
- return D::from_text_summary(&self.visible_text.summary());
+ return (D::from_text_summary(&self.visible_text.summary()), payload);
}
let anchor_key = InsertionFragmentKey {
@@ -1825,7 +1838,7 @@ impl BufferSnapshot {
}
position.add_assign(&text_cursor.summary(fragment_offset));
- position.clone()
+ (position.clone(), payload)
})
}