From 00b7c78e33da3d920f8534ab8d540fde82822ea5 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 9 Dec 2022 15:24:13 -0500 Subject: [PATCH 01/17] Initial hacky displaying of git gutter in multi-buffers --- crates/editor/src/multi_buffer.rs | 70 ++++++++++++++++++++++++++++--- crates/git/src/diff.rs | 10 ++--- crates/language/src/buffer.rs | 5 +-- crates/sum_tree/src/cursor.rs | 4 ++ 4 files changed, 76 insertions(+), 13 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index d758792e6c29f88e1b17a344c949d2c50f999454..736db68965e9053d733c8ee058df974578a4e2c3 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2635,11 +2635,71 @@ impl MultiBufferSnapshot { row_range: Range, reversed: bool, ) -> impl 'a + Iterator> { - self.as_singleton() - .into_iter() - .flat_map(move |(_, _, buffer)| { - buffer.git_diff_hunks_in_range(row_range.clone(), reversed) - }) + // dbg!(&row_range); + let mut lines_advance = 0; + let mut cursor = self.excerpts.filter::<_, ExcerptSummary>(move |summary| { + let filter = summary.text.lines.row + lines_advance >= row_range.start + && lines_advance <= row_range.end; + lines_advance += summary.text.lines.row; + filter + }); + + let mut lines_advance = 0; + std::iter::from_fn(move || { + cursor.next(&()); + let excerpt = cursor.item()?; + let summary = cursor.item_summary()?; + + let range = excerpt.range.context.clone(); + let range_start_row = range.start.to_point(&excerpt.buffer).row; + let range_end_row = range.end.to_point(&excerpt.buffer).row; + // dbg!(range_start_row); + let a = Some(excerpt.buffer.git_diff_hunks_in_range(range, reversed).map( + move |mut hunk| { + hunk.buffer_range.start = hunk.buffer_range.start.max(range_start_row) + - range_start_row + + lines_advance; + hunk.buffer_range.end = hunk.buffer_range.end.max(range_start_row) + - range_start_row + + lines_advance; + hunk + }, + )); + lines_advance += summary.text.lines.row; + a + }) + .flatten() + // let mut cursor = self.excerpts.cursor::(); + // cursor.seek(&Point::new(row_range.start, 0), Bias::Left, &()); + + // let mut is_first = true; + // let mut advance = 0; + // std::iter::from_fn(move || { + // if !is_first { + // cursor.next(&()); + // } + // is_first = false; + + // let (item, summary) = match (cursor.item(), cursor.item_summary()) { + // (Some(item), Some(summary)) => (item, summary), + // _ => return None, + // }; + + // // dbg!(&advance); + // // if advance > row_range.end { + // // println!("returning none"); + // // return None; + // // } + + // // let row_range = row_range.start - advance..row_range.end - advance; + // // println!("returning an iterator, {row_range:?}"); + // // // summary. 
+ // // advance += summary.text.lines.row; + // Some(item.buffer.git_diff_hunks_in_range(row_range, reversed)) + + // item.range + // }) + // .flatten() } pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index e808eee24f4f850a30c00c6813459a1ec1e243c8..3a818e65055d4dccfe2676ff7e4a6e26f49757bb 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -73,16 +73,16 @@ impl BufferDiff { pub fn hunks_in_range<'a>( &'a self, - query_row_range: Range, + range: Range, buffer: &'a BufferSnapshot, reversed: bool, ) -> impl 'a + Iterator> { - let start = buffer.anchor_before(Point::new(query_row_range.start, 0)); - let end = buffer.anchor_after(Point::new(query_row_range.end, 0)); + // let start = buffer.anchor_before(Point::new(query_row_range.start, 0)); + // let end = buffer.anchor_after(Point::new(query_row_range.end, 0)); let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { - let before_start = summary.buffer_range.end.cmp(&start, buffer).is_lt(); - let after_end = summary.buffer_range.start.cmp(&end, buffer).is_gt(); + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); !before_start && !after_end }); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index e8bc2bf314c45784de7653c1d63d803379166f2c..44fa49495b38a804d73211ed607433e010e01ae4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2312,11 +2312,10 @@ impl BufferSnapshot { pub fn git_diff_hunks_in_range<'a>( &'a self, - query_row_range: Range, + range: Range, reversed: bool, ) -> impl 'a + Iterator> { - self.git_diff - .hunks_in_range(query_row_range, self, reversed) + self.git_diff.hunks_in_range(range, self, reversed) } pub fn diagnostics_in_range<'a, T, O>( diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 52200d64cf781bd9f07f9222891711bbed720a15..88412f60598e8e5eaafbb52515089b580e2613a2 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -597,6 +597,10 @@ where self.cursor.item() } + pub fn item_summary(&self) -> Option<&'a T::Summary> { + self.cursor.item_summary() + } + pub fn next(&mut self, cx: &::Context) { self.cursor.next_internal(&mut self.filter_node, cx); } From 7c3dc1e3dca12bd7c1729524f76754bf9d2e75cc Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 12 Dec 2022 11:57:14 -0500 Subject: [PATCH 02/17] Cleanup --- crates/editor/src/multi_buffer.rs | 34 ------------------------------- crates/git/src/diff.rs | 3 --- 2 files changed, 37 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 736db68965e9053d733c8ee058df974578a4e2c3..a386403a4132da5a35b85b3991fbe05dc13f3fa0 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2635,7 +2635,6 @@ impl MultiBufferSnapshot { row_range: Range, reversed: bool, ) -> impl 'a + Iterator> { - // dbg!(&row_range); let mut lines_advance = 0; let mut cursor = self.excerpts.filter::<_, ExcerptSummary>(move |summary| { let filter = summary.text.lines.row + lines_advance >= row_range.start @@ -2652,8 +2651,6 @@ impl MultiBufferSnapshot { let range = excerpt.range.context.clone(); let range_start_row = range.start.to_point(&excerpt.buffer).row; - let range_end_row = range.end.to_point(&excerpt.buffer).row; - // dbg!(range_start_row); let a = Some(excerpt.buffer.git_diff_hunks_in_range(range, 
reversed).map( move |mut hunk| { hunk.buffer_range.start = hunk.buffer_range.start.max(range_start_row) @@ -2669,37 +2666,6 @@ impl MultiBufferSnapshot { a }) .flatten() - // let mut cursor = self.excerpts.cursor::(); - // cursor.seek(&Point::new(row_range.start, 0), Bias::Left, &()); - - // let mut is_first = true; - // let mut advance = 0; - // std::iter::from_fn(move || { - // if !is_first { - // cursor.next(&()); - // } - // is_first = false; - - // let (item, summary) = match (cursor.item(), cursor.item_summary()) { - // (Some(item), Some(summary)) => (item, summary), - // _ => return None, - // }; - - // // dbg!(&advance); - // // if advance > row_range.end { - // // println!("returning none"); - // // return None; - // // } - - // // let row_range = row_range.start - advance..row_range.end - advance; - // // println!("returning an iterator, {row_range:?}"); - // // // summary. - // // advance += summary.text.lines.row; - // Some(item.buffer.git_diff_hunks_in_range(row_range, reversed)) - - // item.range - // }) - // .flatten() } pub fn range_for_syntax_ancestor(&self, range: Range) -> Option> { diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 3a818e65055d4dccfe2676ff7e4a6e26f49757bb..61396e9278115cc9b1aa3a0f680395d76f3f96e4 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -77,9 +77,6 @@ impl BufferDiff { buffer: &'a BufferSnapshot, reversed: bool, ) -> impl 'a + Iterator> { - // let start = buffer.anchor_before(Point::new(query_row_range.start, 0)); - // let end = buffer.anchor_after(Point::new(query_row_range.end, 0)); - let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); From 2cd9987b54daf35e05910a2ac35c348a32c34689 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 12 Dec 2022 16:24:21 -0500 Subject: [PATCH 03/17] Git diff recalc in project search --- crates/search/src/project_search.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 13b754a4178eae117aea5b1090751c6b738c27ec..dda8a7ec9ce758864be0077dc3fd0abe8198da73 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -334,6 +334,15 @@ impl Item for ProjectSearchView { .update(cx, |editor, cx| editor.navigate(data, cx)) } + fn git_diff_recalc( + &mut self, + project: ModelHandle, + cx: &mut ViewContext, + ) -> Task> { + self.results_editor + .update(cx, |editor, cx| editor.git_diff_recalc(project, cx)) + } + fn to_item_events(event: &Self::Event) -> Vec { match event { ViewEvent::UpdateTab => vec![ItemEvent::UpdateBreadcrumbs, ItemEvent::UpdateTab], From ecd44e69144f01a0ac2c52f96d2b2e835b7b2973 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 12 Dec 2022 16:54:14 -0500 Subject: [PATCH 04/17] Git diff recalc in project diagnostics --- crates/diagnostics/src/diagnostics.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 9122706ad388b1f736c4b7eca6cf853bdb3c92f2..cb1ad13656c317170c15b5b05ef6b8ada0e3dbc4 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -575,6 +575,15 @@ impl Item for ProjectDiagnosticsEditor { unreachable!() } + fn git_diff_recalc( + &mut self, + project: ModelHandle, + cx: &mut ViewContext, + ) -> Task> { + self.editor + .update(cx, |editor, 
cx| editor.git_diff_recalc(project, cx)) + } + fn to_item_events(event: &Self::Event) -> Vec { Editor::to_item_events(event) } From cf721732824de9e716cd380faca829a8ae5ce07d Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 13 Dec 2022 13:58:50 -0500 Subject: [PATCH 05/17] Clamp end of visual git hunk to requested range --- crates/editor/src/multi_buffer.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index a386403a4132da5a35b85b3991fbe05dc13f3fa0..92801d1b47671670201e01b7d8eefdaac3b0fa5a 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2651,12 +2651,13 @@ impl MultiBufferSnapshot { let range = excerpt.range.context.clone(); let range_start_row = range.start.to_point(&excerpt.buffer).row; + let range_end_row = range.end.to_point(&excerpt.buffer).row; let a = Some(excerpt.buffer.git_diff_hunks_in_range(range, reversed).map( move |mut hunk| { hunk.buffer_range.start = hunk.buffer_range.start.max(range_start_row) - range_start_row + lines_advance; - hunk.buffer_range.end = hunk.buffer_range.end.max(range_start_row) + hunk.buffer_range.end = hunk.buffer_range.end.min(range_end_row + 1) - range_start_row + lines_advance; hunk From 2bc685281c11c9cc01219608dffcc193e9cf9e45 Mon Sep 17 00:00:00 2001 From: Kay Simmons Date: Wed, 14 Dec 2022 15:59:50 -0800 Subject: [PATCH 06/17] Add recent project picker --- Cargo.lock | 19 + Cargo.toml | 1 + crates/file_finder/src/file_finder.rs | 7 +- crates/fuzzy/src/fuzzy.rs | 796 +----------------- crates/fuzzy/src/matcher.rs | 463 ++++++++++ crates/fuzzy/src/paths.rs | 174 ++++ crates/fuzzy/src/strings.rs | 161 ++++ crates/outline/src/outline.rs | 4 +- crates/recent_projects/Cargo.toml | 22 + .../src/highlighted_workspace_location.rs | 129 +++ crates/recent_projects/src/recent_projects.rs | 187 ++++ crates/workspace/src/workspace.rs | 2 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + 14 files changed, 1170 insertions(+), 797 deletions(-) create mode 100644 crates/fuzzy/src/matcher.rs create mode 100644 crates/fuzzy/src/paths.rs create mode 100644 crates/fuzzy/src/strings.rs create mode 100644 crates/recent_projects/Cargo.toml create mode 100644 crates/recent_projects/src/highlighted_workspace_location.rs create mode 100644 crates/recent_projects/src/recent_projects.rs diff --git a/Cargo.lock b/Cargo.lock index 57f2bdbdc4ea106603b877d08ced8e8bd1204e07..d49637090767ba3f59d20a566ea088a1038b97b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4805,6 +4805,24 @@ dependencies = [ "rand_core 0.3.1", ] +[[package]] +name = "recent_projects" +version = "0.1.0" +dependencies = [ + "db", + "editor", + "fuzzy", + "gpui", + "language", + "ordered-float", + "picker", + "postage", + "settings", + "smol", + "text", + "workspace", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -8152,6 +8170,7 @@ dependencies = [ "project_panel", "project_symbols", "rand 0.8.5", + "recent_projects", "regex", "rpc", "rsa", diff --git a/Cargo.toml b/Cargo.toml index c4f54d6a90de8d65be9ed32c5979a40270707aac..1ace51dbd5c417290438bc2314920666001d4efd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,6 +40,7 @@ members = [ "crates/project", "crates/project_panel", "crates/project_symbols", + "crates/recent_projects", "crates/rope", "crates/rpc", "crates/search", diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 5122a46c2c161b474627bf7bfabeb72b4625f884..7561a68222c4d30133659cf4ae0901a1f0ea1ffa 100644 --- 
a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -62,11 +62,12 @@ impl View for FileFinder { impl FileFinder { fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec, String, Vec) { - let path_string = path_match.path.to_string_lossy(); + let path = &path_match.path; + let path_string = path.to_string_lossy(); let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join(""); let path_positions = path_match.positions.clone(); - let file_name = path_match.path.file_name().map_or_else( + let file_name = path.file_name().map_or_else( || path_match.path_prefix.to_string(), |file_name| file_name.to_string_lossy().to_string(), ); @@ -161,7 +162,7 @@ impl FileFinder { self.cancel_flag = Arc::new(AtomicBool::new(false)); let cancel_flag = self.cancel_flag.clone(); cx.spawn(|this, mut cx| async move { - let matches = fuzzy::match_paths( + let matches = fuzzy::match_path_sets( candidate_sets.as_slice(), &query, false, diff --git a/crates/fuzzy/src/fuzzy.rs b/crates/fuzzy/src/fuzzy.rs index 2f108b6274287e773fdf7cfe57a56d609f5245e8..49680236442b0ccddc964deb562673451a40fe37 100644 --- a/crates/fuzzy/src/fuzzy.rs +++ b/crates/fuzzy/src/fuzzy.rs @@ -1,794 +1,8 @@ mod char_bag; - -use gpui::executor; -use std::{ - borrow::Cow, - cmp::{self, Ordering}, - path::Path, - sync::atomic::{self, AtomicBool}, - sync::Arc, -}; +mod matcher; +mod paths; +mod strings; pub use char_bag::CharBag; - -const BASE_DISTANCE_PENALTY: f64 = 0.6; -const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; -const MIN_DISTANCE_PENALTY: f64 = 0.2; - -pub struct Matcher<'a> { - query: &'a [char], - lowercase_query: &'a [char], - query_char_bag: CharBag, - smart_case: bool, - max_results: usize, - min_score: f64, - match_positions: Vec, - last_positions: Vec, - score_matrix: Vec>, - best_position_matrix: Vec, -} - -trait Match: Ord { - fn score(&self) -> f64; - fn set_positions(&mut self, positions: Vec); -} - -trait MatchCandidate { - fn has_chars(&self, bag: CharBag) -> bool; - fn to_string(&self) -> Cow<'_, str>; -} - -#[derive(Clone, Debug)] -pub struct PathMatchCandidate<'a> { - pub path: &'a Arc, - pub char_bag: CharBag, -} - -#[derive(Clone, Debug)] -pub struct PathMatch { - pub score: f64, - pub positions: Vec, - pub worktree_id: usize, - pub path: Arc, - pub path_prefix: Arc, -} - -#[derive(Clone, Debug)] -pub struct StringMatchCandidate { - pub id: usize, - pub string: String, - pub char_bag: CharBag, -} - -pub trait PathMatchCandidateSet<'a>: Send + Sync { - type Candidates: Iterator>; - fn id(&self) -> usize; - fn len(&self) -> usize; - fn is_empty(&self) -> bool { - self.len() == 0 - } - fn prefix(&self) -> Arc; - fn candidates(&'a self, start: usize) -> Self::Candidates; -} - -impl Match for PathMatch { - fn score(&self) -> f64 { - self.score - } - - fn set_positions(&mut self, positions: Vec) { - self.positions = positions; - } -} - -impl Match for StringMatch { - fn score(&self) -> f64 { - self.score - } - - fn set_positions(&mut self, positions: Vec) { - self.positions = positions; - } -} - -impl<'a> MatchCandidate for PathMatchCandidate<'a> { - fn has_chars(&self, bag: CharBag) -> bool { - self.char_bag.is_superset(bag) - } - - fn to_string(&self) -> Cow<'a, str> { - self.path.to_string_lossy() - } -} - -impl StringMatchCandidate { - pub fn new(id: usize, string: String) -> Self { - Self { - id, - char_bag: CharBag::from(string.as_str()), - string, - } - } -} - -impl<'a> MatchCandidate for &'a StringMatchCandidate { - fn has_chars(&self, bag: CharBag) -> bool { 
- self.char_bag.is_superset(bag) - } - - fn to_string(&self) -> Cow<'a, str> { - self.string.as_str().into() - } -} - -#[derive(Clone, Debug)] -pub struct StringMatch { - pub candidate_id: usize, - pub score: f64, - pub positions: Vec, - pub string: String, -} - -impl PartialEq for StringMatch { - fn eq(&self, other: &Self) -> bool { - self.cmp(other).is_eq() - } -} - -impl Eq for StringMatch {} - -impl PartialOrd for StringMatch { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for StringMatch { - fn cmp(&self, other: &Self) -> Ordering { - self.score - .partial_cmp(&other.score) - .unwrap_or(Ordering::Equal) - .then_with(|| self.candidate_id.cmp(&other.candidate_id)) - } -} - -impl PartialEq for PathMatch { - fn eq(&self, other: &Self) -> bool { - self.cmp(other).is_eq() - } -} - -impl Eq for PathMatch {} - -impl PartialOrd for PathMatch { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for PathMatch { - fn cmp(&self, other: &Self) -> Ordering { - self.score - .partial_cmp(&other.score) - .unwrap_or(Ordering::Equal) - .then_with(|| self.worktree_id.cmp(&other.worktree_id)) - .then_with(|| Arc::as_ptr(&self.path).cmp(&Arc::as_ptr(&other.path))) - } -} - -pub async fn match_strings( - candidates: &[StringMatchCandidate], - query: &str, - smart_case: bool, - max_results: usize, - cancel_flag: &AtomicBool, - background: Arc, -) -> Vec { - if candidates.is_empty() || max_results == 0 { - return Default::default(); - } - - if query.is_empty() { - return candidates - .iter() - .map(|candidate| StringMatch { - candidate_id: candidate.id, - score: 0., - positions: Default::default(), - string: candidate.string.clone(), - }) - .collect(); - } - - let lowercase_query = query.to_lowercase().chars().collect::>(); - let query = query.chars().collect::>(); - - let lowercase_query = &lowercase_query; - let query = &query; - let query_char_bag = CharBag::from(&lowercase_query[..]); - - let num_cpus = background.num_cpus().min(candidates.len()); - let segment_size = (candidates.len() + num_cpus - 1) / num_cpus; - let mut segment_results = (0..num_cpus) - .map(|_| Vec::with_capacity(max_results.min(candidates.len()))) - .collect::>(); - - background - .scoped(|scope| { - for (segment_idx, results) in segment_results.iter_mut().enumerate() { - let cancel_flag = &cancel_flag; - scope.spawn(async move { - let segment_start = cmp::min(segment_idx * segment_size, candidates.len()); - let segment_end = cmp::min(segment_start + segment_size, candidates.len()); - let mut matcher = Matcher::new( - query, - lowercase_query, - query_char_bag, - smart_case, - max_results, - ); - matcher.match_strings( - &candidates[segment_start..segment_end], - results, - cancel_flag, - ); - }); - } - }) - .await; - - let mut results = Vec::new(); - for segment_result in segment_results { - if results.is_empty() { - results = segment_result; - } else { - util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); - } - } - results -} - -pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>( - candidate_sets: &'a [Set], - query: &str, - smart_case: bool, - max_results: usize, - cancel_flag: &AtomicBool, - background: Arc, -) -> Vec { - let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum(); - if path_count == 0 { - return Vec::new(); - } - - let lowercase_query = query.to_lowercase().chars().collect::>(); - let query = query.chars().collect::>(); - - let lowercase_query = &lowercase_query; - let query = 
&query; - let query_char_bag = CharBag::from(&lowercase_query[..]); - - let num_cpus = background.num_cpus().min(path_count); - let segment_size = (path_count + num_cpus - 1) / num_cpus; - let mut segment_results = (0..num_cpus) - .map(|_| Vec::with_capacity(max_results)) - .collect::>(); - - background - .scoped(|scope| { - for (segment_idx, results) in segment_results.iter_mut().enumerate() { - scope.spawn(async move { - let segment_start = segment_idx * segment_size; - let segment_end = segment_start + segment_size; - let mut matcher = Matcher::new( - query, - lowercase_query, - query_char_bag, - smart_case, - max_results, - ); - - let mut tree_start = 0; - for candidate_set in candidate_sets { - let tree_end = tree_start + candidate_set.len(); - - if tree_start < segment_end && segment_start < tree_end { - let start = cmp::max(tree_start, segment_start) - tree_start; - let end = cmp::min(tree_end, segment_end) - tree_start; - let candidates = candidate_set.candidates(start).take(end - start); - - matcher.match_paths( - candidate_set.id(), - candidate_set.prefix(), - candidates, - results, - cancel_flag, - ); - } - if tree_end >= segment_end { - break; - } - tree_start = tree_end; - } - }) - } - }) - .await; - - let mut results = Vec::new(); - for segment_result in segment_results { - if results.is_empty() { - results = segment_result; - } else { - util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); - } - } - results -} - -impl<'a> Matcher<'a> { - pub fn new( - query: &'a [char], - lowercase_query: &'a [char], - query_char_bag: CharBag, - smart_case: bool, - max_results: usize, - ) -> Self { - Self { - query, - lowercase_query, - query_char_bag, - min_score: 0.0, - last_positions: vec![0; query.len()], - match_positions: vec![0; query.len()], - score_matrix: Vec::new(), - best_position_matrix: Vec::new(), - smart_case, - max_results, - } - } - - pub fn match_strings( - &mut self, - candidates: &[StringMatchCandidate], - results: &mut Vec, - cancel_flag: &AtomicBool, - ) { - self.match_internal( - &[], - &[], - candidates.iter(), - results, - cancel_flag, - |candidate, score| StringMatch { - candidate_id: candidate.id, - score, - positions: Vec::new(), - string: candidate.string.to_string(), - }, - ) - } - - pub fn match_paths<'c: 'a>( - &mut self, - tree_id: usize, - path_prefix: Arc, - path_entries: impl Iterator>, - results: &mut Vec, - cancel_flag: &AtomicBool, - ) { - let prefix = path_prefix.chars().collect::>(); - let lowercase_prefix = prefix - .iter() - .map(|c| c.to_ascii_lowercase()) - .collect::>(); - self.match_internal( - &prefix, - &lowercase_prefix, - path_entries, - results, - cancel_flag, - |candidate, score| PathMatch { - score, - worktree_id: tree_id, - positions: Vec::new(), - path: candidate.path.clone(), - path_prefix: path_prefix.clone(), - }, - ) - } - - fn match_internal( - &mut self, - prefix: &[char], - lowercase_prefix: &[char], - candidates: impl Iterator, - results: &mut Vec, - cancel_flag: &AtomicBool, - build_match: F, - ) where - R: Match, - F: Fn(&C, f64) -> R, - { - let mut candidate_chars = Vec::new(); - let mut lowercase_candidate_chars = Vec::new(); - - for candidate in candidates { - if !candidate.has_chars(self.query_char_bag) { - continue; - } - - if cancel_flag.load(atomic::Ordering::Relaxed) { - break; - } - - candidate_chars.clear(); - lowercase_candidate_chars.clear(); - for c in candidate.to_string().chars() { - candidate_chars.push(c); - lowercase_candidate_chars.push(c.to_ascii_lowercase()); - } - - if 
!self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) { - continue; - } - - let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len()); - self.score_matrix.clear(); - self.score_matrix.resize(matrix_len, None); - self.best_position_matrix.clear(); - self.best_position_matrix.resize(matrix_len, 0); - - let score = self.score_match( - &candidate_chars, - &lowercase_candidate_chars, - prefix, - lowercase_prefix, - ); - - if score > 0.0 { - let mut mat = build_match(&candidate, score); - if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) { - if results.len() < self.max_results { - mat.set_positions(self.match_positions.clone()); - results.insert(i, mat); - } else if i < results.len() { - results.pop(); - mat.set_positions(self.match_positions.clone()); - results.insert(i, mat); - } - if results.len() == self.max_results { - self.min_score = results.last().unwrap().score(); - } - } - } - } - } - - fn find_last_positions( - &mut self, - lowercase_prefix: &[char], - lowercase_candidate: &[char], - ) -> bool { - let mut lowercase_prefix = lowercase_prefix.iter(); - let mut lowercase_candidate = lowercase_candidate.iter(); - for (i, char) in self.lowercase_query.iter().enumerate().rev() { - if let Some(j) = lowercase_candidate.rposition(|c| c == char) { - self.last_positions[i] = j + lowercase_prefix.len(); - } else if let Some(j) = lowercase_prefix.rposition(|c| c == char) { - self.last_positions[i] = j; - } else { - return false; - } - } - true - } - - fn score_match( - &mut self, - path: &[char], - path_cased: &[char], - prefix: &[char], - lowercase_prefix: &[char], - ) -> f64 { - let score = self.recursive_score_match( - path, - path_cased, - prefix, - lowercase_prefix, - 0, - 0, - self.query.len() as f64, - ) * self.query.len() as f64; - - if score <= 0.0 { - return 0.0; - } - - let path_len = prefix.len() + path.len(); - let mut cur_start = 0; - let mut byte_ix = 0; - let mut char_ix = 0; - for i in 0..self.query.len() { - let match_char_ix = self.best_position_matrix[i * path_len + cur_start]; - while char_ix < match_char_ix { - let ch = prefix - .get(char_ix) - .or_else(|| path.get(char_ix - prefix.len())) - .unwrap(); - byte_ix += ch.len_utf8(); - char_ix += 1; - } - cur_start = match_char_ix + 1; - self.match_positions[i] = byte_ix; - } - - score - } - - #[allow(clippy::too_many_arguments)] - fn recursive_score_match( - &mut self, - path: &[char], - path_cased: &[char], - prefix: &[char], - lowercase_prefix: &[char], - query_idx: usize, - path_idx: usize, - cur_score: f64, - ) -> f64 { - if query_idx == self.query.len() { - return 1.0; - } - - let path_len = prefix.len() + path.len(); - - if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] { - return memoized; - } - - let mut score = 0.0; - let mut best_position = 0; - - let query_char = self.lowercase_query[query_idx]; - let limit = self.last_positions[query_idx]; - - let mut last_slash = 0; - for j in path_idx..=limit { - let path_char = if j < prefix.len() { - lowercase_prefix[j] - } else { - path_cased[j - prefix.len()] - }; - let is_path_sep = path_char == '/' || path_char == '\\'; - - if query_idx == 0 && is_path_sep { - last_slash = j; - } - - if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') { - let curr = if j < prefix.len() { - prefix[j] - } else { - path[j - prefix.len()] - }; - - let mut char_score = 1.0; - if j > path_idx { - let last = if j - 1 < prefix.len() { - prefix[j - 1] - } else { - path[j - 1 - prefix.len()] - }; - - 
if last == '/' { - char_score = 0.9; - } else if (last == '-' || last == '_' || last == ' ' || last.is_numeric()) - || (last.is_lowercase() && curr.is_uppercase()) - { - char_score = 0.8; - } else if last == '.' { - char_score = 0.7; - } else if query_idx == 0 { - char_score = BASE_DISTANCE_PENALTY; - } else { - char_score = MIN_DISTANCE_PENALTY.max( - BASE_DISTANCE_PENALTY - - (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY, - ); - } - } - - // Apply a severe penalty if the case doesn't match. - // This will make the exact matches have higher score than the case-insensitive and the - // path insensitive matches. - if (self.smart_case || curr == '/') && self.query[query_idx] != curr { - char_score *= 0.001; - } - - let mut multiplier = char_score; - - // Scale the score based on how deep within the path we found the match. - if query_idx == 0 { - multiplier /= ((prefix.len() + path.len()) - last_slash) as f64; - } - - let mut next_score = 1.0; - if self.min_score > 0.0 { - next_score = cur_score * multiplier; - // Scores only decrease. If we can't pass the previous best, bail - if next_score < self.min_score { - // Ensure that score is non-zero so we use it in the memo table. - if score == 0.0 { - score = 1e-18; - } - continue; - } - } - - let new_score = self.recursive_score_match( - path, - path_cased, - prefix, - lowercase_prefix, - query_idx + 1, - j + 1, - next_score, - ) * multiplier; - - if new_score > score { - score = new_score; - best_position = j; - // Optimization: can't score better than 1. - if new_score == 1.0 { - break; - } - } - } - } - - if best_position != 0 { - self.best_position_matrix[query_idx * path_len + path_idx] = best_position; - } - - self.score_matrix[query_idx * path_len + path_idx] = Some(score); - score - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::path::PathBuf; - - #[test] - fn test_get_last_positions() { - let mut query: &[char] = &['d', 'c']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); - let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); - assert!(!result); - - query = &['c', 'd']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); - let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); - assert!(result); - assert_eq!(matcher.last_positions, vec![2, 4]); - - query = &['z', '/', 'z', 'f']; - let mut matcher = Matcher::new(query, query, query.into(), false, 10); - let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']); - assert!(result); - assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]); - } - - #[test] - fn test_match_path_entries() { - let paths = vec![ - "", - "a", - "ab", - "abC", - "abcd", - "alphabravocharlie", - "AlphaBravoCharlie", - "thisisatestdir", - "/////ThisIsATestDir", - "/this/is/a/test/dir", - "/test/tiatd", - ]; - - assert_eq!( - match_query("abc", false, &paths), - vec![ - ("abC", vec![0, 1, 2]), - ("abcd", vec![0, 1, 2]), - ("AlphaBravoCharlie", vec![0, 5, 10]), - ("alphabravocharlie", vec![4, 5, 10]), - ] - ); - assert_eq!( - match_query("t/i/a/t/d", false, &paths), - vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),] - ); - - assert_eq!( - match_query("tiatd", false, &paths), - vec![ - ("/test/tiatd", vec![6, 7, 8, 9, 10]), - ("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]), - ("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]), - ("thisisatestdir", vec![0, 2, 6, 7, 11]), - ] - ); - } - - #[test] - fn test_match_multibyte_path_entries() { - let 
paths = vec!["aαbβ/cγdδ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"]; - assert_eq!("1️⃣".len(), 7); - assert_eq!( - match_query("bcd", false, &paths), - vec![ - ("αβγδ/bcde", vec![9, 10, 11]), - ("aαbβ/cγdδ", vec![3, 7, 10]), - ] - ); - assert_eq!( - match_query("cde", false, &paths), - vec![ - ("αβγδ/bcde", vec![10, 11, 12]), - ("c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", vec![0, 23, 46]), - ] - ); - } - - fn match_query<'a>( - query: &str, - smart_case: bool, - paths: &[&'a str], - ) -> Vec<(&'a str, Vec)> { - let lowercase_query = query.to_lowercase().chars().collect::>(); - let query = query.chars().collect::>(); - let query_chars = CharBag::from(&lowercase_query[..]); - - let path_arcs = paths - .iter() - .map(|path| Arc::from(PathBuf::from(path))) - .collect::>(); - let mut path_entries = Vec::new(); - for (i, path) in paths.iter().enumerate() { - let lowercase_path = path.to_lowercase().chars().collect::>(); - let char_bag = CharBag::from(lowercase_path.as_slice()); - path_entries.push(PathMatchCandidate { - char_bag, - path: path_arcs.get(i).unwrap(), - }); - } - - let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100); - - let cancel_flag = AtomicBool::new(false); - let mut results = Vec::new(); - matcher.match_paths( - 0, - "".into(), - path_entries.into_iter(), - &mut results, - &cancel_flag, - ); - - results - .into_iter() - .map(|result| { - ( - paths - .iter() - .copied() - .find(|p| result.path.as_ref() == Path::new(p)) - .unwrap(), - result.positions, - ) - }) - .collect() - } -} +pub use paths::{match_path_sets, PathMatch, PathMatchCandidate, PathMatchCandidateSet}; +pub use strings::{match_strings, StringMatch, StringMatchCandidate}; diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs new file mode 100644 index 0000000000000000000000000000000000000000..51ae75bac23a981804dd4626fc9daf0cc4f50af6 --- /dev/null +++ b/crates/fuzzy/src/matcher.rs @@ -0,0 +1,463 @@ +use std::{ + borrow::Cow, + sync::atomic::{self, AtomicBool}, +}; + +use crate::CharBag; + +const BASE_DISTANCE_PENALTY: f64 = 0.6; +const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; +const MIN_DISTANCE_PENALTY: f64 = 0.2; + +pub struct Matcher<'a> { + query: &'a [char], + lowercase_query: &'a [char], + query_char_bag: CharBag, + smart_case: bool, + max_results: usize, + min_score: f64, + match_positions: Vec, + last_positions: Vec, + score_matrix: Vec>, + best_position_matrix: Vec, +} + +pub trait Match: Ord { + fn score(&self) -> f64; + fn set_positions(&mut self, positions: Vec); +} + +pub trait MatchCandidate { + fn has_chars(&self, bag: CharBag) -> bool; + fn to_string(&self) -> Cow<'_, str>; +} + +impl<'a> Matcher<'a> { + pub fn new( + query: &'a [char], + lowercase_query: &'a [char], + query_char_bag: CharBag, + smart_case: bool, + max_results: usize, + ) -> Self { + Self { + query, + lowercase_query, + query_char_bag, + min_score: 0.0, + last_positions: vec![0; query.len()], + match_positions: vec![0; query.len()], + score_matrix: Vec::new(), + best_position_matrix: Vec::new(), + smart_case, + max_results, + } + } + + pub fn match_candidates( + &mut self, + prefix: &[char], + lowercase_prefix: &[char], + candidates: impl Iterator, + results: &mut Vec, + cancel_flag: &AtomicBool, + build_match: F, + ) where + R: Match, + F: Fn(&C, f64) -> R, + { + let mut candidate_chars = Vec::new(); + let mut lowercase_candidate_chars = Vec::new(); + + for candidate in candidates { + if !candidate.has_chars(self.query_char_bag) { + continue; + } + + if 
cancel_flag.load(atomic::Ordering::Relaxed) { + break; + } + + candidate_chars.clear(); + lowercase_candidate_chars.clear(); + for c in candidate.to_string().chars() { + candidate_chars.push(c); + lowercase_candidate_chars.push(c.to_ascii_lowercase()); + } + + if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) { + continue; + } + + let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len()); + self.score_matrix.clear(); + self.score_matrix.resize(matrix_len, None); + self.best_position_matrix.clear(); + self.best_position_matrix.resize(matrix_len, 0); + + let score = self.score_match( + &candidate_chars, + &lowercase_candidate_chars, + prefix, + lowercase_prefix, + ); + + if score > 0.0 { + let mut mat = build_match(&candidate, score); + if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) { + if results.len() < self.max_results { + mat.set_positions(self.match_positions.clone()); + results.insert(i, mat); + } else if i < results.len() { + results.pop(); + mat.set_positions(self.match_positions.clone()); + results.insert(i, mat); + } + if results.len() == self.max_results { + self.min_score = results.last().unwrap().score(); + } + } + } + } + } + + fn find_last_positions( + &mut self, + lowercase_prefix: &[char], + lowercase_candidate: &[char], + ) -> bool { + let mut lowercase_prefix = lowercase_prefix.iter(); + let mut lowercase_candidate = lowercase_candidate.iter(); + for (i, char) in self.lowercase_query.iter().enumerate().rev() { + if let Some(j) = lowercase_candidate.rposition(|c| c == char) { + self.last_positions[i] = j + lowercase_prefix.len(); + } else if let Some(j) = lowercase_prefix.rposition(|c| c == char) { + self.last_positions[i] = j; + } else { + return false; + } + } + true + } + + fn score_match( + &mut self, + path: &[char], + path_cased: &[char], + prefix: &[char], + lowercase_prefix: &[char], + ) -> f64 { + let score = self.recursive_score_match( + path, + path_cased, + prefix, + lowercase_prefix, + 0, + 0, + self.query.len() as f64, + ) * self.query.len() as f64; + + if score <= 0.0 { + return 0.0; + } + + let path_len = prefix.len() + path.len(); + let mut cur_start = 0; + let mut byte_ix = 0; + let mut char_ix = 0; + for i in 0..self.query.len() { + let match_char_ix = self.best_position_matrix[i * path_len + cur_start]; + while char_ix < match_char_ix { + let ch = prefix + .get(char_ix) + .or_else(|| path.get(char_ix - prefix.len())) + .unwrap(); + byte_ix += ch.len_utf8(); + char_ix += 1; + } + cur_start = match_char_ix + 1; + self.match_positions[i] = byte_ix; + } + + score + } + + #[allow(clippy::too_many_arguments)] + fn recursive_score_match( + &mut self, + path: &[char], + path_cased: &[char], + prefix: &[char], + lowercase_prefix: &[char], + query_idx: usize, + path_idx: usize, + cur_score: f64, + ) -> f64 { + if query_idx == self.query.len() { + return 1.0; + } + + let path_len = prefix.len() + path.len(); + + if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] { + return memoized; + } + + let mut score = 0.0; + let mut best_position = 0; + + let query_char = self.lowercase_query[query_idx]; + let limit = self.last_positions[query_idx]; + + let mut last_slash = 0; + for j in path_idx..=limit { + let path_char = if j < prefix.len() { + lowercase_prefix[j] + } else { + path_cased[j - prefix.len()] + }; + let is_path_sep = path_char == '/' || path_char == '\\'; + + if query_idx == 0 && is_path_sep { + last_slash = j; + } + + if query_char == path_char || (is_path_sep && query_char == '_' || 
query_char == '\\') { + let curr = if j < prefix.len() { + prefix[j] + } else { + path[j - prefix.len()] + }; + + let mut char_score = 1.0; + if j > path_idx { + let last = if j - 1 < prefix.len() { + prefix[j - 1] + } else { + path[j - 1 - prefix.len()] + }; + + if last == '/' { + char_score = 0.9; + } else if (last == '-' || last == '_' || last == ' ' || last.is_numeric()) + || (last.is_lowercase() && curr.is_uppercase()) + { + char_score = 0.8; + } else if last == '.' { + char_score = 0.7; + } else if query_idx == 0 { + char_score = BASE_DISTANCE_PENALTY; + } else { + char_score = MIN_DISTANCE_PENALTY.max( + BASE_DISTANCE_PENALTY + - (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY, + ); + } + } + + // Apply a severe penalty if the case doesn't match. + // This will make the exact matches have higher score than the case-insensitive and the + // path insensitive matches. + if (self.smart_case || curr == '/') && self.query[query_idx] != curr { + char_score *= 0.001; + } + + let mut multiplier = char_score; + + // Scale the score based on how deep within the path we found the match. + if query_idx == 0 { + multiplier /= ((prefix.len() + path.len()) - last_slash) as f64; + } + + let mut next_score = 1.0; + if self.min_score > 0.0 { + next_score = cur_score * multiplier; + // Scores only decrease. If we can't pass the previous best, bail + if next_score < self.min_score { + // Ensure that score is non-zero so we use it in the memo table. + if score == 0.0 { + score = 1e-18; + } + continue; + } + } + + let new_score = self.recursive_score_match( + path, + path_cased, + prefix, + lowercase_prefix, + query_idx + 1, + j + 1, + next_score, + ) * multiplier; + + if new_score > score { + score = new_score; + best_position = j; + // Optimization: can't score better than 1. 
+ if new_score == 1.0 { + break; + } + } + } + } + + if best_position != 0 { + self.best_position_matrix[query_idx * path_len + path_idx] = best_position; + } + + self.score_matrix[query_idx * path_len + path_idx] = Some(score); + score + } +} + +#[cfg(test)] +mod tests { + use crate::{PathMatch, PathMatchCandidate}; + + use super::*; + use std::{ + path::{Path, PathBuf}, + sync::Arc, + }; + + #[test] + fn test_get_last_positions() { + let mut query: &[char] = &['d', 'c']; + let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); + assert!(!result); + + query = &['c', 'd']; + let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']); + assert!(result); + assert_eq!(matcher.last_positions, vec![2, 4]); + + query = &['z', '/', 'z', 'f']; + let mut matcher = Matcher::new(query, query, query.into(), false, 10); + let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']); + assert!(result); + assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]); + } + + #[test] + fn test_match_path_entries() { + let paths = vec![ + "", + "a", + "ab", + "abC", + "abcd", + "alphabravocharlie", + "AlphaBravoCharlie", + "thisisatestdir", + "/////ThisIsATestDir", + "/this/is/a/test/dir", + "/test/tiatd", + ]; + + assert_eq!( + match_single_path_query("abc", false, &paths), + vec![ + ("abC", vec![0, 1, 2]), + ("abcd", vec![0, 1, 2]), + ("AlphaBravoCharlie", vec![0, 5, 10]), + ("alphabravocharlie", vec![4, 5, 10]), + ] + ); + assert_eq!( + match_single_path_query("t/i/a/t/d", false, &paths), + vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),] + ); + + assert_eq!( + match_single_path_query("tiatd", false, &paths), + vec![ + ("/test/tiatd", vec![6, 7, 8, 9, 10]), + ("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]), + ("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]), + ("thisisatestdir", vec![0, 2, 6, 7, 11]), + ] + ); + } + + #[test] + fn test_match_multibyte_path_entries() { + let paths = vec!["aαbβ/cγdδ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"]; + assert_eq!("1️⃣".len(), 7); + assert_eq!( + match_single_path_query("bcd", false, &paths), + vec![ + ("αβγδ/bcde", vec![9, 10, 11]), + ("aαbβ/cγdδ", vec![3, 7, 10]), + ] + ); + assert_eq!( + match_single_path_query("cde", false, &paths), + vec![ + ("αβγδ/bcde", vec![10, 11, 12]), + ("c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", vec![0, 23, 46]), + ] + ); + } + + fn match_single_path_query<'a>( + query: &str, + smart_case: bool, + paths: &[&'a str], + ) -> Vec<(&'a str, Vec)> { + let lowercase_query = query.to_lowercase().chars().collect::>(); + let query = query.chars().collect::>(); + let query_chars = CharBag::from(&lowercase_query[..]); + + let path_arcs: Vec> = paths + .iter() + .map(|path| Arc::from(PathBuf::from(path))) + .collect::>(); + let mut path_entries = Vec::new(); + for (i, path) in paths.iter().enumerate() { + let lowercase_path = path.to_lowercase().chars().collect::>(); + let char_bag = CharBag::from(lowercase_path.as_slice()); + path_entries.push(PathMatchCandidate { + char_bag, + path: &path_arcs[i], + }); + } + + let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100); + + let cancel_flag = AtomicBool::new(false); + let mut results = Vec::new(); + + matcher.match_candidates( + &[], + &[], + path_entries.into_iter(), + &mut results, + &cancel_flag, + |candidate, score| 
PathMatch { + score, + worktree_id: 0, + positions: Vec::new(), + path: candidate.path.clone(), + path_prefix: "".into(), + }, + ); + + results + .into_iter() + .map(|result| { + ( + paths + .iter() + .copied() + .find(|p| result.path.as_ref() == Path::new(p)) + .unwrap(), + result.positions, + ) + }) + .collect() + } +} diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs new file mode 100644 index 0000000000000000000000000000000000000000..8d9ec97d9b26eb6c2d6b4ce7db8268eb05aac0d7 --- /dev/null +++ b/crates/fuzzy/src/paths.rs @@ -0,0 +1,174 @@ +use std::{ + borrow::Cow, + cmp::{self, Ordering}, + path::Path, + sync::{atomic::AtomicBool, Arc}, +}; + +use gpui::executor; + +use crate::{ + matcher::{Match, MatchCandidate, Matcher}, + CharBag, +}; + +#[derive(Clone, Debug)] +pub struct PathMatchCandidate<'a> { + pub path: &'a Arc, + pub char_bag: CharBag, +} + +#[derive(Clone, Debug)] +pub struct PathMatch { + pub score: f64, + pub positions: Vec, + pub worktree_id: usize, + pub path: Arc, + pub path_prefix: Arc, +} + +pub trait PathMatchCandidateSet<'a>: Send + Sync { + type Candidates: Iterator>; + fn id(&self) -> usize; + fn len(&self) -> usize; + fn is_empty(&self) -> bool { + self.len() == 0 + } + fn prefix(&self) -> Arc; + fn candidates(&'a self, start: usize) -> Self::Candidates; +} + +impl Match for PathMatch { + fn score(&self) -> f64 { + self.score + } + + fn set_positions(&mut self, positions: Vec) { + self.positions = positions; + } +} + +impl<'a> MatchCandidate for PathMatchCandidate<'a> { + fn has_chars(&self, bag: CharBag) -> bool { + self.char_bag.is_superset(bag) + } + + fn to_string(&self) -> Cow<'a, str> { + self.path.to_string_lossy() + } +} + +impl PartialEq for PathMatch { + fn eq(&self, other: &Self) -> bool { + self.cmp(other).is_eq() + } +} + +impl Eq for PathMatch {} + +impl PartialOrd for PathMatch { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for PathMatch { + fn cmp(&self, other: &Self) -> Ordering { + self.score + .partial_cmp(&other.score) + .unwrap_or(Ordering::Equal) + .then_with(|| self.worktree_id.cmp(&other.worktree_id)) + .then_with(|| self.path.cmp(&other.path)) + } +} + +pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( + candidate_sets: &'a [Set], + query: &str, + smart_case: bool, + max_results: usize, + cancel_flag: &AtomicBool, + background: Arc, +) -> Vec { + let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum(); + if path_count == 0 { + return Vec::new(); + } + + let lowercase_query = query.to_lowercase().chars().collect::>(); + let query = query.chars().collect::>(); + + let lowercase_query = &lowercase_query; + let query = &query; + let query_char_bag = CharBag::from(&lowercase_query[..]); + + let num_cpus = background.num_cpus().min(path_count); + let segment_size = (path_count + num_cpus - 1) / num_cpus; + let mut segment_results = (0..num_cpus) + .map(|_| Vec::with_capacity(max_results)) + .collect::>(); + + background + .scoped(|scope| { + for (segment_idx, results) in segment_results.iter_mut().enumerate() { + scope.spawn(async move { + let segment_start = segment_idx * segment_size; + let segment_end = segment_start + segment_size; + let mut matcher = Matcher::new( + query, + lowercase_query, + query_char_bag, + smart_case, + max_results, + ); + + let mut tree_start = 0; + for candidate_set in candidate_sets { + let tree_end = tree_start + candidate_set.len(); + + if tree_start < segment_end && segment_start < tree_end { + let start = 
cmp::max(tree_start, segment_start) - tree_start; + let end = cmp::min(tree_end, segment_end) - tree_start; + let candidates = candidate_set.candidates(start).take(end - start); + + let worktree_id = candidate_set.id(); + let prefix = candidate_set.prefix().chars().collect::>(); + let lowercase_prefix = prefix + .iter() + .map(|c| c.to_ascii_lowercase()) + .collect::>(); + matcher.match_candidates( + &prefix, + &lowercase_prefix, + candidates, + results, + cancel_flag, + |candidate, score| PathMatch { + score, + worktree_id, + positions: Vec::new(), + path: candidate.path.clone(), + path_prefix: candidate_set.prefix(), + }, + ); + } + if tree_end >= segment_end { + break; + } + tree_start = tree_end; + } + }) + } + }) + .await; + + let mut results = Vec::new(); + for segment_result in segment_results { + if results.is_empty() { + results = segment_result; + } else { + util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); + } + } + results +} diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs new file mode 100644 index 0000000000000000000000000000000000000000..37ee20f5285f633af96d65d427e5bb398763ed8d --- /dev/null +++ b/crates/fuzzy/src/strings.rs @@ -0,0 +1,161 @@ +use std::{ + borrow::Cow, + cmp::{self, Ordering}, + sync::{atomic::AtomicBool, Arc}, +}; + +use gpui::executor; + +use crate::{ + matcher::{Match, MatchCandidate, Matcher}, + CharBag, +}; + +#[derive(Clone, Debug)] +pub struct StringMatchCandidate { + pub id: usize, + pub string: String, + pub char_bag: CharBag, +} + +impl Match for StringMatch { + fn score(&self) -> f64 { + self.score + } + + fn set_positions(&mut self, positions: Vec) { + self.positions = positions; + } +} + +impl StringMatchCandidate { + pub fn new(id: usize, string: String) -> Self { + Self { + id, + char_bag: CharBag::from(string.as_str()), + string, + } + } +} + +impl<'a> MatchCandidate for &'a StringMatchCandidate { + fn has_chars(&self, bag: CharBag) -> bool { + self.char_bag.is_superset(bag) + } + + fn to_string(&self) -> Cow<'a, str> { + self.string.as_str().into() + } +} + +#[derive(Clone, Debug)] +pub struct StringMatch { + pub candidate_id: usize, + pub score: f64, + pub positions: Vec, + pub string: String, +} + +impl PartialEq for StringMatch { + fn eq(&self, other: &Self) -> bool { + self.cmp(other).is_eq() + } +} + +impl Eq for StringMatch {} + +impl PartialOrd for StringMatch { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for StringMatch { + fn cmp(&self, other: &Self) -> Ordering { + self.score + .partial_cmp(&other.score) + .unwrap_or(Ordering::Equal) + .then_with(|| self.candidate_id.cmp(&other.candidate_id)) + } +} + +pub async fn match_strings( + candidates: &[StringMatchCandidate], + query: &str, + smart_case: bool, + max_results: usize, + cancel_flag: &AtomicBool, + background: Arc, +) -> Vec { + if candidates.is_empty() || max_results == 0 { + return Default::default(); + } + + if query.is_empty() { + return candidates + .iter() + .map(|candidate| StringMatch { + candidate_id: candidate.id, + score: 0., + positions: Default::default(), + string: candidate.string.clone(), + }) + .collect(); + } + + let lowercase_query = query.to_lowercase().chars().collect::>(); + let query = query.chars().collect::>(); + + let lowercase_query = &lowercase_query; + let query = &query; + let query_char_bag = CharBag::from(&lowercase_query[..]); + + let num_cpus = background.num_cpus().min(candidates.len()); + let segment_size = (candidates.len() + num_cpus - 1) 
/ num_cpus; + let mut segment_results = (0..num_cpus) + .map(|_| Vec::with_capacity(max_results.min(candidates.len()))) + .collect::>(); + + background + .scoped(|scope| { + for (segment_idx, results) in segment_results.iter_mut().enumerate() { + let cancel_flag = &cancel_flag; + scope.spawn(async move { + let segment_start = cmp::min(segment_idx * segment_size, candidates.len()); + let segment_end = cmp::min(segment_start + segment_size, candidates.len()); + let mut matcher = Matcher::new( + query, + lowercase_query, + query_char_bag, + smart_case, + max_results, + ); + + matcher.match_candidates( + &[], + &[], + candidates[segment_start..segment_end].iter(), + results, + cancel_flag, + |candidate, score| StringMatch { + candidate_id: candidate.id, + score, + positions: Vec::new(), + string: candidate.string.to_string(), + }, + ); + }); + } + }) + .await; + + let mut results = Vec::new(); + for segment_result in segment_results { + if results.is_empty() { + results = segment_result; + } else { + util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a)); + } + } + results +} diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index f6698e23be3c3ad7871f91cf1a45777c63eb3864..52b168b70cbaa6f0d915733b47311e29cc2298df 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -84,13 +84,13 @@ impl OutlineView { .active_item(cx) .and_then(|item| item.downcast::()) { - let buffer = editor + let outline = editor .read(cx) .buffer() .read(cx) .snapshot(cx) .outline(Some(cx.global::().theme.editor.syntax.as_ref())); - if let Some(outline) = buffer { + if let Some(outline) = outline { workspace.toggle_modal(cx, |_, cx| { let view = cx.add_view(|cx| OutlineView::new(outline, editor, cx)); cx.subscribe(&view, Self::on_event).detach(); diff --git a/crates/recent_projects/Cargo.toml b/crates/recent_projects/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..d633381365b078b5e48efcb542c3236b7d49801c --- /dev/null +++ b/crates/recent_projects/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "recent_projects" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/recent_projects.rs" +doctest = false + +[dependencies] +db = { path = "../db" } +editor = { path = "../editor" } +fuzzy = { path = "../fuzzy" } +gpui = { path = "../gpui" } +language = { path = "../language" } +picker = { path = "../picker" } +settings = { path = "../settings" } +text = { path = "../text" } +workspace = { path = "../workspace" } +ordered-float = "2.1.1" +postage = { version = "0.4", features = ["futures-traits"] } +smol = "1.2" diff --git a/crates/recent_projects/src/highlighted_workspace_location.rs b/crates/recent_projects/src/highlighted_workspace_location.rs new file mode 100644 index 0000000000000000000000000000000000000000..8e75b291a0748627ce47c570c607eb2d2163358d --- /dev/null +++ b/crates/recent_projects/src/highlighted_workspace_location.rs @@ -0,0 +1,129 @@ +use std::path::Path; + +use fuzzy::StringMatch; +use gpui::{ + elements::{Label, LabelStyle}, + Element, ElementBox, +}; +use workspace::WorkspaceLocation; + +pub struct HighlightedText { + pub text: String, + pub highlight_positions: Vec, + char_count: usize, +} + +impl HighlightedText { + fn join(components: impl Iterator, separator: &str) -> Self { + let mut char_count = 0; + let separator_char_count = separator.chars().count(); + let mut text = String::new(); + let mut highlight_positions = Vec::new(); + for component in components { + if char_count != 0 { + 
text.push_str(separator); + char_count += separator_char_count; + } + + highlight_positions.extend( + component + .highlight_positions + .iter() + .map(|position| position + char_count), + ); + text.push_str(&component.text); + char_count += component.text.chars().count(); + } + + Self { + text, + highlight_positions, + char_count, + } + } + + pub fn render(self, style: impl Into) -> ElementBox { + Label::new(self.text, style) + .with_highlights(self.highlight_positions) + .boxed() + } +} + +pub struct HighlightedWorkspaceLocation { + pub names: HighlightedText, + pub paths: Vec, +} + +impl HighlightedWorkspaceLocation { + pub fn new(string_match: &StringMatch, location: &WorkspaceLocation) -> Self { + let mut path_start_offset = 0; + let (names, paths): (Vec<_>, Vec<_>) = location + .paths() + .iter() + .map(|path| { + let highlighted_text = Self::highlights_for_path( + path.as_ref(), + &string_match.positions, + path_start_offset, + ); + + path_start_offset += highlighted_text.1.char_count; + + highlighted_text + }) + .unzip(); + + Self { + names: HighlightedText::join(names.into_iter().filter_map(|name| name), ", "), + paths, + } + } + + // Compute the highlighted text for the name and path + fn highlights_for_path( + path: &Path, + match_positions: &Vec, + path_start_offset: usize, + ) -> (Option, HighlightedText) { + let path_string = path.to_string_lossy(); + let path_char_count = path_string.chars().count(); + // Get the subset of match highlight positions that line up with the given path. + // Also adjusts them to start at the path start + let path_positions = match_positions + .iter() + .copied() + .skip_while(|position| *position < path_start_offset) + .take_while(|position| *position < path_start_offset + path_char_count) + .map(|position| position - path_start_offset) + .collect::>(); + + // Again subset the highlight positions to just those that line up with the file_name + // again adjusted to the start of the file_name + let file_name_text_and_positions = path.file_name().map(|file_name| { + let text = file_name.to_string_lossy(); + let char_count = text.chars().count(); + let file_name_start = path_char_count - char_count; + let highlight_positions = path_positions + .iter() + .copied() + .skip_while(|position| *position < file_name_start) + .take_while(|position| *position < file_name_start + char_count) + .map(|position| position - file_name_start) + .collect::>(); + HighlightedText { + text: text.to_string(), + highlight_positions, + char_count, + } + }); + + ( + file_name_text_and_positions, + HighlightedText { + text: path_string.to_string(), + highlight_positions: path_positions, + char_count: path_char_count, + }, + ) + } +} diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs new file mode 100644 index 0000000000000000000000000000000000000000..1842540db38c33fa25b735c8fe2c0dc5616f7037 --- /dev/null +++ b/crates/recent_projects/src/recent_projects.rs @@ -0,0 +1,187 @@ +mod highlighted_workspace_location; + +use fuzzy::{StringMatch, StringMatchCandidate}; +use gpui::{ + actions, + elements::{ChildView, Flex, ParentElement}, + AnyViewHandle, Element, ElementBox, Entity, MutableAppContext, RenderContext, Task, View, + ViewContext, ViewHandle, +}; +use highlighted_workspace_location::HighlightedWorkspaceLocation; +use ordered_float::OrderedFloat; +use picker::{Picker, PickerDelegate}; +use settings::Settings; +use workspace::{OpenPaths, Workspace, WorkspaceLocation}; + +const RECENT_LIMIT: usize = 100; + 
+actions!(recent_projects, [Toggle]); + +pub fn init(cx: &mut MutableAppContext) { + cx.add_action(RecentProjectsView::toggle); + Picker::::init(cx); +} + +struct RecentProjectsView { + picker: ViewHandle>, + workspace_locations: Vec, + selected_match_index: usize, + matches: Vec, +} + +impl RecentProjectsView { + fn new(cx: &mut ViewContext) -> Self { + let handle = cx.weak_handle(); + let workspace_locations: Vec = workspace::WORKSPACE_DB + .recent_workspaces(RECENT_LIMIT) + .unwrap_or_default() + .into_iter() + .map(|(_, location)| location) + .collect(); + Self { + picker: cx.add_view(|cx| { + Picker::new("Recent Projects...", handle, cx).with_max_size(800., 1200.) + }), + workspace_locations, + selected_match_index: 0, + matches: Default::default(), + } + } + + fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { + workspace.toggle_modal(cx, |_, cx| { + let view = cx.add_view(|cx| Self::new(cx)); + cx.subscribe(&view, Self::on_event).detach(); + view + }); + } + + fn on_event( + workspace: &mut Workspace, + _: ViewHandle, + event: &Event, + cx: &mut ViewContext, + ) { + match event { + Event::Dismissed => workspace.dismiss_modal(cx), + } + } +} + +pub enum Event { + Dismissed, +} + +impl Entity for RecentProjectsView { + type Event = Event; +} + +impl View for RecentProjectsView { + fn ui_name() -> &'static str { + "RecentProjectsView" + } + + fn render(&mut self, cx: &mut RenderContext) -> ElementBox { + ChildView::new(self.picker.clone(), cx).boxed() + } + + fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext) { + if cx.is_self_focused() { + cx.focus(&self.picker); + } + } +} + +impl PickerDelegate for RecentProjectsView { + fn match_count(&self) -> usize { + self.matches.len() + } + + fn selected_index(&self) -> usize { + self.selected_match_index + } + + fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext) { + self.selected_match_index = ix; + } + + fn update_matches(&mut self, query: String, cx: &mut ViewContext) -> gpui::Task<()> { + let query = query.trim_start(); + let smart_case = query.chars().any(|c| c.is_uppercase()); + let candidates = self + .workspace_locations + .iter() + .enumerate() + .map(|(id, location)| { + let combined_string = location + .paths() + .iter() + .map(|path| path.to_string_lossy().to_owned()) + .collect::>() + .join(""); + StringMatchCandidate::new(id, combined_string) + }) + .collect::>(); + self.matches = smol::block_on(fuzzy::match_strings( + candidates.as_slice(), + query, + smart_case, + 100, + &Default::default(), + cx.background().clone(), + )); + self.matches.sort_unstable_by_key(|m| m.candidate_id); + + self.selected_match_index = self + .matches + .iter() + .enumerate() + .max_by_key(|(_, m)| OrderedFloat(m.score)) + .map(|(ix, _)| ix) + .unwrap_or(0); + Task::ready(()) + } + + fn confirm(&mut self, cx: &mut ViewContext) { + let selected_match = &self.matches[self.selected_index()]; + let workspace_location = &self.workspace_locations[selected_match.candidate_id]; + cx.dispatch_global_action(OpenPaths { + paths: workspace_location.paths().as_ref().clone(), + }); + cx.emit(Event::Dismissed); + } + + fn dismiss(&mut self, cx: &mut ViewContext) { + cx.emit(Event::Dismissed); + } + + fn render_match( + &self, + ix: usize, + mouse_state: &mut gpui::MouseState, + selected: bool, + cx: &gpui::AppContext, + ) -> ElementBox { + let settings = cx.global::(); + let string_match = &self.matches[ix]; + let style = settings.theme.picker.item.style_for(mouse_state, selected); + + let highlighted_location = 
HighlightedWorkspaceLocation::new( + &string_match, + &self.workspace_locations[string_match.candidate_id], + ); + + Flex::column() + .with_child(highlighted_location.names.render(style.label.clone())) + .with_children( + highlighted_location + .paths + .into_iter() + .map(|highlighted_path| highlighted_path.render(style.label.clone())), + ) + .flex(1., false) + .contained() + .with_style(style.container) + .named("match") + } +} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a0c353b3f808bf1f1a5c9a9909f2047139916449..7d37298e0b9019275a18d3bef64077195ed386d1 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -50,7 +50,7 @@ pub use pane_group::*; use persistence::{model::SerializedItem, DB}; pub use persistence::{ model::{ItemId, WorkspaceLocation}, - WorkspaceDb, + WorkspaceDb, DB as WORKSPACE_DB, }; use postage::prelude::Stream; use project::{Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId}; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index cc81f3bf2363c85b83070dacf9546abc55c6e3a6..3dde95358f20d2255089986d96ae8bff2533028a 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -44,6 +44,7 @@ plugin_runtime = { path = "../plugin_runtime" } project = { path = "../project" } project_panel = { path = "../project_panel" } project_symbols = { path = "../project_symbols" } +recent_projects = { path = "../recent_projects" } rpc = { path = "../rpc" } settings = { path = "../settings" } sum_tree = { path = "../sum_tree" } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 4163841d455d3e4e30620a28b80a0e5fdba16ca9..c06023086f3eb5d1191b1b7cd16b21fd1b3a0fe4 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -121,6 +121,7 @@ fn main() { vim::init(cx); terminal::init(cx); theme_testbench::init(cx); + recent_projects::init(cx); cx.spawn(|cx| watch_themes(fs.clone(), themes.clone(), cx)) .detach(); From 6da59311d1610d0806d308adc654084931ade5ed Mon Sep 17 00:00:00 2001 From: Kay Simmons Date: Wed, 14 Dec 2022 16:02:48 -0800 Subject: [PATCH 07/17] Add open recent project to file menu --- crates/zed/src/menus.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/zed/src/menus.rs b/crates/zed/src/menus.rs index 132bbc44778ec94b9d05df0e5250a23a9cc4dac0..18e7d6fe9b1ef8be5c634ce04d25ee5a1cd09893 100644 --- a/crates/zed/src/menus.rs +++ b/crates/zed/src/menus.rs @@ -79,6 +79,11 @@ pub fn menus() -> Vec> { name: "Open…", action: Box::new(workspace::Open), }, + MenuItem::Action { + name: "Open Recent...", + action: Box::new(recent_projects::Toggle), + }, + MenuItem::Separator, MenuItem::Action { name: "Add Folder to Project…", action: Box::new(workspace::AddFolderToProject), From 81e3b48f37a449d60bc6ecad56512df964722315 Mon Sep 17 00:00:00 2001 From: Kay Simmons Date: Wed, 14 Dec 2022 16:14:16 -0800 Subject: [PATCH 08/17] Add keybinding --- assets/keymaps/default.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/keymaps/default.json b/assets/keymaps/default.json index 774d8fbc9ee0183c6a98b0f0c79f92bf9c6a4b3e..783d90f8e1deceb7c40d0babbafae536813e9662 100644 --- a/assets/keymaps/default.json +++ b/assets/keymaps/default.json @@ -36,6 +36,7 @@ "cmd-n": "workspace::NewFile", "cmd-shift-n": "workspace::NewWindow", "cmd-o": "workspace::Open", + "alt-cmd-o": "recent_projects::Toggle", "ctrl-`": "workspace::NewTerminal" } }, From 0dedc1f3a43f93889a651960c98144a1df83b055 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Dec 2022 00:17:28 -0500 Subject: 
[PATCH 09/17] Get tests building again --- crates/collab/src/integration_tests.rs | 18 +++++++++--------- crates/editor/src/multi_buffer.rs | 8 +++++--- crates/editor/src/test/editor_test_context.rs | 2 +- crates/git/src/diff.rs | 17 +++++++++++++++-- crates/language/src/buffer.rs | 8 ++++++++ 5 files changed, 38 insertions(+), 15 deletions(-) diff --git a/crates/collab/src/integration_tests.rs b/crates/collab/src/integration_tests.rs index 8150b1c0af53563d068603f73434f120560b536c..ae8949b88f81dce8cc2c5c4bb0c347c9a8b35e29 100644 --- a/crates/collab/src/integration_tests.rs +++ b/crates/collab/src/integration_tests.rs @@ -1813,7 +1813,7 @@ async fn test_git_diff_base_change( buffer_local_a.read_with(cx_a, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(1..2, "", "two\n")], @@ -1833,7 +1833,7 @@ async fn test_git_diff_base_change( buffer_remote_a.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(1..2, "", "two\n")], @@ -1857,7 +1857,7 @@ async fn test_git_diff_base_change( assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(2..3, "", "three\n")], @@ -1868,7 +1868,7 @@ async fn test_git_diff_base_change( buffer_remote_a.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(2..3, "", "three\n")], @@ -1911,7 +1911,7 @@ async fn test_git_diff_base_change( buffer_local_b.read_with(cx_a, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(1..2, "", "two\n")], @@ -1931,7 +1931,7 @@ async fn test_git_diff_base_change( buffer_remote_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(1..2, "", "two\n")], @@ -1959,12 +1959,12 @@ async fn test_git_diff_base_change( "{:?}", buffer .snapshot() - .git_diff_hunks_in_range(0..4, false) + .git_diff_hunks_in_row_range(0..4, false) .collect::>() ); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(2..3, "", "three\n")], @@ -1975,7 +1975,7 @@ async fn test_git_diff_base_change( buffer_remote_b.read_with(cx_b, |buffer, _| { assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref())); git::diff::assert_hunks( - buffer.snapshot().git_diff_hunks_in_range(0..4, false), + buffer.snapshot().git_diff_hunks_in_row_range(0..4, false), &buffer, &diff_base, &[(2..3, "", "three\n")], diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 
92801d1b47671670201e01b7d8eefdaac3b0fa5a..22e75a219b47e62c7dfe6ee67ccca574e39ba4c0 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2652,7 +2652,8 @@ impl MultiBufferSnapshot { let range = excerpt.range.context.clone(); let range_start_row = range.start.to_point(&excerpt.buffer).row; let range_end_row = range.end.to_point(&excerpt.buffer).row; - let a = Some(excerpt.buffer.git_diff_hunks_in_range(range, reversed).map( + + let hunks = excerpt.buffer.git_diff_hunks_in_range(range, reversed).map( move |mut hunk| { hunk.buffer_range.start = hunk.buffer_range.start.max(range_start_row) - range_start_row @@ -2662,9 +2663,10 @@ impl MultiBufferSnapshot { + lines_advance; hunk }, - )); + ); + lines_advance += summary.text.lines.row; - a + Some(hunks) }) .flatten() } diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 74b6bdd416b681fffefed91c8e5546c4b70b5703..568f29d3e11caa8667d524e49786b1e34058ce20 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -254,7 +254,7 @@ impl<'a> EditorTestContext<'a> { Actual selections: {} - "}, + "}, self.assertion_context(), expected_marked_text, actual_marked_text, diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 61396e9278115cc9b1aa3a0f680395d76f3f96e4..066a7df2248a0bfb44359cb54d4b9dd6eceb5b5f 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -71,6 +71,17 @@ impl BufferDiff { } } + pub fn hunks_in_row_range<'a>( + &'a self, + range: Range, + buffer: &'a BufferSnapshot, + reversed: bool, + ) -> impl 'a + Iterator> { + let start = buffer.anchor_before(Point::new(range.start, 0)); + let end = buffer.anchor_after(Point::new(range.end, 0)); + self.hunks_in_range(start..end, buffer, reversed) + } + pub fn hunks_in_range<'a>( &'a self, range: Range, @@ -138,7 +149,9 @@ impl BufferDiff { #[cfg(test)] fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { - self.hunks_in_range(0..u32::MAX, text, false) + let start = text.anchor_before(Point::new(0, 0)); + let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); + self.hunks_in_range(start..end, text, false) } fn diff<'a>(head: &'a str, current: &'a str) -> Option> { @@ -352,7 +365,7 @@ mod tests { assert_eq!(diff.hunks(&buffer).count(), 8); assert_hunks( - diff.hunks_in_range(7..12, &buffer, false), + diff.hunks_in_row_range(7..12, &buffer, false), &buffer, &diff_base, &[ diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 44fa49495b38a804d73211ed607433e010e01ae4..4bf0f91a2a5833d4d755eb5de0bcb5b0676f0a79 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2310,6 +2310,14 @@ impl BufferSnapshot { }) } + pub fn git_diff_hunks_in_row_range<'a>( + &'a self, + range: Range, + reversed: bool, + ) -> impl 'a + Iterator> { + self.git_diff.hunks_in_row_range(range, self, reversed) + } + pub fn git_diff_hunks_in_range<'a>( &'a self, range: Range, From f88b413f6a1659147bb1700117a9f30f34b5dca4 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Dec 2022 17:09:09 -0500 Subject: [PATCH 10/17] Rewrite multi-buffer aware git hunks in range to be more correct Less ad-hoc state tracking, rely more on values provided by the underlying data Co-Authored-By: Max Brunsfeld --- crates/editor/src/multi_buffer.rs | 236 ++++++++++++++++++++++++++---- crates/git/src/diff.rs | 6 +- crates/language/src/buffer.rs | 5 +- 3 files changed, 213 insertions(+), 34 deletions(-) 
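Note on the approach (a minimal, hypothetical sketch; the helper and parameter names below are illustrative and not part of the patch): the rewritten iterator leans on the excerpt's own coordinates instead of hand-maintained running totals, clamping each hunk's buffer-row range to its excerpt and re-basing it at the excerpt's starting row in the multi-buffer.

    use std::ops::Range;

    // Illustrative helper: translate a hunk's buffer rows into multi-buffer rows
    // through a single excerpt, clamping to the rows the excerpt actually shows.
    fn hunk_rows_in_multibuffer(
        hunk_rows: Range<u32>,      // hunk rows in buffer coordinates
        excerpt_start_row: u32,     // first buffer row shown by the excerpt
        excerpt_last_row: u32,      // last buffer row shown by the excerpt
        multibuffer_start_row: u32, // where the excerpt begins in the multi-buffer
    ) -> Range<u32> {
        let start =
            multibuffer_start_row + hunk_rows.start.saturating_sub(excerpt_start_row);
        let end = multibuffer_start_row
            + hunk_rows
                .end
                .min(excerpt_last_row + 1)
                .saturating_sub(excerpt_start_row);
        start..end
    }

    fn main() {
        // A hunk on buffer rows 5..7, seen through an excerpt of buffer rows 5..=6
        // that starts at multi-buffer row 2, maps to multi-buffer rows 2..4.
        assert_eq!(hunk_rows_in_multibuffer(5..7, 5, 6, 2), 2..4);
        // A hunk that begins above the excerpt is clamped to the excerpt's first row.
        assert_eq!(hunk_rows_in_multibuffer(3..6, 5, 6, 2), 2..3);
    }

Because the start uses a saturating subtraction and the end is capped at one past the excerpt's last row, hunks that spill over an excerpt's edges are still reported inside that excerpt, which is what the removed-hunk entries at the range extremities in the new test reflect.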
diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 22e75a219b47e62c7dfe6ee67ccca574e39ba4c0..fab55a5099d9abd6eaa393c359d83cf784849ff5 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2635,38 +2635,58 @@ impl MultiBufferSnapshot { row_range: Range, reversed: bool, ) -> impl 'a + Iterator> { - let mut lines_advance = 0; - let mut cursor = self.excerpts.filter::<_, ExcerptSummary>(move |summary| { - let filter = summary.text.lines.row + lines_advance >= row_range.start - && lines_advance <= row_range.end; - lines_advance += summary.text.lines.row; - filter - }); + let mut cursor = self.excerpts.cursor::(); + cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &()); - let mut lines_advance = 0; std::iter::from_fn(move || { - cursor.next(&()); let excerpt = cursor.item()?; - let summary = cursor.item_summary()?; - - let range = excerpt.range.context.clone(); - let range_start_row = range.start.to_point(&excerpt.buffer).row; - let range_end_row = range.end.to_point(&excerpt.buffer).row; - - let hunks = excerpt.buffer.git_diff_hunks_in_range(range, reversed).map( - move |mut hunk| { - hunk.buffer_range.start = hunk.buffer_range.start.max(range_start_row) - - range_start_row - + lines_advance; - hunk.buffer_range.end = hunk.buffer_range.end.min(range_end_row + 1) - - range_start_row - + lines_advance; - hunk - }, - ); + let multibuffer_start = *cursor.start(); + let multibuffer_end = multibuffer_start + excerpt.text_summary.lines; + if multibuffer_start.row >= row_range.end { + return None; + } + + let mut buffer_start = excerpt.range.context.start; + let mut buffer_end = excerpt.range.context.end; + let excerpt_start_point = buffer_start.to_point(&excerpt.buffer); + let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines; + + if row_range.start > multibuffer_start.row { + let buffer_start_point = + excerpt_start_point + Point::new(row_range.start - multibuffer_start.row, 0); + buffer_start = excerpt.buffer.anchor_before(buffer_start_point); + } + + if row_range.end < multibuffer_end.row { + let buffer_end_point = + excerpt_start_point + Point::new(row_range.end - multibuffer_start.row, 0); + buffer_end = excerpt.buffer.anchor_before(buffer_end_point); + } + + let buffer_hunks = excerpt + .buffer + .git_diff_hunks_intersecting_range(buffer_start..buffer_end, reversed) + .filter_map(move |hunk| { + let start = multibuffer_start.row + + hunk + .buffer_range + .start + .saturating_sub(excerpt_start_point.row); + let end = multibuffer_start.row + + hunk + .buffer_range + .end + .min(excerpt_end_point.row + 1) + .saturating_sub(excerpt_start_point.row); + + Some(DiffHunk { + buffer_range: start..end, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + }) + }); - lines_advance += summary.text.lines.row; - Some(hunks) + cursor.next(&()); + Some(buffer_hunks) }) .flatten() } @@ -3488,11 +3508,12 @@ impl ToPointUtf16 for PointUtf16 { #[cfg(test)] mod tests { use super::*; - use gpui::MutableAppContext; + use gpui::{MutableAppContext, TestAppContext}; use language::{Buffer, Rope}; use rand::prelude::*; use settings::Settings; use std::{env, rc::Rc}; + use unindent::Unindent; use util::test::sample_text; @@ -4033,6 +4054,163 @@ mod tests { ); } + #[gpui::test] + async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { + use git::diff::DiffHunkStatus; + + // buffer has two modified hunks with two rows each + let buffer_1 = cx.add_model(|cx| { + let mut buffer = Buffer::new( + 0, + " + 1.zero + 
1.ONE + 1.TWO + 1.three + 1.FOUR + 1.FIVE + 1.six + " + .unindent(), + cx, + ); + buffer.set_diff_base( + Some( + " + 1.zero + 1.one + 1.two + 1.three + 1.four + 1.five + 1.six + " + .unindent(), + ), + cx, + ); + buffer + }); + + // buffer has a deletion hunk and an insertion hunk + let buffer_2 = cx.add_model(|cx| { + let mut buffer = Buffer::new( + 0, + " + 2.zero + 2.one + 2.two + 2.three + 2.four + 2.five + 2.six + " + .unindent(), + cx, + ); + buffer.set_diff_base( + Some( + " + 2.zero + 2.one + 2.one-and-a-half + 2.two + 2.three + 2.four + 2.six + " + .unindent(), + ), + cx, + ); + buffer + }); + + cx.foreground().run_until_parked(); + + let multibuffer = cx.add_model(|cx| { + let mut multibuffer = MultiBuffer::new(0); + multibuffer.push_excerpts( + buffer_1.clone(), + [ + // excerpt ends in the middle of a modified hunk + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt begins in the middle of a modified hunk + ExcerptRange { + context: Point::new(5, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer.push_excerpts( + buffer_2.clone(), + [ + // excerpt ends at a deletion + ExcerptRange { + context: Point::new(0, 0)..Point::new(1, 5), + primary: Default::default(), + }, + // excerpt starts at a deletion + ExcerptRange { + context: Point::new(2, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains a deletion hunk + ExcerptRange { + context: Point::new(1, 0)..Point::new(2, 5), + primary: Default::default(), + }, + // excerpt fully contains an insertion hunk + ExcerptRange { + context: Point::new(4, 0)..Point::new(6, 5), + primary: Default::default(), + }, + ], + cx, + ); + multibuffer + }); + + let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx)); + + assert_eq!( + snapshot.text(), + " + 1.zero + 1.ONE + 1.FIVE + 1.six + 2.zero + 2.one + 2.two + 2.one + 2.two + 2.four + 2.five + 2.six" + .unindent() + ); + + assert_eq!( + snapshot + .git_diff_hunks_in_range(0..12, false) + .map(|hunk| (hunk.status(), hunk.buffer_range)) + .collect::>(), + &[ + (DiffHunkStatus::Modified, 1..2), + (DiffHunkStatus::Modified, 2..3), + //TODO: Define better when and where removed hunks show up at range extremities + (DiffHunkStatus::Removed, 6..6), + (DiffHunkStatus::Removed, 8..8), + (DiffHunkStatus::Added, 10..11), + ] + ); + } + #[gpui::test(iterations = 100)] fn test_random_multibuffer(cx: &mut MutableAppContext, mut rng: StdRng) { let operations = env::var("OPERATIONS") diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 066a7df2248a0bfb44359cb54d4b9dd6eceb5b5f..b28af26f1679da459aaaab270233f8fabda00c2c 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -79,10 +79,10 @@ impl BufferDiff { ) -> impl 'a + Iterator> { let start = buffer.anchor_before(Point::new(range.start, 0)); let end = buffer.anchor_after(Point::new(range.end, 0)); - self.hunks_in_range(start..end, buffer, reversed) + self.hunks_intersecting_range(start..end, buffer, reversed) } - pub fn hunks_in_range<'a>( + pub fn hunks_intersecting_range<'a>( &'a self, range: Range, buffer: &'a BufferSnapshot, @@ -151,7 +151,7 @@ impl BufferDiff { fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { let start = text.anchor_before(Point::new(0, 0)); let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); - self.hunks_in_range(start..end, text, false) + self.hunks_intersecting_range(start..end, text, false) } fn diff<'a>(head: &'a str, current: &'a str) 
-> Option> { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 4bf0f91a2a5833d4d755eb5de0bcb5b0676f0a79..a78bb4af79a4b69796e787a80602063d1cae9ba3 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2318,12 +2318,13 @@ impl BufferSnapshot { self.git_diff.hunks_in_row_range(range, self, reversed) } - pub fn git_diff_hunks_in_range<'a>( + pub fn git_diff_hunks_intersecting_range<'a>( &'a self, range: Range, reversed: bool, ) -> impl 'a + Iterator> { - self.git_diff.hunks_in_range(range, self, reversed) + self.git_diff + .hunks_intersecting_range(range, self, reversed) } pub fn diagnostics_in_range<'a, T, O>( From ebd0c5d000b1c1b6d8ec1bc225266b2cbf6c2522 Mon Sep 17 00:00:00 2001 From: Julia Date: Thu, 15 Dec 2022 18:17:32 -0500 Subject: [PATCH 11/17] Handle reversed=true for multi-buffer git-hunks-in-range iteration Co-Authored-By: Nathan Sobo --- crates/editor/src/multi_buffer.rs | 48 ++++++++++++++++++++++++------- 1 file changed, 38 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index fab55a5099d9abd6eaa393c359d83cf784849ff5..b76890efb0d74d154f42885d8240076dcbd86ac8 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2636,7 +2636,15 @@ impl MultiBufferSnapshot { reversed: bool, ) -> impl 'a + Iterator> { let mut cursor = self.excerpts.cursor::(); - cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &()); + + if reversed { + cursor.seek(&Point::new(row_range.end, 0), Bias::Left, &()); + if cursor.item().is_none() { + cursor.prev(&()); + } + } else { + cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &()); + } std::iter::from_fn(move || { let excerpt = cursor.item()?; @@ -2685,7 +2693,12 @@ impl MultiBufferSnapshot { }) }); - cursor.next(&()); + if reversed { + cursor.prev(&()); + } else { + cursor.next(&()); + } + Some(buffer_hunks) }) .flatten() @@ -4195,19 +4208,34 @@ mod tests { .unindent() ); + let expected = [ + (DiffHunkStatus::Modified, 1..2), + (DiffHunkStatus::Modified, 2..3), + //TODO: Define better when and where removed hunks show up at range extremities + (DiffHunkStatus::Removed, 6..6), + (DiffHunkStatus::Removed, 8..8), + (DiffHunkStatus::Added, 10..11), + ]; + assert_eq!( snapshot .git_diff_hunks_in_range(0..12, false) .map(|hunk| (hunk.status(), hunk.buffer_range)) .collect::>(), - &[ - (DiffHunkStatus::Modified, 1..2), - (DiffHunkStatus::Modified, 2..3), - //TODO: Define better when and where removed hunks show up at range extremities - (DiffHunkStatus::Removed, 6..6), - (DiffHunkStatus::Removed, 8..8), - (DiffHunkStatus::Added, 10..11), - ] + &expected, + ); + + assert_eq!( + snapshot + .git_diff_hunks_in_range(0..12, true) + .map(|hunk| (hunk.status(), hunk.buffer_range)) + .collect::>(), + expected + .iter() + .rev() + .cloned() + .collect::>() + .as_slice(), ); } From 2c47bd4a9764ca8a8369bf3c52eefa59edbe3d8c Mon Sep 17 00:00:00 2001 From: Kay Simmons Date: Fri, 16 Dec 2022 15:45:17 -0800 Subject: [PATCH 12/17] Clear stale projects if they no longer exist --- crates/db/src/query.rs | 4 +- crates/recent_projects/src/recent_projects.rs | 42 ++++++++++++------- crates/workspace/src/persistence.rs | 27 +++++++++++- 3 files changed, 53 insertions(+), 20 deletions(-) diff --git a/crates/db/src/query.rs b/crates/db/src/query.rs index 731fca15cb5c47b58e89aac1eb2a7b42189829c2..20c2a267ffa3c357aeb75b0d6db3769efe884280 100644 --- a/crates/db/src/query.rs +++ b/crates/db/src/query.rs @@ -80,7 +80,7 @@ 
macro_rules! query { let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); - self.select::<$return_type>(sql_stmt)?(()) + self.select::<$return_type>(sql_stmt)?() .context(::std::format!( "Error in {}, select_row failed to execute or parse for: {}", ::std::stringify!($id), @@ -95,7 +95,7 @@ macro_rules! query { self.write(|connection| { let sql_stmt = $crate::sqlez_macros::sql!($($sql)+); - connection.select::<$return_type>(sql_stmt)?(()) + connection.select::<$return_type>(sql_stmt)?() .context(::std::format!( "Error in {}, select_row failed to execute or parse for: {}", ::std::stringify!($id), diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 1842540db38c33fa25b735c8fe2c0dc5616f7037..42ff2b2f1c3cb0a4e34ecc02c10415accc8f749e 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -11,9 +11,7 @@ use highlighted_workspace_location::HighlightedWorkspaceLocation; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use settings::Settings; -use workspace::{OpenPaths, Workspace, WorkspaceLocation}; - -const RECENT_LIMIT: usize = 100; +use workspace::{OpenPaths, Workspace, WorkspaceLocation, WORKSPACE_DB}; actions!(recent_projects, [Toggle]); @@ -30,14 +28,8 @@ struct RecentProjectsView { } impl RecentProjectsView { - fn new(cx: &mut ViewContext) -> Self { + fn new(workspace_locations: Vec, cx: &mut ViewContext) -> Self { let handle = cx.weak_handle(); - let workspace_locations: Vec = workspace::WORKSPACE_DB - .recent_workspaces(RECENT_LIMIT) - .unwrap_or_default() - .into_iter() - .map(|(_, location)| location) - .collect(); Self { picker: cx.add_view(|cx| { Picker::new("Recent Projects...", handle, cx).with_max_size(800., 1200.) @@ -48,12 +40,30 @@ impl RecentProjectsView { } } - fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { - workspace.toggle_modal(cx, |_, cx| { - let view = cx.add_view(|cx| Self::new(cx)); - cx.subscribe(&view, Self::on_event).detach(); - view - }); + fn toggle(_: &mut Workspace, _: &Toggle, cx: &mut ViewContext) { + cx.spawn(|workspace, mut cx| async move { + let workspace_locations = cx + .background() + .spawn(async { + WORKSPACE_DB + .recent_workspaces_on_disk() + .await + .unwrap_or_default() + .into_iter() + .map(|(_, location)| location) + .collect() + }) + .await; + + workspace.update(&mut cx, |workspace, cx| { + workspace.toggle_modal(cx, |_, cx| { + let view = cx.add_view(|cx| Self::new(workspace_locations, cx)); + cx.subscribe(&view, Self::on_event).detach(); + view + }); + }) + }) + .detach(); } fn on_event( diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 2d4ae919f95d4fcaeb8f0a7466e39098132e1003..fcbdb1deba56fe5e74f5b039600edcd804503009 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -196,14 +196,37 @@ impl WorkspaceDb { } query! { - pub fn recent_workspaces(limit: usize) -> Result> { + fn recent_workspaces() -> Result> { SELECT workspace_id, workspace_location FROM workspaces WHERE workspace_location IS NOT NULL ORDER BY timestamp DESC - LIMIT ? } } + + query! { + async fn delete_stale_workspace(id: WorkspaceId) -> Result<()> { + DELETE FROM workspaces + WHERE workspace_id IS ? + } + } + + // Returns the recent locations which are still valid on disk and deletes ones which no longer + // exist. 
+ pub async fn recent_workspaces_on_disk(&self) -> Result> { + let mut result = Vec::new(); + let mut delete_tasks = Vec::new(); + for (id, location) in self.recent_workspaces()? { + if location.paths().iter().all(|path| dbg!(path).exists()) { + result.push((id, location)); + } else { + delete_tasks.push(self.delete_stale_workspace(id)); + } + } + + futures::future::join_all(delete_tasks).await; + Ok(result) + } fn get_center_pane_group(&self, workspace_id: WorkspaceId) -> Result { self.get_pane_group(workspace_id, None)? From 8c64514570db92fd57893507d2e5da1812a26b85 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sat, 17 Dec 2022 12:03:51 -0800 Subject: [PATCH 13/17] Add ZED_STATELESS env var, for suppressing persistence Use this env var in the start-local-collaboration script to make the behavior more predictable. --- crates/db/src/db.rs | 5 +++++ script/start-local-collaboration | 1 + 2 files changed, 6 insertions(+) diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 704ac4049df5790b16e6d41b87142f1b619be6e2..3412a0baff64ec5203c0124af572152bb7b65e20 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -39,6 +39,7 @@ const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB"; const DB_FILE_NAME: &'static str = "db.sqlite"; lazy_static::lazy_static! { + static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty()); static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(()); pub static ref BACKUP_DB_PATH: RwLock> = RwLock::new(None); pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false); @@ -49,6 +50,10 @@ lazy_static::lazy_static! { /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. pub async fn open_db(db_dir: &Path, release_channel: &ReleaseChannel) -> ThreadSafeConnection { + if *ZED_STATELESS { + return open_fallback_db().await; + } + let release_channel_name = release_channel.dev_name(); let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); diff --git a/script/start-local-collaboration b/script/start-local-collaboration index 9c63b301e550253e2e64d9526eb30e0f4a19f7bf..82341bf6db0d15ccb0a1ac84af5841b2279f6716 100755 --- a/script/start-local-collaboration +++ b/script/start-local-collaboration @@ -36,6 +36,7 @@ position_1=0,0 position_2=${width},0 # Authenticate using the collab server's admin secret. +export ZED_STATELESS=1 export ZED_ADMIN_API_TOKEN=secret export ZED_SERVER_URL=http://localhost:8080 export ZED_WINDOW_SIZE=${width},${height} From 1e02ebbd11bdb3faede5a92a593e73051ef195b0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sat, 17 Dec 2022 14:00:53 -0800 Subject: [PATCH 14/17] Replicate pending selections separately from other selections This fixes a panic that would occur when a leader created a pending selection that overlapped another selection, because the follower would attempt to treat that pending selection as non-pending, which would violate the invariant that selections are sorted and disjoint. 
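A minimal sketch of the invariant described above, with plain ranges standing in for anchored selections (all names here are illustrative, not editor code): folding an overlapping pending selection into the replicated set violates the sorted-and-disjoint property, while carrying it in its own field, as the new pending_selection message field does below, leaves the set intact.

    use std::ops::Range;

    // Illustrative check: replicated selections must be sorted and pairwise disjoint.
    fn is_sorted_and_disjoint(selections: &[Range<usize>]) -> bool {
        selections
            .windows(2)
            .all(|pair| pair[0].start <= pair[1].start && pair[0].end <= pair[1].start)
    }

    fn main() {
        let disjoint: Vec<Range<usize>> = vec![0..2, 5..8];
        let pending: Range<usize> = 1..6; // overlaps both replicated selections

        // Treating the pending selection as just another selection, as followers
        // previously had to, yields an overlapping set:
        let mut merged = disjoint.clone();
        merged.push(pending);
        merged.sort_by_key(|selection| selection.start);
        assert!(!is_sorted_and_disjoint(&merged));

        // Replicating it separately leaves the selection set valid:
        assert!(is_sorted_and_disjoint(&disjoint));
    }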
--- crates/editor/src/editor.rs | 6 ++++++ crates/editor/src/items.rs | 27 ++++++++++++++++++++++----- crates/rpc/proto/zed.proto | 14 ++++++++------ crates/rpc/src/rpc.rs | 2 +- 4 files changed, 37 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8a3c7452ef5971a6dfdac39a0ab53d24dec3f550..d8ee49866b404d4d1d12efbd540b467096485d81 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5453,11 +5453,17 @@ impl Editor { pub fn set_selections_from_remote( &mut self, selections: Vec>, + pending_selection: Option>, cx: &mut ViewContext, ) { let old_cursor_position = self.selections.newest_anchor().head(); self.selections.change_with(cx, |s| { s.select_anchors(selections); + if let Some(pending_selection) = pending_selection { + s.set_pending(pending_selection, SelectMode::Character); + } else { + s.clear_pending(); + } }); self.selections_did_change(false, &old_cursor_position, cx); } diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 0057df778b3d2795e9f02dd21e2fbc6c341c8762..9bf7106c683442877a81552a4947106f766d6ced 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -130,13 +130,17 @@ impl FollowableItem for Editor { .ok_or_else(|| anyhow!("invalid selection")) }) .collect::>>()?; + let pending_selection = state + .pending_selection + .map(|selection| deserialize_selection(&buffer, selection)) + .flatten(); let scroll_top_anchor = state .scroll_top_anchor .and_then(|anchor| deserialize_anchor(&buffer, anchor)); drop(buffer); - if !selections.is_empty() { - editor.set_selections_from_remote(selections, cx); + if !selections.is_empty() || pending_selection.is_some() { + editor.set_selections_from_remote(selections, pending_selection, cx); } if let Some(scroll_top_anchor) = scroll_top_anchor { @@ -216,6 +220,11 @@ impl FollowableItem for Editor { .iter() .map(serialize_selection) .collect(), + pending_selection: self + .selections + .pending_anchor() + .as_ref() + .map(serialize_selection), })) } @@ -269,9 +278,13 @@ impl FollowableItem for Editor { .selections .disjoint_anchors() .iter() - .chain(self.selections.pending_anchor().as_ref()) .map(serialize_selection) .collect(); + update.pending_selection = self + .selections + .pending_anchor() + .as_ref() + .map(serialize_selection); true } _ => false, @@ -307,6 +320,10 @@ impl FollowableItem for Editor { .into_iter() .filter_map(|selection| deserialize_selection(&multibuffer, selection)) .collect::>(); + let pending_selection = message + .pending_selection + .and_then(|selection| deserialize_selection(&multibuffer, selection)); + let scroll_top_anchor = message .scroll_top_anchor .and_then(|anchor| deserialize_anchor(&multibuffer, anchor)); @@ -361,8 +378,8 @@ impl FollowableItem for Editor { multibuffer.remove_excerpts(removals, cx); }); - if !selections.is_empty() { - this.set_selections_from_remote(selections, cx); + if !selections.is_empty() || pending_selection.is_some() { + this.set_selections_from_remote(selections, pending_selection, cx); this.request_autoscroll_remotely(Autoscroll::newest(), cx); } else if let Some(anchor) = scroll_top_anchor { this.set_scroll_anchor_remote(ScrollAnchor { diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 9528bd10b7de96ad1e05063c6c445786fc3d8f5c..75b8d4d4574a4de52af8a5fef1fdea52eaaaf444 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -853,9 +853,10 @@ message UpdateView { repeated ExcerptInsertion inserted_excerpts = 1; 
repeated uint64 deleted_excerpts = 2; repeated Selection selections = 3; - EditorAnchor scroll_top_anchor = 4; - float scroll_x = 5; - float scroll_y = 6; + optional Selection pending_selection = 4; + EditorAnchor scroll_top_anchor = 5; + float scroll_x = 6; + float scroll_y = 7; } } @@ -872,9 +873,10 @@ message View { optional string title = 2; repeated Excerpt excerpts = 3; repeated Selection selections = 4; - EditorAnchor scroll_top_anchor = 5; - float scroll_x = 6; - float scroll_y = 7; + optional Selection pending_selection = 5; + EditorAnchor scroll_top_anchor = 6; + float scroll_x = 7; + float scroll_y = 8; } } diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index 01dda55cefc1d4193c5fee20a59f69a494c93d5c..7fc59d86fbccfa8ab787d3eed12063bb94fa7ac4 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -6,4 +6,4 @@ pub use conn::Connection; pub use peer::*; mod macros; -pub const PROTOCOL_VERSION: u32 = 43; +pub const PROTOCOL_VERSION: u32 = 44; From cb60eb8a57ad2a2db23cbdc738c218c679efc082 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 19 Dec 2022 16:27:25 +0100 Subject: [PATCH 15/17] Update rust-analyzer's `disk_based_diagnostics_progress_token` --- crates/zed/src/languages/rust.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/src/languages/rust.rs b/crates/zed/src/languages/rust.rs index ba51fda253981eae6c2df57388fff1ae45a60452..30971fef1ae726802fc694c2c2e4c1f1e0d79079 100644 --- a/crates/zed/src/languages/rust.rs +++ b/crates/zed/src/languages/rust.rs @@ -93,7 +93,7 @@ impl LspAdapter for RustLspAdapter { } async fn disk_based_diagnostics_progress_token(&self) -> Option { - Some("rust-analyzer/checkOnSave".into()) + Some("rust-analyzer/flycheck".into()) } async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) { From c49573dc1195e615812d54a35818b10b709fa4f9 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 19 Dec 2022 11:11:10 -0500 Subject: [PATCH 16/17] Format problematic DB macros --- crates/db/src/db.rs | 150 ++++++++++++++++------------ crates/workspace/src/persistence.rs | 88 ++++++++-------- 2 files changed, 133 insertions(+), 105 deletions(-) diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 3412a0baff64ec5203c0124af572152bb7b65e20..921b9c8a53d2f09dc28f208e508d6ecbc68eb42b 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -20,8 +20,8 @@ use std::fs::create_dir_all; use std::path::{Path, PathBuf}; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::{SystemTime, UNIX_EPOCH}; -use util::{async_iife, ResultExt}; use util::channel::ReleaseChannel; +use util::{async_iife, ResultExt}; const CONNECTION_INITIALIZE_QUERY: &'static str = sql!( PRAGMA foreign_keys=TRUE; @@ -42,14 +42,17 @@ lazy_static::lazy_static! { static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty()); static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(()); pub static ref BACKUP_DB_PATH: RwLock> = RwLock::new(None); - pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false); + pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false); } /// Open or create a database at the given directory path. /// This will retry a couple times if there are failures. If opening fails once, the db directory /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. 
-pub async fn open_db(db_dir: &Path, release_channel: &ReleaseChannel) -> ThreadSafeConnection { +pub async fn open_db( + db_dir: &Path, + release_channel: &ReleaseChannel, +) -> ThreadSafeConnection { if *ZED_STATELESS { return open_fallback_db().await; } @@ -69,11 +72,11 @@ pub async fn open_db(db_dir: &Path, release_channel: &Rel // // Basically: Don't ever push invalid migrations to stable or everyone will have // a bad time. - + // If no db folder, create one at 0-{channel} create_dir_all(&main_db_dir).context("Could not create db directory")?; let db_path = main_db_dir.join(Path::new(DB_FILE_NAME)); - + // Optimistically open databases in parallel if !DB_FILE_OPERATIONS.is_locked() { // Try building a connection @@ -81,7 +84,7 @@ pub async fn open_db(db_dir: &Path, release_channel: &Rel return Ok(connection) }; } - + // Take a lock in the failure case so that we move the db once per process instead // of potentially multiple times from different threads. This shouldn't happen in the // normal path @@ -89,12 +92,12 @@ pub async fn open_db(db_dir: &Path, release_channel: &Rel if let Some(connection) = open_main_db(&db_path).await { return Ok(connection) }; - + let backup_timestamp = SystemTime::now() .duration_since(UNIX_EPOCH) .expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime") .as_millis(); - + // If failed, move 0-{channel} to {current unix timestamp}-{channel} let backup_db_dir = db_dir.join(Path::new(&format!( "{}-{}", @@ -110,7 +113,7 @@ pub async fn open_db(db_dir: &Path, release_channel: &Rel let mut guard = BACKUP_DB_PATH.write(); *guard = Some(backup_db_dir); } - + // Create a new 0-{channel} create_dir_all(&main_db_dir).context("Should be able to create the database directory")?; let db_path = main_db_dir.join(Path::new(DB_FILE_NAME)); @@ -122,10 +125,10 @@ pub async fn open_db(db_dir: &Path, release_channel: &Rel if let Some(connection) = connection { return connection; } - + // Set another static ref so that we can escalate the notification ALL_FILE_DB_FAILED.store(true, Ordering::Release); - + // If still failed, create an in memory db with a known name open_fallback_db().await } @@ -179,15 +182,15 @@ macro_rules! define_connection { &self.0 } } - + impl $crate::sqlez::domain::Domain for $t { fn name() -> &'static str { stringify!($t) } - + fn migrations() -> &'static [&'static str] { $migrations - } + } } #[cfg(any(test, feature = "test-support"))] @@ -210,15 +213,15 @@ macro_rules! define_connection { &self.0 } } - + impl $crate::sqlez::domain::Domain for $t { fn name() -> &'static str { stringify!($t) } - + fn migrations() -> &'static [&'static str] { $migrations - } + } } #[cfg(any(test, feature = "test-support"))] @@ -237,134 +240,157 @@ macro_rules! 
define_connection { mod tests { use std::{fs, thread}; - use sqlez::{domain::Domain, connection::Connection}; + use sqlez::{connection::Connection, domain::Domain}; use sqlez_macros::sql; use tempdir::TempDir; use crate::{open_db, DB_FILE_NAME}; - + // Test bad migration panics #[gpui::test] #[should_panic] async fn test_bad_migration_panics() { enum BadDB {} - + impl Domain for BadDB { fn name() -> &'static str { "db_tests" } - + fn migrations() -> &'static [&'static str] { - &[sql!(CREATE TABLE test(value);), + &[ + sql!(CREATE TABLE test(value);), // failure because test already exists - sql!(CREATE TABLE test(value);)] + sql!(CREATE TABLE test(value);), + ] } } - + let tempdir = TempDir::new("DbTests").unwrap(); let _bad_db = open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; } - + /// Test that DB exists but corrupted (causing recreate) #[gpui::test] async fn test_db_corruption() { enum CorruptedDB {} - + impl Domain for CorruptedDB { fn name() -> &'static str { "db_tests" } - + fn migrations() -> &'static [&'static str] { &[sql!(CREATE TABLE test(value);)] } } - + enum GoodDB {} - + impl Domain for GoodDB { fn name() -> &'static str { "db_tests" //Notice same name } - + fn migrations() -> &'static [&'static str] { &[sql!(CREATE TABLE test2(value);)] //But different migration } } - + let tempdir = TempDir::new("DbTests").unwrap(); { - let corrupt_db = open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; + let corrupt_db = + open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; assert!(corrupt_db.persistent()); } - - + let good_db = open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; - assert!(good_db.select_row::("SELECT * FROM test2").unwrap()().unwrap().is_none()); - - let mut corrupted_backup_dir = fs::read_dir( - tempdir.path() - ).unwrap().find(|entry| { - !entry.as_ref().unwrap().file_name().to_str().unwrap().starts_with("0") - } - ).unwrap().unwrap().path(); + assert!( + good_db.select_row::("SELECT * FROM test2").unwrap()() + .unwrap() + .is_none() + ); + + let mut corrupted_backup_dir = fs::read_dir(tempdir.path()) + .unwrap() + .find(|entry| { + !entry + .as_ref() + .unwrap() + .file_name() + .to_str() + .unwrap() + .starts_with("0") + }) + .unwrap() + .unwrap() + .path(); corrupted_backup_dir.push(DB_FILE_NAME); - + dbg!(&corrupted_backup_dir); - + let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy()); - assert!(backup.select_row::("SELECT * FROM test").unwrap()().unwrap().is_none()); + assert!(backup.select_row::("SELECT * FROM test").unwrap()() + .unwrap() + .is_none()); } - + /// Test that DB exists but corrupted (causing recreate) #[gpui::test] async fn test_simultaneous_db_corruption() { enum CorruptedDB {} - + impl Domain for CorruptedDB { fn name() -> &'static str { "db_tests" } - + fn migrations() -> &'static [&'static str] { &[sql!(CREATE TABLE test(value);)] } } - + enum GoodDB {} - + impl Domain for GoodDB { fn name() -> &'static str { "db_tests" //Notice same name } - + fn migrations() -> &'static [&'static str] { &[sql!(CREATE TABLE test2(value);)] //But different migration } } - + let tempdir = TempDir::new("DbTests").unwrap(); { // Setup the bad database - let corrupt_db = open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; + let corrupt_db = + open_db::(tempdir.path(), &util::channel::ReleaseChannel::Dev).await; assert!(corrupt_db.persistent()); } - + // Try to connect to it a bunch of times at once let mut guards = vec![]; for _ in 0..10 { let 
tmp_path = tempdir.path().to_path_buf(); let guard = thread::spawn(move || { - let good_db = smol::block_on(open_db::(tmp_path.as_path(), &util::channel::ReleaseChannel::Dev)); - assert!(good_db.select_row::("SELECT * FROM test2").unwrap()().unwrap().is_none()); + let good_db = smol::block_on(open_db::( + tmp_path.as_path(), + &util::channel::ReleaseChannel::Dev, + )); + assert!( + good_db.select_row::("SELECT * FROM test2").unwrap()() + .unwrap() + .is_none() + ); }); - + guards.push(guard); - } - - for guard in guards.into_iter() { - assert!(guard.join().is_ok()); - } + + for guard in guards.into_iter() { + assert!(guard.join().is_ok()); + } } } diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 8e909583deb56a7746ec8091f8909ab330abb296..bd4e6653d903c319ddf0a6510b0b1df35717f96c 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -8,7 +8,7 @@ use anyhow::{anyhow, bail, Context, Result}; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::Axis; -use util::{ unzip_option, ResultExt}; +use util::{unzip_option, ResultExt}; use crate::dock::DockPosition; use crate::WorkspaceId; @@ -31,7 +31,7 @@ define_connection! { timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, FOREIGN KEY(dock_pane) REFERENCES panes(pane_id) ) STRICT; - + CREATE TABLE pane_groups( group_id INTEGER PRIMARY KEY, workspace_id INTEGER NOT NULL, @@ -43,7 +43,7 @@ define_connection! { ON UPDATE CASCADE, FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE ) STRICT; - + CREATE TABLE panes( pane_id INTEGER PRIMARY KEY, workspace_id INTEGER NOT NULL, @@ -52,7 +52,7 @@ define_connection! { ON DELETE CASCADE ON UPDATE CASCADE ) STRICT; - + CREATE TABLE center_panes( pane_id INTEGER PRIMARY KEY, parent_group_id INTEGER, // NULL means that this is a root pane @@ -61,7 +61,7 @@ define_connection! { ON DELETE CASCADE, FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE ) STRICT; - + CREATE TABLE items( item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique workspace_id INTEGER NOT NULL, @@ -96,10 +96,10 @@ impl WorkspaceDb { WorkspaceLocation, bool, DockPosition, - ) = + ) = self.select_row_bound(sql!{ SELECT workspace_id, workspace_location, left_sidebar_open, dock_visible, dock_anchor - FROM workspaces + FROM workspaces WHERE workspace_location = ? }) .and_then(|mut prepared_statement| (prepared_statement)(&workspace_location)) @@ -119,7 +119,7 @@ impl WorkspaceDb { .context("Getting center group") .log_err()?, dock_position, - left_sidebar_open + left_sidebar_open, }) } @@ -158,7 +158,12 @@ impl WorkspaceDb { dock_visible = ?4, dock_anchor = ?5, timestamp = CURRENT_TIMESTAMP - ))?((workspace.id, &workspace.location, workspace.left_sidebar_open, workspace.dock_position)) + ))?(( + workspace.id, + &workspace.location, + workspace.left_sidebar_open, + workspace.dock_position, + )) .context("Updating workspace")?; // Save center pane group and dock pane @@ -191,20 +196,20 @@ impl WorkspaceDb { query! { fn recent_workspaces() -> Result> { - SELECT workspace_id, workspace_location + SELECT workspace_id, workspace_location FROM workspaces WHERE workspace_location IS NOT NULL - ORDER BY timestamp DESC + ORDER BY timestamp DESC } } - + query! { async fn delete_stale_workspace(id: WorkspaceId) -> Result<()> { DELETE FROM workspaces WHERE workspace_id IS ? 
} } - + // Returns the recent locations which are still valid on disk and deletes ones which no longer // exist. pub async fn recent_workspaces_on_disk(&self) -> Result> { @@ -217,7 +222,7 @@ impl WorkspaceDb { delete_tasks.push(self.delete_stale_workspace(id)); } } - + futures::future::join_all(delete_tasks).await; Ok(result) } @@ -233,10 +238,16 @@ impl WorkspaceDb { } fn get_center_pane_group(&self, workspace_id: WorkspaceId) -> Result { - Ok(self.get_pane_group(workspace_id, None)? + Ok(self + .get_pane_group(workspace_id, None)? .into_iter() .next() - .unwrap_or_else(|| SerializedPaneGroup::Pane(SerializedPane { active: true, children: vec![] }))) + .unwrap_or_else(|| { + SerializedPaneGroup::Pane(SerializedPane { + active: true, + children: vec![], + }) + })) } fn get_pane_group( @@ -248,7 +259,7 @@ impl WorkspaceDb { type GroupOrPane = (Option, Option, Option, Option); self.select_bound::(sql!( SELECT group_id, axis, pane_id, active - FROM (SELECT + FROM (SELECT group_id, axis, NULL as pane_id, @@ -256,18 +267,18 @@ impl WorkspaceDb { position, parent_group_id, workspace_id - FROM pane_groups + FROM pane_groups UNION - SELECT + SELECT + NULL, NULL, - NULL, center_panes.pane_id, panes.active as active, position, parent_group_id, panes.workspace_id as workspace_id FROM center_panes - JOIN panes ON center_panes.pane_id = panes.pane_id) + JOIN panes ON center_panes.pane_id = panes.pane_id) WHERE parent_group_id IS ? AND workspace_id = ? ORDER BY position ))?((group_id, workspace_id))? @@ -290,13 +301,12 @@ impl WorkspaceDb { // Filter out panes and pane groups which don't have any children or items .filter(|pane_group| match pane_group { Ok(SerializedPaneGroup::Group { children, .. }) => !children.is_empty(), - Ok(SerializedPaneGroup::Pane(pane)) => !pane.children.is_empty(), + Ok(SerializedPaneGroup::Pane(pane)) => !pane.children.is_empty(), _ => true, }) .collect::>() } - fn save_pane_group( conn: &Connection, workspace_id: WorkspaceId, @@ -308,15 +318,10 @@ impl WorkspaceDb { let (parent_id, position) = unzip_option(parent); let group_id = conn.select_row_bound::<_, i64>(sql!( - INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) - VALUES (?, ?, ?, ?) + INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) + VALUES (?, ?, ?, ?) RETURNING group_id - ))?(( - workspace_id, - parent_id, - position, - *axis, - ))? + ))?((workspace_id, parent_id, position, *axis))? .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?; for (position, group) in children.iter().enumerate() { @@ -337,9 +342,7 @@ impl WorkspaceDb { SELECT pane_id, active FROM panes WHERE pane_id = (SELECT dock_pane FROM workspaces WHERE workspace_id = ?) - ))?( - workspace_id, - )? + ))?(workspace_id)? .context("No dock pane for workspace")?; Ok(SerializedPane::new( @@ -356,8 +359,8 @@ impl WorkspaceDb { dock: bool, ) -> Result { let pane_id = conn.select_row_bound::<_, i64>(sql!( - INSERT INTO panes(workspace_id, active) - VALUES (?, ?) + INSERT INTO panes(workspace_id, active) + VALUES (?, ?) RETURNING pane_id ))?((workspace_id, pane.active))? .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?; @@ -399,14 +402,13 @@ impl WorkspaceDb { Ok(()) } - query!{ + query! { pub async fn update_timestamp(workspace_id: WorkspaceId) -> Result<()> { UPDATE workspaces SET timestamp = CURRENT_TIMESTAMP WHERE workspace_id = ? 
} } - } #[cfg(test)] @@ -495,7 +497,7 @@ mod tests { dock_position: crate::dock::DockPosition::Shown(DockAnchor::Bottom), center_group: Default::default(), dock_pane: Default::default(), - left_sidebar_open: true + left_sidebar_open: true, }; let mut workspace_2 = SerializedWorkspace { @@ -504,7 +506,7 @@ mod tests { dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Expanded), center_group: Default::default(), dock_pane: Default::default(), - left_sidebar_open: false + left_sidebar_open: false, }; db.save_workspace(workspace_1.clone()).await; @@ -610,7 +612,7 @@ mod tests { dock_position: DockPosition::Shown(DockAnchor::Bottom), center_group, dock_pane, - left_sidebar_open: true + left_sidebar_open: true, }; db.save_workspace(workspace.clone()).await; @@ -683,7 +685,7 @@ mod tests { dock_position: DockPosition::Shown(DockAnchor::Right), center_group: Default::default(), dock_pane: Default::default(), - left_sidebar_open: false + left_sidebar_open: false, }; db.save_workspace(workspace_3.clone()).await; @@ -718,7 +720,7 @@ mod tests { dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Right), center_group: center_group.clone(), dock_pane, - left_sidebar_open: true + left_sidebar_open: true, } } From c8b209306e019982f2bc606f3a5b1730631bd170 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 19 Dec 2022 11:29:22 -0800 Subject: [PATCH 17/17] collab 0.4.2 --- Cargo.lock | 2 +- crates/collab/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e6be29fb86e5f9a4b7a40c3fa00b3e56f719e06f..57a7dde194c899107baba60869ebbef8536798dc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1131,7 +1131,7 @@ dependencies = [ [[package]] name = "collab" -version = "0.4.1" +version = "0.4.2" dependencies = [ "anyhow", "async-tungstenite", diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 2bdc1f36a12220255711f6fc1b3333df6e89b8ea..261289b9b325f1c8e291a35538ab7e2c5a3d0f64 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] default-run = "collab" edition = "2021" name = "collab" -version = "0.4.1" +version = "0.4.2" [[bin]] name = "collab"
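Postscript on patch 13's ZED_STATELESS flag: the same environment-variable gating pattern, sketched in isolation. This is a hypothetical, self-contained rendering that uses std::sync::OnceLock in place of lazy_static; stateless() and open_db_path() are illustrative names rather than Zed APIs.

    use std::sync::OnceLock;

    // Compute the flag once from the environment, mirroring the lazy_static in db.rs.
    fn stateless() -> bool {
        static FLAG: OnceLock<bool> = OnceLock::new();
        *FLAG.get_or_init(|| std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty()))
    }

    // Illustrative stand-in for the persistence entry point: when the flag is set,
    // skip the on-disk database entirely, as open_db does by returning the fallback.
    fn open_db_path(db_dir: &str) -> String {
        if stateless() {
            return String::from(":memory:");
        }
        format!("{db_dir}/db.sqlite")
    }

    fn main() {
        std::env::set_var("ZED_STATELESS", "1");
        assert_eq!(open_db_path("/tmp/zed-db"), ":memory:");
    }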