diff --git a/zed/src/file_finder.rs b/zed/src/file_finder.rs
index caf560c16a54cd6ceadf2f4e035d25d3b26566a2..b6c6ad18b2357e0d7de32e88c9c108d119cf3ff6 100644
--- a/zed/src/file_finder.rs
+++ b/zed/src/file_finder.rs
@@ -1,9 +1,10 @@
 use crate::{
     editor::{self, Editor},
+    project::Project,
     settings::Settings,
     util,
     workspace::Workspace,
-    worktree::{match_paths, PathMatch},
+    worktree::PathMatch,
 };
 use gpui::{
     action,
@@ -13,8 +14,8 @@ use gpui::{
         menu::{SelectNext, SelectPrev},
         Binding,
     },
-    AppContext, Axis, Entity, MutableAppContext, RenderContext, Task, View, ViewContext,
-    ViewHandle, WeakViewHandle,
+    AppContext, Axis, Entity, ModelHandle, MutableAppContext, RenderContext, Task, View,
+    ViewContext, ViewHandle, WeakViewHandle,
 };
 use postage::watch;
 use std::{
@@ -29,7 +30,7 @@ use std::{
 pub struct FileFinder {
     handle: WeakViewHandle<Self>,
     settings: watch::Receiver<Settings>,
-    workspace: WeakViewHandle<Workspace>,
+    project: ModelHandle<Project>,
     query_editor: ViewHandle<Editor>,
     search_count: usize,
     latest_search_id: usize,
@@ -241,8 +242,8 @@ impl FileFinder {

     fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
         workspace.toggle_modal(cx, |cx, workspace| {
-            let handle = cx.handle();
-            let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), handle, cx));
+            let project = workspace.project().clone();
+            let finder = cx.add_view(|cx| Self::new(workspace.settings.clone(), project, cx));
             cx.subscribe(&finder, Self::on_event).detach();
             finder
         });
@@ -269,10 +270,10 @@ impl FileFinder {

     pub fn new(
         settings: watch::Receiver<Settings>,
-        workspace: ViewHandle<Workspace>,
+        project: ModelHandle<Project>,
         cx: &mut ViewContext<Self>,
     ) -> Self {
-        cx.observe(&workspace, Self::workspace_updated).detach();
+        cx.observe(&project, Self::project_updated).detach();

         let query_editor = cx.add_view(|cx| {
             Editor::single_line(
@@ -290,7 +291,7 @@ impl FileFinder {
         Self {
             handle: cx.handle().downgrade(),
             settings,
-            workspace: workspace.downgrade(),
+            project,
             query_editor,
             search_count: 0,
             latest_search_id: 0,
@@ -303,7 +304,7 @@ impl FileFinder {
         }
     }
-    fn workspace_updated(&mut self, _: ViewHandle<Workspace>, cx: &mut ViewContext<Self>) {
+    fn project_updated(&mut self, _: ModelHandle<Project>, cx: &mut ViewContext<Self>) {
         let query = self.query_editor.update(cx, |buffer, cx| buffer.text(cx));
         if let Some(task) = self.spawn_search(query, cx) {
             task.detach();
         }
@@ -381,30 +382,17 @@ impl FileFinder {
     #[must_use]
     fn spawn_search(&mut self, query: String, cx: &mut ViewContext<Self>) -> Option<Task<()>> {
-        let snapshots = self
-            .workspace
-            .upgrade(&cx)?
-            .read(cx)
-            .worktrees(cx)
-            .iter()
-            .map(|tree| tree.read(cx).snapshot())
-            .collect::<Vec<_>>();
         let search_id = util::post_inc(&mut self.search_count);
-        let background = cx.as_ref().background().clone();
         self.cancel_flag.store(true, atomic::Ordering::Relaxed);
         self.cancel_flag = Arc::new(AtomicBool::new(false));
         let cancel_flag = self.cancel_flag.clone();
+        let project = self.project.clone();
         Some(cx.spawn(|this, mut cx| async move {
-            let matches = match_paths(
-                &snapshots,
-                &query,
-                false,
-                false,
-                100,
-                cancel_flag.as_ref(),
-                background,
-            )
-            .await;
+            let matches = project
+                .read_with(&cx, |project, cx| {
+                    project.match_paths(&query, false, false, 100, cancel_flag.as_ref(), cx)
+                })
+                .await;
             let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed);
             this.update(&mut cx, |this, cx| {
                 this.update_matches((search_id, did_cancel, query, matches), cx)
             })
@@ -536,8 +524,13 @@ mod tests {
             .unwrap();
         cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
             .await;
-        let (_, finder) =
-            cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
+        let (_, finder) = cx.add_window(|cx| {
+            FileFinder::new(
+                app_state.settings.clone(),
+                workspace.read(cx).project().clone(),
+                cx,
+            )
+        });

         let query = "hi".to_string();
         finder
@@ -596,8 +589,13 @@ mod tests {
             .unwrap();
         cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
             .await;
-        let (_, finder) =
-            cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
+        let (_, finder) = cx.add_window(|cx| {
+            FileFinder::new(
+                app_state.settings.clone(),
+                workspace.read(cx).project().clone(),
+                cx,
+            )
+        });

         // Even though there is only one worktree, that worktree's filename
         // is included in the matching, because the worktree is a single file.
@@ -654,8 +652,13 @@

         cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
             .await;
-        let (_, finder) =
-            cx.add_window(|cx| FileFinder::new(app_state.settings.clone(), workspace.clone(), cx));
+        let (_, finder) = cx.add_window(|cx| {
+            FileFinder::new(
+                app_state.settings.clone(),
+                workspace.read(cx).project().clone(),
+                cx,
+            )
+        });

         // Run a search that matches two files with the same relative path.
         finder
diff --git a/zed/src/fuzzy.rs b/zed/src/fuzzy.rs
index 3b2732eb448a4b21c88567ffe8d78d011b7dda69..985ca209fa7195e3ca8b388c7395ff63c4a5e807 100644
--- a/zed/src/fuzzy.rs
+++ b/zed/src/fuzzy.rs
@@ -19,7 +19,7 @@ const BASE_DISTANCE_PENALTY: f64 = 0.6;
 const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
 const MIN_DISTANCE_PENALTY: f64 = 0.2;

-struct Matcher<'a> {
+pub struct Matcher<'a> {
     query: &'a [char],
     lowercase_query: &'a [char],
     query_char_bag: CharBag,
@@ -213,116 +213,8 @@ pub async fn match_strings(
     results
 }

-pub async fn match_paths(
-    snapshots: &[Snapshot],
-    query: &str,
-    include_ignored: bool,
-    smart_case: bool,
-    max_results: usize,
-    cancel_flag: &AtomicBool,
-    background: Arc<executor::Background>,
-) -> Vec<PathMatch> {
-    let path_count: usize = if include_ignored {
-        snapshots.iter().map(Snapshot::file_count).sum()
-    } else {
-        snapshots.iter().map(Snapshot::visible_file_count).sum()
-    };
-    if path_count == 0 {
-        return Vec::new();
-    }
-
-    let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
-    let query = query.chars().collect::<Vec<_>>();
-
-    let lowercase_query = &lowercase_query;
-    let query = &query;
-    let query_char_bag = CharBag::from(&lowercase_query[..]);
-
-    let num_cpus = background.num_cpus().min(path_count);
-    let segment_size = (path_count + num_cpus - 1) / num_cpus;
-    let mut segment_results = (0..num_cpus)
-        .map(|_| Vec::with_capacity(max_results))
-        .collect::<Vec<_>>();
-
-    background
-        .scoped(|scope| {
-            for (segment_idx, results) in segment_results.iter_mut().enumerate() {
-                scope.spawn(async move {
-                    let segment_start = segment_idx * segment_size;
-                    let segment_end = segment_start + segment_size;
-                    let mut matcher = Matcher::new(
-                        query,
-                        lowercase_query,
-                        query_char_bag,
-                        smart_case,
-                        max_results,
-                    );
-
-                    let mut tree_start = 0;
-                    for snapshot in snapshots {
-                        let tree_end = if include_ignored {
-                            tree_start + snapshot.file_count()
-                        } else {
-                            tree_start + snapshot.visible_file_count()
-                        };
-
-                        if tree_start < segment_end && segment_start < tree_end {
-                            let path_prefix: Arc<str> =
-                                if snapshot.root_entry().map_or(false, |e| e.is_file()) {
-                                    snapshot.root_name().into()
-                                } else if snapshots.len() > 1 {
-                                    format!("{}/", snapshot.root_name()).into()
-                                } else {
-                                    "".into()
-                                };
-
-                            let start = max(tree_start, segment_start) - tree_start;
-                            let end = min(tree_end, segment_end) - tree_start;
-                            let paths = snapshot
-                                .files(include_ignored, start)
-                                .take(end - start)
-                                .map(|entry| {
-                                    if let EntryKind::File(char_bag) = entry.kind {
-                                        PathMatchCandidate {
-                                            path: &entry.path,
-                                            char_bag,
-                                        }
-                                    } else {
-                                        unreachable!()
-                                    }
-                                });
-
-                            matcher.match_paths(
-                                snapshot.id(),
-                                path_prefix,
-                                paths,
-                                results,
-                                &cancel_flag,
-                            );
-                        }
-                        if tree_end >= segment_end {
-                            break;
-                        }
-                        tree_start = tree_end;
-                    }
-                })
-            }
-        })
-        .await;
-
-    let mut results = Vec::new();
-    for segment_result in segment_results {
-        if results.is_empty() {
-            results = segment_result;
-        } else {
-            util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(&a));
-        }
-    }
-    results
-}
-
 impl<'a> Matcher<'a> {
-    fn new(
+    pub fn new(
         query: &'a [char],
         lowercase_query: &'a [char],
         query_char_bag: CharBag,
@@ -343,7 +235,7 @@ impl<'a> Matcher<'a> {
         }
     }

-    fn match_strings(
+    pub fn match_strings(
         &mut self,
         candidates: &[StringMatchCandidate],
         results: &mut Vec<StringMatch>,
@@ -363,7 +255,7 @@ impl<'a> Matcher<'a> {
         )
     }

-    fn match_paths(
+    pub fn match_paths(
         &mut self,
         tree_id: usize,
         path_prefix: Arc<str>,
diff --git a/zed/src/project.rs b/zed/src/project.rs
index 3f7ef27a455a3527332747b2766ceefe2082dfce..7b5480209ed6176f7efae19411aa44dfeae87cf0 100644
--- a/zed/src/project.rs
+++ b/zed/src/project.rs
@@ -1,14 +1,20 @@
 use crate::{
     fs::Fs,
+    fuzzy::{CharBag, Matcher, PathMatchCandidate},
     language::LanguageRegistry,
     rpc::Client,
-    util::TryFutureExt as _,
-    worktree::{self, Worktree},
+    util::{self, TryFutureExt as _},
+    worktree::{self, EntryKind, PathMatch, Snapshot, Worktree},
     AppState,
 };
 use anyhow::Result;
-use gpui::{Entity, ModelContext, ModelHandle, Task};
-use std::{path::Path, sync::Arc};
+use futures::Future;
+use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
+use std::{
+    cmp,
+    path::Path,
+    sync::{atomic::AtomicBool, Arc},
+};

 pub struct Project {
     worktrees: Vec<ModelHandle<Worktree>>,
@@ -47,13 +53,13 @@ impl Project {

     pub fn add_local_worktree(
         &mut self,
-        path: &Path,
+        abs_path: &Path,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<ModelHandle<Worktree>>> {
         let fs = self.fs.clone();
         let rpc = self.rpc.clone();
         let languages = self.languages.clone();
-        let path = Arc::from(path);
+        let path = Arc::from(abs_path);
         cx.spawn(|this, mut cx| async move {
             let worktree = Worktree::open_local(rpc, path, fs, languages, &mut cx).await?;
             this.update(&mut cx, |this, cx| {
@@ -182,8 +188,239 @@ impl Project {
             keep
         });
     }
+
+    pub fn match_paths<'a>(
+        &self,
+        query: &str,
+        include_ignored: bool,
+        smart_case: bool,
+        max_results: usize,
+        cancel_flag: &'a AtomicBool,
+        cx: &AppContext,
+    ) -> impl 'a + Future<Output = Vec<PathMatch>> {
+        let snapshots = self
+            .worktrees
+            .iter()
+            .map(|worktree| worktree.read(cx).snapshot())
+            .collect::<Vec<_>>();
+
+        let path_count: usize = if include_ignored {
+            snapshots.iter().map(Snapshot::file_count).sum()
+        } else {
+            snapshots.iter().map(Snapshot::visible_file_count).sum()
+        };
+
+        let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
+        let query = query.chars().collect::<Vec<_>>();
+        let query_char_bag = CharBag::from(&lowercase_query[..]);
+
+        let background = cx.background().clone();
+
+        async move {
+            if path_count == 0 {
+                return Vec::new();
+            }
+
+            let num_cpus = background.num_cpus().min(path_count);
+            let segment_size = (path_count + num_cpus - 1) / num_cpus;
+            let mut segment_results = (0..num_cpus)
+                .map(|_| Vec::with_capacity(max_results))
+                .collect::<Vec<_>>();
+
+            let lowercase_query = &lowercase_query;
+            let query = &query;
+            let snapshots = snapshots.as_slice();
+
+            background
+                .scoped(|scope| {
+                    for (segment_idx, results) in segment_results.iter_mut().enumerate() {
+                        scope.spawn(async move {
+                            let segment_start = segment_idx * segment_size;
+                            let segment_end = segment_start + segment_size;
+                            let mut matcher = Matcher::new(
+                                query,
+                                lowercase_query,
+                                query_char_bag,
+                                smart_case,
+                                max_results,
+                            );
+
+                            let mut tree_start = 0;
+                            for snapshot in snapshots {
+                                let tree_end = if include_ignored {
+                                    tree_start + snapshot.file_count()
+                                } else {
+                                    tree_start + snapshot.visible_file_count()
+                                };
+
+                                if tree_start < segment_end && segment_start < tree_end {
+                                    let path_prefix: Arc<str> =
+                                        if snapshot.root_entry().map_or(false, |e| e.is_file()) {
+                                            snapshot.root_name().into()
+                                        } else if snapshots.len() > 1 {
+                                            format!("{}/", snapshot.root_name()).into()
+                                        } else {
+                                            "".into()
+                                        };
+
+                                    let start = cmp::max(tree_start, segment_start) - tree_start;
+                                    let end = cmp::min(tree_end, segment_end) - tree_start;
+                                    let paths = snapshot
+                                        .files(include_ignored, start)
+                                        .take(end - start)
+                                        .map(|entry| {
+                                            if let EntryKind::File(char_bag) = entry.kind {
+                                                PathMatchCandidate {
+                                                    path: &entry.path,
+                                                    char_bag,
+                                                }
+                                            } else {
+                                                unreachable!()
+                                            }
+                                        });
+
+                                    matcher.match_paths(
+                                        snapshot.id(),
+                                        path_prefix,
+                                        paths,
+                                        results,
+                                        &cancel_flag,
+                                    );
+                                }
+                                if tree_end >= segment_end {
+                                    break;
+                                }
+                                tree_start = tree_end;
+                            }
+                        })
+                    }
+                })
+                .await;
+
+            let mut results = Vec::new();
+            for segment_result in segment_results {
+                if results.is_empty() {
+                    results = segment_result;
+                } else {
+                    util::extend_sorted(&mut results, segment_result, max_results, |a, b| {
+                        b.cmp(&a)
+                    });
+                }
+            }
+            results
+        }
+    }
 }

 impl Entity for Project {
     type Event = Event;
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{
+        fs::RealFs,
+        test::{temp_tree, test_app_state},
+    };
+    use serde_json::json;
+    use std::{os::unix, path::PathBuf};
+
+    #[gpui::test]
+    async fn test_populate_and_search(mut cx: gpui::TestAppContext) {
+        let mut app_state = cx.update(test_app_state);
+        Arc::get_mut(&mut app_state).unwrap().fs = Arc::new(RealFs);
+        let dir = temp_tree(json!({
+            "root": {
+                "apple": "",
+                "banana": {
+                    "carrot": {
+                        "date": "",
+                        "endive": "",
+                    }
+                },
+                "fennel": {
+                    "grape": "",
+                }
+            }
+        }));
+
+        let root_link_path = dir.path().join("root_link");
+        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
+        unix::fs::symlink(
+            &dir.path().join("root/fennel"),
+            &dir.path().join("root/finnochio"),
+        )
+        .unwrap();
+
+        let project = cx.add_model(|_| Project::new(app_state.as_ref()));
+        let tree = project
+            .update(&mut cx, |project, cx| {
+                project.add_local_worktree(&root_link_path, cx)
+            })
+            .await
+            .unwrap();
+
+        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+            .await;
+        cx.read(|cx| {
+            let tree = tree.read(cx);
+            assert_eq!(tree.file_count(), 5);
+            assert_eq!(
+                tree.inode_for_path("fennel/grape"),
+                tree.inode_for_path("finnochio/grape")
+            );
+        });
+
+        let cancel_flag = Default::default();
+        let results = project
+            .read_with(&cx, |project, cx| {
+                project.match_paths("bna", false, false, 10, &cancel_flag, cx)
+            })
+            .await;
+        assert_eq!(
+            results
+                .into_iter()
+                .map(|result| result.path)
+                .collect::<Vec<Arc<Path>>>(),
+            vec![
+                PathBuf::from("banana/carrot/date").into(),
+                PathBuf::from("banana/carrot/endive").into(),
+            ]
+        );
+    }
+
+    #[gpui::test]
+    async fn test_search_worktree_without_files(mut cx: gpui::TestAppContext) {
+        let mut app_state = cx.update(test_app_state);
+        Arc::get_mut(&mut app_state).unwrap().fs = Arc::new(RealFs);
+        let dir = temp_tree(json!({
+            "root": {
+                "dir1": {},
+                "dir2": {
+                    "dir3": {}
+                }
+            }
+        }));
+
+        let project = cx.add_model(|_| Project::new(app_state.as_ref()));
+        let tree = project
+            .update(&mut cx, |project, cx| {
+                project.add_local_worktree(&dir.path(), cx)
+            })
+            .await
+            .unwrap();
+
+        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+            .await;
+
+        let cancel_flag = Default::default();
+        let results = project
+            .read_with(&cx, |project, cx| {
+                project.match_paths("dir", false, false, 10, &cancel_flag, cx)
+            })
+            .await;
+
+        assert!(results.is_empty());
+    }
+}
diff --git a/zed/src/workspace.rs b/zed/src/workspace.rs
index f382af321512095256b30c0b906af684c89d9e31..9e1710dac48b68520f83fb05897ef7b2be0c8dab 100644
--- a/zed/src/workspace.rs
+++ b/zed/src/workspace.rs
@@ -454,6 +454,10 @@ impl Workspace {
         }
     }

+    pub fn project(&self) -> &ModelHandle<Project> {
+        &self.project
+    }
+
     pub fn worktrees<'a>(&self, cx: &'a AppContext) -> &'a [ModelHandle<Worktree>] {
         &self.project.read(cx).worktrees()
     }
diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs
index 56ed2cb731fd9a9e2dd73478d904eac19ed69ef0..0d033d681ba00790701fdcd9c6fde6e3105b0db6 100644
--- a/zed/src/worktree.rs
+++ b/zed/src/worktree.rs
@@ -14,12 +14,10 @@ use crate::{
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Result};
 use futures::{Stream, StreamExt};
-pub use fuzzy::{match_paths, PathMatch};
-use sum_tree::{self, Edit, SeekTarget, SumTree};
+pub use fuzzy::PathMatch;
 use gpui::{
-    executor,
-    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
-    UpgradeModelHandle, WeakModelHandle,
+    executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
+    Task, UpgradeModelHandle, WeakModelHandle,
 };
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
@@ -44,6 +42,7 @@ use std::{
     },
     time::{Duration, SystemTime},
 };
+use sum_tree::{self, Edit, SeekTarget, SumTree};
 use zrpc::{PeerId, TypedEnvelope};

 lazy_static! {
@@ -2841,128 +2840,6 @@ mod tests {
         })
     }

-    #[gpui::test]
-    async fn test_populate_and_search(cx: gpui::TestAppContext) {
-        let dir = temp_tree(json!({
-            "root": {
-                "apple": "",
-                "banana": {
-                    "carrot": {
-                        "date": "",
-                        "endive": "",
-                    }
-                },
-                "fennel": {
-                    "grape": "",
-                }
-            }
-        }));
-
-        let root_link_path = dir.path().join("root_link");
-        unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
-        unix::fs::symlink(
-            &dir.path().join("root/fennel"),
-            &dir.path().join("root/finnochio"),
-        )
-        .unwrap();
-
-        let tree = Worktree::open_local(
-            rpc::Client::new(),
-            root_link_path,
-            Arc::new(RealFs),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-        let snapshots = [cx.read(|cx| {
-            let tree = tree.read(cx);
-            assert_eq!(tree.file_count(), 5);
-            assert_eq!(
-                tree.inode_for_path("fennel/grape"),
-                tree.inode_for_path("finnochio/grape")
-            );
-            tree.snapshot()
-        })];
-        let cancel_flag = Default::default();
-        let results = cx
-            .read(|cx| {
-                match_paths(
-                    &snapshots,
-                    "bna",
-                    false,
-                    false,
-                    10,
-                    &cancel_flag,
-                    cx.background().clone(),
-                )
-            })
-            .await;
-        assert_eq!(
-            results
-                .into_iter()
-                .map(|result| result.path)
-                .collect::<Vec<Arc<Path>>>(),
-            vec![
-                PathBuf::from("banana/carrot/date").into(),
-                PathBuf::from("banana/carrot/endive").into(),
-            ]
-        );
-    }
-
-    #[gpui::test]
-    async fn test_search_worktree_without_files(cx: gpui::TestAppContext) {
-        let dir = temp_tree(json!({
-            "root": {
-                "dir1": {},
-                "dir2": {
-                    "dir3": {}
-                }
-            }
-        }));
-        let tree = Worktree::open_local(
-            rpc::Client::new(),
-            dir.path(),
-            Arc::new(RealFs),
-            Default::default(),
-            &mut cx.to_async(),
-        )
-        .await
-        .unwrap();
-
-        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-            .await;
-        let snapshots = [cx.read(|cx| {
-            let tree = tree.read(cx);
-            assert_eq!(tree.file_count(), 0);
-            tree.snapshot()
-        })];
-        let cancel_flag = Default::default();
-        let results = cx
-            .read(|cx| {
-                match_paths(
-                    &snapshots,
-                    "dir",
-                    false,
-                    false,
-                    10,
-                    &cancel_flag,
-                    cx.background().clone(),
-                )
-            })
-            .await;
-        assert_eq!(
-            results
-                .into_iter()
-                .map(|result| result.path)
-                .collect::<Vec<Arc<Path>>>(),
-            vec![]
-        );
-    }
-
     #[gpui::test]
     async fn test_save_file(mut cx: gpui::TestAppContext) {
         let dir = temp_tree(json!({