From 0bbff090f0765768901aa7a086c7f34982238a77 Mon Sep 17 00:00:00 2001
From: Antonio Scandurra
Date: Thu, 15 Apr 2021 12:10:37 +0200
Subject: [PATCH] WIP

---
 zed/src/sum_tree/mod.rs |   9 ++
 zed/src/worktree.rs     | 316 +++++++++++++++++++++++++++++++---------
 2 files changed, 257 insertions(+), 68 deletions(-)

diff --git a/zed/src/sum_tree/mod.rs b/zed/src/sum_tree/mod.rs
index e690556d305e2e84038d61776fcca36771e563d3..0b7b39842040300de19b50cd2ba2b00f82af0da4 100644
--- a/zed/src/sum_tree/mod.rs
+++ b/zed/src/sum_tree/mod.rs
@@ -373,6 +373,15 @@ impl<T: KeyedItem> SumTree<T> {
             new_tree
         };
     }
+
+    pub fn get(&self, key: &T::Key) -> Option<&T> {
+        let mut cursor = self.cursor::<T::Key, ()>();
+        if cursor.seek(key, SeekBias::Left) {
+            cursor.item()
+        } else {
+            None
+        }
+    }
 }
 
 impl<T: Item> Default for SumTree<T> {
diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs
index 2b1ee86d56e1fb880ff57c44516b58e152beb0a0..3e47a3c80e0435a70cf4ce4f1942f0e276a6abc7 100644
--- a/zed/src/worktree.rs
+++ b/zed/src/worktree.rs
@@ -1,20 +1,26 @@
 use crate::sum_tree::{self, Edit, SumTree};
-use gpui::{Entity, ModelContext};
+use gpui::{scoped_pool, Entity, ModelContext};
 use ignore::dir::{Ignore, IgnoreBuilder};
 use parking_lot::Mutex;
-use smol::channel::Sender;
+use smol::{channel::Sender, Timer};
 use std::{
     ffi::{OsStr, OsString},
-    fs, io,
+    fmt, fs, io,
     ops::AddAssign,
     os::unix::fs::MetadataExt,
     path::{Path, PathBuf},
-    sync::Arc,
+    sync::{
+        atomic::{self, AtomicU64},
+        Arc,
+    },
+    time::{Duration, Instant},
 };
 
+#[derive(Debug)]
 enum ScanState {
     Idle,
     Scanning,
+    Err(io::Error),
 }
 
 pub struct Worktree {
@@ -22,33 +28,109 @@ pub struct Worktree {
     entries: SumTree<Entry>,
     scanner: BackgroundScanner,
     scan_state: ScanState,
+    will_poll_entries: bool,
 }
 
 impl Worktree {
     fn new(path: impl Into<Arc<Path>>, ctx: &mut ModelContext<Self>) -> Self {
         let path = path.into();
         let scan_state = smol::channel::unbounded();
-        let scanner = BackgroundScanner::new(path.clone(), scan_state.0);
+        let scanner = BackgroundScanner::new(path.clone(), scan_state.0, ctx.thread_pool().clone());
         let tree = Self {
             path,
             entries: Default::default(),
             scanner,
             scan_state: ScanState::Idle,
+            will_poll_entries: false,
         };
 
-        {
-            let scanner = tree.scanner.clone();
-            std::thread::spawn(move || scanner.run());
-        }
+        let scanner = tree.scanner.clone();
+        std::thread::spawn(move || scanner.run());
+
+        ctx.spawn_stream(scan_state.1, Self::observe_scan_state, |_, _| {})
+            .detach();
 
         tree
     }
+
+    fn observe_scan_state(&mut self, scan_state: ScanState, ctx: &mut ModelContext<Self>) {
+        self.scan_state = scan_state;
+        self.poll_entries(ctx);
+        ctx.notify();
+    }
+
+    fn poll_entries(&mut self, ctx: &mut ModelContext<Self>) {
+        self.entries = self.scanner.snapshot();
+        if self.is_scanning() && !self.will_poll_entries {
+            self.will_poll_entries = true;
+            ctx.spawn(Timer::after(Duration::from_millis(100)), |this, _, ctx| {
+                this.will_poll_entries = false;
+                this.poll_entries(ctx);
+            })
+            .detach();
+        }
+    }
+
+    fn is_scanning(&self) -> bool {
+        if let ScanState::Scanning = self.scan_state {
+            true
+        } else {
+            false
+        }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.root_ino() == 0
+    }
+
+    fn root_ino(&self) -> u64 {
+        self.scanner.root_ino.load(atomic::Ordering::SeqCst)
+    }
+
+    fn file_count(&self) -> usize {
+        self.entries.summary().file_count
+    }
+
+    fn fmt_entry(&self, f: &mut fmt::Formatter<'_>, ino: u64, indent: usize) -> fmt::Result {
+        match self.entries.get(&ino).unwrap() {
+            Entry::Dir {
+                name, children, ..
+            } => {
+                write!(
+                    f,
+                    "{}{}/ ({})\n",
+                    " ".repeat(indent),
+                    name.to_string_lossy(),
+                    ino
+                )?;
+                for child_id in children.iter() {
+                    self.fmt_entry(f, *child_id, indent + 2)?;
+                }
+                Ok(())
+            }
+            Entry::File { name, .. } => write!(
+                f,
+                "{}{} ({})\n",
+                " ".repeat(indent),
+                name.to_string_lossy(),
+                ino
+            ),
+        }
+    }
 }
 
 impl Entity for Worktree {
     type Event = ();
 }
 
+impl fmt::Debug for Worktree {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if self.is_empty() {
+            write!(f, "Empty tree\n")
+        } else {
+            self.fmt_entry(f, self.root_ino(), 0)
+        }
+    }
+}
+
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum Entry {
     Dir {
@@ -123,25 +205,37 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for u64 {
 #[derive(Clone)]
 struct BackgroundScanner {
     path: Arc<Path>,
+    root_ino: Arc<AtomicU64>,
     entries: Arc<Mutex<SumTree<Entry>>>,
     notify: Sender<ScanState>,
+    thread_pool: scoped_pool::Pool,
 }
 
 impl BackgroundScanner {
-    fn new(path: Arc<Path>, notify: Sender<ScanState>) -> Self {
+    fn new(path: Arc<Path>, notify: Sender<ScanState>, thread_pool: scoped_pool::Pool) -> Self {
         Self {
             path,
+            root_ino: Arc::new(AtomicU64::new(0)),
             entries: Default::default(),
             notify,
+            thread_pool,
         }
     }
 
+    fn snapshot(&self) -> SumTree<Entry> {
+        self.entries.lock().clone()
+    }
+
     fn run(&self) {
         if smol::block_on(self.notify.send(ScanState::Scanning)).is_err() {
             return;
         }
 
-        self.scan_dirs();
+        if let Err(err) = self.scan_dirs() {
+            if smol::block_on(self.notify.send(ScanState::Err(err))).is_err() {
+                return;
+            }
+        }
 
         if smol::block_on(self.notify.send(ScanState::Idle)).is_err() {
             return;
@@ -151,10 +245,11 @@ impl BackgroundScanner {
     }
 
     fn scan_dirs(&self) -> io::Result<()> {
+        println!("Scanning dirs ;)");
         let metadata = fs::metadata(&self.path)?;
         let ino = metadata.ino();
         let is_symlink = fs::symlink_metadata(&self.path)?.file_type().is_symlink();
-        let name = self.path.file_name().unwrap_or(OsStr::new("/")).into();
+        let name = Arc::from(self.path.file_name().unwrap_or(OsStr::new("/")));
         let relative_path = PathBuf::from(&name);
 
         let mut ignore = IgnoreBuilder::new()
@@ -167,9 +262,8 @@ impl BackgroundScanner {
         let is_ignored = ignore.matched(&self.path, metadata.is_dir()).is_ignore();
 
         if metadata.file_type().is_dir() {
-            let is_ignored = is_ignored || name == ".git";
-
-            self.insert_entries(Some(Entry::Dir {
+            let is_ignored = is_ignored || name.as_ref() == ".git";
+            let dir_entry = Entry::Dir {
                 parent: None,
                 name,
                 ino,
@@ -177,106 +271,131 @@ impl BackgroundScanner {
                 is_ignored,
                 children: Arc::from([]),
                 pending: true,
-            }));
+            };
+            self.insert_entries(Some(dir_entry.clone()));
 
             let (tx, rx) = crossbeam_channel::unbounded();
             tx.send(Ok(ScanJob {
                 ino,
-                path: path.into(),
+                path: self.path.clone(),
                 relative_path,
+                dir_entry,
                 ignore: Some(ignore),
                 scan_queue: tx.clone(),
             }))
            .unwrap();
             drop(tx);
 
-            Parallel::<io::Result<()>>::new()
-                .each(0..16, |_| {
-                    while let Ok(result) = rx.recv() {
-                        self.scan_dir(result?)?;
-                    }
-                    Ok(())
-                })
-                .run()
-                .into_iter()
-                .collect::<io::Result<()>>()?;
+            let mut results = Vec::new();
+            results.resize_with(16, || Ok(()));
+            self.thread_pool.scoped(|pool| {
+                for result in &mut results {
+                    pool.execute(|| {
+                        let result = result;
+                        while let Ok(job) = rx.recv() {
+                            if let Err(err) = job.and_then(|job| self.scan_dir(job)) {
+                                *result = Err(err);
+                                break;
+                            }
+                        }
+                    });
+                }
+            });
+            results.into_iter().collect::<io::Result<()>>()?;
         } else {
-            self.insert_file(None, name, ino, is_symlink, is_ignored, relative_path);
+            self.insert_entries(Some(Entry::File {
+                parent: None,
+                name,
+                ino,
+                is_symlink,
+                is_ignored,
+            }));
         }
-        self.0.write().root_ino = Some(ino);
+
+        self.root_ino.store(ino, atomic::Ordering::SeqCst);
 
         Ok(())
     }
 
-    fn scan_dir(&self, to_scan: ScanJob) -> io::Result<()> {
+    fn scan_dir(&self, job: ScanJob) -> io::Result<()> {
+        let scan_queue = job.scan_queue;
+        let mut dir_entry = job.dir_entry;
+        let mut new_children = Vec::new();
+        let mut new_entries = Vec::new();
+        let mut new_jobs = Vec::new();
 
-        for child_entry in fs::read_dir(&to_scan.path)? {
+        for child_entry in fs::read_dir(&job.path)? {
             let child_entry = child_entry?;
-            let name = child_entry.file_name();
-            let relative_path = to_scan.relative_path.join(&name);
+            let name: Arc<OsStr> = child_entry.file_name().into();
+            let relative_path = job.relative_path.join(name.as_ref());
             let metadata = child_entry.metadata()?;
             let ino = metadata.ino();
             let is_symlink = metadata.file_type().is_symlink();
+            let path = job.path.join(name.as_ref());
+            new_children.push(ino);
 
             if metadata.is_dir() {
-                let path = to_scan.path.join(&name);
                 let mut is_ignored = true;
                 let mut ignore = None;
 
-                if let Some(parent_ignore) = to_scan.ignore.as_ref() {
+                if let Some(parent_ignore) = job.ignore.as_ref() {
                     let child_ignore = parent_ignore.add_child(&path).unwrap();
-                    is_ignored = child_ignore.matched(&path, true).is_ignore() || name == ".git";
+                    is_ignored =
+                        child_ignore.matched(&path, true).is_ignore() || name.as_ref() == ".git";
                     if !is_ignored {
                         ignore = Some(child_ignore);
                     }
                 }
 
-                self.insert_entries(
-                    Some(Entry::Dir {
-                        parent: (),
-                        name: (),
-                        ino: (),
-                        is_symlink: (),
-                        is_ignored: (),
-                        children: (),
-                        pending: (),
-                    })
-                    .into_iter(),
-                );
-
-                self.insert_dir(Some(to_scan.ino), name, ino, is_symlink, is_ignored);
-                new_children.push(ino);
-
-                let dirs_to_scan = to_scan.scan_queue.clone();
-                let _ = to_scan.scan_queue.send(Ok(ScanJob {
+                let dir_entry = Entry::Dir {
+                    parent: Some(job.ino),
+                    name,
+                    ino,
+                    is_symlink,
+                    is_ignored,
+                    children: Arc::from([]),
+                    pending: true,
+                };
+                new_entries.push(dir_entry.clone());
+                new_jobs.push(ScanJob {
                     ino,
-                    path,
+                    path: Arc::from(path),
                     relative_path,
+                    dir_entry,
                     ignore,
-                    scan_queue: dirs_to_scan,
-                }));
-            } else {
-                let is_ignored = to_scan.ignore.as_ref().map_or(true, |i| {
-                    i.matched(to_scan.path.join(&name), false).is_ignore()
+                    scan_queue: scan_queue.clone(),
                 });
-
-                self.insert_file(
-                    Some(to_scan.ino),
+            } else {
+                let is_ignored = job
+                    .ignore
+                    .as_ref()
+                    .map_or(true, |i| i.matched(path, false).is_ignore());
+                new_entries.push(Entry::File {
+                    parent: Some(job.ino),
                     name,
                     ino,
                     is_symlink,
                     is_ignored,
-                    relative_path,
-                );
-                new_children.push(ino);
+                });
             };
         }
 
-        if let Some(Entry::Dir { children, .. }) = &mut self.0.write().entries.get_mut(&to_scan.ino)
+        if let Entry::Dir {
+            children, pending, ..
+        } = &mut dir_entry
         {
-            *children = new_children.clone();
+            *children = Arc::from(new_children);
+            *pending = false;
+        } else {
+            unreachable!()
+        }
+        new_entries.push(dir_entry);
+
+        self.insert_entries(new_entries);
+        for new_job in new_jobs {
+            let _ = scan_queue.send(Ok(new_job));
         }
 
         Ok(())
     }
@@ -293,6 +412,7 @@ struct ScanJob {
     ino: u64,
     path: Arc<Path>,
     relative_path: PathBuf,
+    dir_entry: Entry,
     ignore: Option<Ignore>,
     scan_queue: crossbeam_channel::Sender<io::Result<ScanJob>>,
 }
@@ -309,3 +429,63 @@ impl UnwrapIgnoreTuple for (Ignore, Option<ignore::Error>) {
         self.0
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::editor::Buffer;
+    use crate::test::*;
+    use anyhow::Result;
+    use gpui::App;
+    use serde_json::json;
+    use std::os::unix;
+
+    #[test]
+    fn test_populate_and_search() {
+        App::test_async((), |mut app| async move {
+            let dir = temp_tree(json!({
+                "root": {
+                    "apple": "",
+                    "banana": {
+                        "carrot": {
+                            "date": "",
+                            "endive": "",
+                        }
+                    },
+                    "fennel": {
+                        "grape": "",
+                    }
+                }
+            }));
+
+            let root_link_path = dir.path().join("root_link");
+            unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
+
+            let tree = app.add_model(|ctx| Worktree::new(root_link_path, ctx));
+            assert_condition(1, 300, || app.read(|ctx| tree.read(ctx).file_count() == 4)).await;
+            // app.read(|ctx| {
+            //     let tree = tree.read(ctx);
+            //     assert_eq!(tree.file_count(), 4);
+            //     let results = match_paths(
+            //         &[tree.clone()],
+            //         "bna",
+            //         false,
+            //         false,
+            //         10,
+            //         ctx.thread_pool().clone(),
+            //     )
+            //     .iter()
+            //     .map(|result| tree.entry_path(result.entry_id))
+            //     .collect::<Result<Vec<PathBuf>, _>>()
+            //     .unwrap();
+            //     assert_eq!(
+            //         results,
+            //         vec![
+            //             PathBuf::from("root_link/banana/carrot/date"),
+            //             PathBuf::from("root_link/banana/carrot/endive"),
+            //         ]
+            //     );
+            // })
+        });
+    }
+}
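
Note (not part of the patch): the scan_dirs/scan_dir changes above replace the Parallel-based fan-out with a fixed set of workers draining one shared crossbeam channel, where every ScanJob carries a clone of the queue's sender so newly discovered directories can be pushed back onto the same queue, and the workers exit once every sender clone has been dropped. The following is a rough standalone sketch of that pattern only, assuming the crossbeam-channel crate and using std::thread::scope in place of gpui's scoped_pool; the names Job, scan, and scan_one are hypothetical and do not appear in the patch.

    use std::io;
    use std::path::PathBuf;
    use std::sync::Mutex;

    // One unit of work: a directory to list. Each job carries a clone of the
    // queue's sender so it can enqueue the subdirectories it finds; the channel
    // closes (and the workers stop) once every sender clone has been dropped.
    struct Job {
        path: PathBuf,
        queue: crossbeam_channel::Sender<Job>,
    }

    fn scan(root: PathBuf, worker_count: usize) -> io::Result<Vec<PathBuf>> {
        let files = Mutex::new(Vec::new());
        let scan_error: Mutex<io::Result<()>> = Mutex::new(Ok(()));

        let (tx, rx) = crossbeam_channel::unbounded::<Job>();
        tx.send(Job { path: root, queue: tx.clone() }).unwrap();
        // Drop the seeding sender: only in-flight jobs keep the channel open now.
        drop(tx);

        std::thread::scope(|scope| {
            for _ in 0..worker_count {
                scope.spawn(|| {
                    while let Ok(job) = rx.recv() {
                        if let Err(err) = scan_one(&job, &files) {
                            *scan_error.lock().unwrap() = Err(err);
                            break;
                        }
                    }
                });
            }
        });

        scan_error.into_inner().unwrap()?;
        Ok(files.into_inner().unwrap())
    }

    fn scan_one(job: &Job, files: &Mutex<Vec<PathBuf>>) -> io::Result<()> {
        for entry in std::fs::read_dir(&job.path)? {
            let entry = entry?;
            let path = entry.path();
            if entry.file_type()?.is_dir() {
                // Re-enqueue subdirectories for any idle worker to pick up.
                let _ = job.queue.send(Job { path, queue: job.queue.clone() });
            } else {
                files.lock().unwrap().push(path);
            }
        }
        Ok(())
    }

Dropping the initial sender right after seeding the queue is what makes the loop terminate: once the last in-flight Job (and with it the last sender clone) is dropped, recv() returns Err and every worker unblocks.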