From dcae4747b01525acc9e95c69dfd2fa4a6c8377da Mon Sep 17 00:00:00 2001
From: Max Brunsfeld
Date: Tue, 13 Jul 2021 15:33:20 -0700
Subject: [PATCH] Decouple Fs trait from Worktree

Make it a more direct shim for the standard FS APIs
---
 zed/src/worktree.rs    | 137 ++++++++++++++---
 zed/src/worktree/fs.rs | 240 ++++++++++++++---------
 2 files changed, 167 insertions(+), 210 deletions(-)

diff --git a/zed/src/worktree.rs b/zed/src/worktree.rs
index 55808941105800b584f5227cbbe27c73535d7042..755bc56720be7a4e841c708ceca5108368bd17d1 100644
--- a/zed/src/worktree.rs
+++ b/zed/src/worktree.rs
@@ -37,7 +37,10 @@ use std::{
     future::Future,
     ops::Deref,
     path::{Path, PathBuf},
-    sync::{atomic::AtomicUsize, Arc},
+    sync::{
+        atomic::{AtomicUsize, Ordering::SeqCst},
+        Arc,
+    },
     time::{Duration, SystemTime},
 };
 use zrpc::{ForegroundRouter, PeerId, TypedEnvelope};
@@ -588,12 +591,11 @@ impl LocalWorktree {
             .file_name()
             .map_or(String::new(), |f| f.to_string_lossy().to_string());
         let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
-        let entry = fs
-            .entry(root_char_bag, &next_entry_id, path.clone(), &abs_path)
+        let metadata = fs
+            .metadata(&abs_path)
             .await?
             .ok_or_else(|| anyhow!("root entry does not exist"))?;
-        let is_dir = entry.is_dir();
-        if is_dir {
+        if metadata.is_dir {
             root_name.push('/');
         }
 
@@ -612,7 +614,19 @@ impl LocalWorktree {
             removed_entry_ids: Default::default(),
             next_entry_id: Arc::new(next_entry_id),
         };
-        snapshot.insert_entry(entry);
+        snapshot.insert_entry(Entry {
+            id: snapshot.next_entry_id.fetch_add(1, SeqCst),
+            kind: if metadata.is_dir {
+                EntryKind::PendingDir
+            } else {
+                EntryKind::File(char_bag_for_path(root_char_bag, &path))
+            },
+            path: Arc::from(path),
+            inode: metadata.ino,
+            mtime: metadata.mtime,
+            is_symlink: metadata.is_symlink,
+            is_ignored: false,
+        });
 
         let tree = Self {
             snapshot: snapshot.clone(),
@@ -1558,6 +1572,27 @@ pub enum EntryKind {
 }
 
 impl Entry {
+    fn new(
+        path: Arc<Path>,
+        metadata: &fs::Metadata,
+        next_entry_id: &AtomicUsize,
+        root_char_bag: CharBag,
+    ) -> Self {
+        Self {
+            id: next_entry_id.fetch_add(1, SeqCst),
+            kind: if metadata.is_dir {
+                EntryKind::PendingDir
+            } else {
+                EntryKind::File(char_bag_for_path(root_char_bag, &path))
+            },
+            path,
+            inode: metadata.ino,
+            mtime: metadata.mtime,
+            is_symlink: metadata.is_symlink,
+            is_ignored: false,
+        }
+    }
+
     pub fn path(&self) -> &Arc<Path> {
         &self.path
     }
@@ -1878,32 +1913,27 @@ impl BackgroundScanner {
         let mut ignore_stack = job.ignore_stack.clone();
         let mut new_ignore = None;
 
-        let mut child_entries = self
-            .fs
-            .child_entries(
-                root_char_bag,
-                next_entry_id.as_ref(),
-                &job.path,
-                &job.abs_path,
-            )
-            .await?;
-        while let Some(child_entry) = child_entries.next().await {
-            let mut child_entry = match child_entry {
-                Ok(child_entry) => child_entry,
+        let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
+        while let Some(child_abs_path) = child_paths.next().await {
+            let child_abs_path = match child_abs_path {
+                Ok(child_abs_path) => child_abs_path,
                 Err(error) => {
                     log::error!("error processing entry {:?}", error);
                     continue;
                 }
             };
-            let child_name = child_entry.path.file_name().unwrap();
-            let child_abs_path = job.abs_path.join(&child_name);
-            let child_path = child_entry.path.clone();
+            let child_name = child_abs_path.file_name().unwrap();
+            let child_path: Arc<Path> = job.path.join(child_name).into();
+            let child_metadata = match self.fs.metadata(&child_abs_path).await? {
+                Some(metadata) => metadata,
+                None => continue,
+            };
 
             // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
             if child_name == *GITIGNORE {
                 let (ignore, err) = Gitignore::new(&child_abs_path);
                 if let Some(err) = err {
-                    log::error!("error in ignore file {:?} - {:?}", child_entry.path, err);
+                    log::error!("error in ignore file {:?} - {:?}", child_name, err);
                 }
                 let ignore = Arc::new(ignore);
                 ignore_stack = ignore_stack.append(job.path.clone(), ignore.clone());
@@ -1926,7 +1956,14 @@ impl BackgroundScanner {
                 }
             }
 
-            if child_entry.is_dir() {
+            let mut child_entry = Entry::new(
+                child_path.clone(),
+                &child_metadata,
+                &next_entry_id,
+                root_char_bag,
+            );
+
+            if child_metadata.is_dir {
                 let is_ignored = ignore_stack.is_path_ignored(&child_path, true);
                 child_entry.is_ignored = is_ignored;
                 new_entries.push(child_entry);
@@ -1999,22 +2036,18 @@ impl BackgroundScanner {
                 }
             };
 
-            match self
-                .fs
-                .entry(
-                    snapshot.root_char_bag,
-                    &next_entry_id,
-                    path.clone(),
-                    &event.path,
-                )
-                .await
-            {
-                Ok(Some(mut fs_entry)) => {
-                    let is_dir = fs_entry.is_dir();
-                    let ignore_stack = snapshot.ignore_stack_for_path(&path, is_dir);
+            match self.fs.metadata(&event.path).await {
+                Ok(Some(metadata)) => {
+                    let ignore_stack = snapshot.ignore_stack_for_path(&path, metadata.is_dir);
+                    let mut fs_entry = Entry::new(
+                        path.clone(),
+                        &metadata,
+                        snapshot.next_entry_id.as_ref(),
+                        snapshot.root_char_bag,
+                    );
                     fs_entry.is_ignored = ignore_stack.is_all();
                     snapshot.insert_entry(fs_entry);
-                    if is_dir {
+                    if metadata.is_dir {
                         scan_queue_tx
                             .send(ScanJob {
                                 abs_path: event.path,
@@ -2166,10 +2199,14 @@ async fn refresh_entry(
         root_char_bag = snapshot.root_char_bag;
         next_entry_id = snapshot.next_entry_id.clone();
     }
-    let entry = fs
-        .entry(root_char_bag, &next_entry_id, path, abs_path)
-        .await?
-        .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
+    let entry = Entry::new(
+        path,
+        &fs.metadata(abs_path)
+            .await?
+            .ok_or_else(|| anyhow!("could not read saved file metadata"))?,
+        &next_entry_id,
+        root_char_bag,
+    );
     Ok(snapshot.lock().insert_entry(entry))
 }
 
@@ -2918,16 +2955,14 @@ mod tests {
             root_char_bag: Default::default(),
             next_entry_id: next_entry_id.clone(),
         };
-        initial_snapshot.insert_entry(
-            smol::block_on(fs.entry(
-                Default::default(),
-                &next_entry_id,
-                Path::new("").into(),
-                root_dir.path().into(),
-            ))
-            .unwrap()
-            .unwrap(),
-        );
+        initial_snapshot.insert_entry(Entry::new(
+            Path::new("").into(),
+            &smol::block_on(fs.metadata(root_dir.path()))
+                .unwrap()
+                .unwrap(),
+            &next_entry_id,
+            Default::default(),
+        ));
         let mut scanner = BackgroundScanner::new(
             Arc::new(Mutex::new(initial_snapshot.clone())),
             notify_tx,
diff --git a/zed/src/worktree/fs.rs b/zed/src/worktree/fs.rs
index 405580ab97eea8cd5694fb5710b2aa9c6df01e49..5dc44ffb49691b96fb0722fa759d8b8b15dfb98a 100644
--- a/zed/src/worktree/fs.rs
+++ b/zed/src/worktree/fs.rs
@@ -1,8 +1,7 @@
-use super::{char_bag::CharBag, char_bag_for_path, Entry, EntryKind, Rope};
-use anyhow::{anyhow, Context, Result};
-use atomic::Ordering::SeqCst;
+use super::Rope;
+use anyhow::{anyhow, Result};
 use fsevent::EventStream;
-use futures::{future::BoxFuture, Stream, StreamExt};
+use futures::{Stream, StreamExt};
 use postage::prelude::Sink as _;
 use smol::io::{AsyncReadExt, AsyncWriteExt};
 use std::{
@@ -10,33 +9,20 @@
     os::unix::fs::MetadataExt,
     path::{Path, PathBuf},
     pin::Pin,
-    sync::{
-        atomic::{self, AtomicUsize},
-        Arc,
-    },
     time::{Duration, SystemTime},
 };
 
 #[async_trait::async_trait]
 pub trait Fs: Send + Sync {
-    async fn entry(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &AtomicUsize,
-        path: Arc<Path>,
-        abs_path: &Path,
-    ) -> Result<Option<Entry>>;
-    async fn child_entries<'a>(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &'a AtomicUsize,
-        path: &'a Path,
-        abs_path: &'a Path,
-    ) -> Result<Pin<Box<dyn Stream<Item = Result<Entry>> + Send>>>;
     async fn load(&self, path: &Path) -> Result<String>;
     async fn save(&self, path: &Path, text: &Rope) -> Result<()>;
     async fn canonicalize(&self, path: &Path) -> Result<PathBuf>;
     async fn is_file(&self, path: &Path) -> bool;
+    async fn metadata(&self, path: &Path) -> Result<Option<Metadata>>;
+    async fn read_dir(
+        &self,
+        path: &Path,
+    ) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>>;
     async fn watch(
         &self,
         path: &Path,
@@ -45,87 +31,17 @@ pub trait Fs: Send + Sync {
     fn is_fake(&self) -> bool;
 }
 
+pub struct Metadata {
+    pub ino: u64,
+    pub mtime: SystemTime,
+    pub is_symlink: bool,
+    pub is_dir: bool,
+}
+
 pub struct RealFs;
 
 #[async_trait::async_trait]
impl Fs for RealFs {
-    async fn entry(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &AtomicUsize,
-        path: Arc<Path>,
-        abs_path: &Path,
-    ) -> Result<Option<Entry>> {
-        let metadata = match smol::fs::metadata(&abs_path).await {
-            Err(err) => {
-                return match (err.kind(), err.raw_os_error()) {
-                    (io::ErrorKind::NotFound, _) => Ok(None),
-                    (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None),
-                    _ => Err(anyhow::Error::new(err)),
-                }
-            }
-            Ok(metadata) => metadata,
-        };
-        let inode = metadata.ino();
-        let mtime = metadata.modified()?;
-        let is_symlink = smol::fs::symlink_metadata(&abs_path)
-            .await
-            .context("failed to read symlink metadata")?
-            .file_type()
-            .is_symlink();
-
-        let entry = Entry {
-            id: next_entry_id.fetch_add(1, SeqCst),
-            kind: if metadata.file_type().is_dir() {
-                EntryKind::PendingDir
-            } else {
-                EntryKind::File(char_bag_for_path(root_char_bag, &path))
-            },
-            path: Arc::from(path),
-            inode,
-            mtime,
-            is_symlink,
-            is_ignored: false,
-        };
-
-        Ok(Some(entry))
-    }
-
-    async fn child_entries<'a>(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &'a AtomicUsize,
-        path: &'a Path,
-        abs_path: &'a Path,
-    ) -> Result<Pin<Box<dyn Stream<Item = Result<Entry>> + Send>>> {
-        let entries = smol::fs::read_dir(abs_path).await?;
-        Ok(entries
-            .then(move |entry| async move {
-                let child_entry = entry?;
-                let child_name = child_entry.file_name();
-                let child_path: Arc<Path> = path.join(&child_name).into();
-                let child_abs_path = abs_path.join(&child_name);
-                let child_is_symlink = child_entry.metadata().await?.file_type().is_symlink();
-                let child_metadata = smol::fs::metadata(child_abs_path).await?;
-                let child_inode = child_metadata.ino();
-                let child_mtime = child_metadata.modified()?;
-                Ok(Entry {
-                    id: next_entry_id.fetch_add(1, SeqCst),
-                    kind: if child_metadata.file_type().is_dir() {
-                        EntryKind::PendingDir
-                    } else {
-                        EntryKind::File(char_bag_for_path(root_char_bag, &child_path))
-                    },
-                    path: child_path,
-                    inode: child_inode,
-                    mtime: child_mtime,
-                    is_symlink: child_is_symlink,
-                    is_ignored: false,
-                })
-            })
-            .boxed())
-    }
-
     async fn load(&self, path: &Path) -> Result<String> {
         let mut file = smol::fs::File::open(path).await?;
         let mut text = String::new();
@@ -154,6 +70,43 @@ impl Fs for RealFs {
             .map_or(false, |metadata| metadata.is_file())
     }
 
+    async fn metadata(&self, path: &Path) -> Result<Option<Metadata>> {
+        let symlink_metadata = match smol::fs::symlink_metadata(path).await {
+            Ok(metadata) => metadata,
+            Err(err) => {
+                return match (err.kind(), err.raw_os_error()) {
+                    (io::ErrorKind::NotFound, _) => Ok(None),
+                    (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None),
+                    _ => Err(anyhow::Error::new(err)),
+                }
+            }
+        };
+
+        let is_symlink = symlink_metadata.file_type().is_symlink();
+        let metadata = if is_symlink {
+            smol::fs::metadata(path).await?
+        } else {
+            symlink_metadata
+        };
+        Ok(Some(Metadata {
+            ino: metadata.ino(),
+            mtime: metadata.modified().unwrap(),
+            is_symlink,
+            is_dir: metadata.file_type().is_dir(),
+        }))
+    }
+
+    async fn read_dir(
+        &self,
+        path: &Path,
+    ) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
+        let result = smol::fs::read_dir(path).await?.map(|entry| match entry {
+            Ok(entry) => Ok(entry.path()),
+            Err(error) => Err(anyhow!("failed to read dir entry {:?}", error)),
+        });
+        Ok(Box::pin(result))
+    }
+
     async fn watch(
         &self,
         path: &Path,
@@ -295,7 +248,7 @@ impl FakeFs {
         &'a self,
         path: impl 'a + AsRef<Path> + Send,
         tree: serde_json::Value,
-    ) -> BoxFuture<'a, ()> {
+    ) -> futures::future::BoxFuture<'a, ()> {
         use futures::FutureExt as _;
         use serde_json::Value::*;
 
@@ -364,65 +317,6 @@ impl FakeFs {
 #[cfg(any(test, feature = "test-support"))]
 #[async_trait::async_trait]
 impl Fs for FakeFs {
-    async fn entry(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &AtomicUsize,
-        path: Arc<Path>,
-        abs_path: &Path,
-    ) -> Result<Option<Entry>> {
-        let state = self.state.lock().await;
-        if let Some(entry) = state.entries.get(abs_path) {
-            Ok(Some(Entry {
-                id: next_entry_id.fetch_add(1, SeqCst),
-                kind: if entry.is_dir {
-                    EntryKind::PendingDir
-                } else {
-                    EntryKind::File(char_bag_for_path(root_char_bag, &path))
-                },
-                path: Arc::from(path),
-                inode: entry.inode,
-                mtime: entry.mtime,
-                is_symlink: entry.is_symlink,
-                is_ignored: false,
-            }))
-        } else {
-            Ok(None)
-        }
-    }
-
-    async fn child_entries<'a>(
-        &self,
-        root_char_bag: CharBag,
-        next_entry_id: &'a AtomicUsize,
-        path: &'a Path,
-        abs_path: &'a Path,
-    ) -> Result<Pin<Box<dyn Stream<Item = Result<Entry>> + Send>>> {
-        use futures::{future, stream};
-
-        let state = self.state.lock().await;
-        Ok(stream::iter(state.entries.clone())
-            .filter(move |(child_path, _)| future::ready(child_path.parent() == Some(abs_path)))
-            .then(move |(child_abs_path, child_entry)| async move {
-                smol::future::yield_now().await;
-                let child_path = Arc::from(path.join(child_abs_path.file_name().unwrap()));
-                Ok(Entry {
-                    id: next_entry_id.fetch_add(1, SeqCst),
-                    kind: if child_entry.is_dir {
-                        EntryKind::PendingDir
-                    } else {
-                        EntryKind::File(char_bag_for_path(root_char_bag, &child_path))
-                    },
-                    path: child_path,
-                    inode: child_entry.inode,
-                    mtime: child_entry.mtime,
-                    is_symlink: child_entry.is_symlink,
-                    is_ignored: false,
-                })
-            })
-            .boxed())
-    }
-
     async fn load(&self, path: &Path) -> Result<String> {
         let state = self.state.lock().await;
         let text = state
@@ -470,6 +364,34 @@ impl Fs for FakeFs {
         state.entries.get(path).map_or(false, |entry| !entry.is_dir)
     }
 
+    async fn metadata(&self, path: &Path) -> Result<Option<Metadata>> {
+        let state = self.state.lock().await;
+        Ok(state.entries.get(path).map(|entry| Metadata {
+            ino: entry.inode,
+            mtime: entry.mtime,
+            is_dir: entry.is_dir,
+            is_symlink: entry.is_symlink,
+        }))
+    }
+
+    async fn read_dir(
+        &self,
+        abs_path: &Path,
+    ) -> Result<Pin<Box<dyn Send + Stream<Item = Result<PathBuf>>>>> {
+        use futures::{future, stream};
+        let state = self.state.lock().await;
+        let abs_path = abs_path.to_path_buf();
+        Ok(Box::pin(stream::iter(state.entries.clone()).filter_map(
+            move |(child_path, _)| {
+                future::ready(if child_path.parent() == Some(&abs_path) {
+                    Some(Ok(child_path))
+                } else {
+                    None
+                })
+            },
+        )))
+    }
+
     async fn watch(
         &self,
         path: &Path,