Allow excluding files from worktrees (#3356)

Kirill Bulatov created

* Part of https://github.com/zed-industries/community/issues/70

Allows certain files or file groups to be fully removed from Zed: no such
items will be scanned or added into worktrees, so nothing will be shown in
the project tree, project search and go-to-file will not see them, and the
corresponding FS events will be ignored.

One exception is `.git` files: those are still not shown or accessible by
default, yet they are tracked in the worktrees.

By default, this is configured to:
```json
  "file_scan_exclusions": [
    "**/.git",
    "**/.svn",
    "**/.hg",
    "**/CVS",
    "**/.DS_Store",
    "**/Thumbs.db",
    "**/.classpath",
    "**/.settings"
  ],
```

* In addition, this change contains code preparations for a "search in included
files" feature: a new SearchOptions variant, plus search crate and RPC adjustments

Release Notes:

- Added `file_scan_exclusions` section to project settings to completely
ignore certain files in Zed

Change summary

assets/settings/default.json                                   |   13 
crates/collab/src/tests/integration_tests.rs                   |    2 
crates/collab/src/tests/random_project_collaboration_tests.rs  |    3 
crates/collab2/src/tests/integration_tests.rs                  |    2 
crates/collab2/src/tests/random_project_collaboration_tests.rs |    3 
crates/collab_ui/src/chat_panel/message_editor.rs              |   10 
crates/collab_ui2/src/chat_panel/message_editor.rs             |   10 
crates/project/src/ignore.rs                                   |    4 
crates/project/src/project.rs                                  |   32 
crates/project/src/project_settings.rs                         |    2 
crates/project/src/project_tests.rs                            |   16 
crates/project/src/search.rs                                   |   20 
crates/project/src/worktree.rs                                 |  346 
crates/project/src/worktree_tests.rs                           |  172 
crates/project2/src/ignore.rs                                  |    4 
crates/project2/src/project2.rs                                |   32 
crates/project2/src/project_settings.rs                        |    2 
crates/project2/src/project_tests.rs                           |   16 
crates/project2/src/search.rs                                  |   20 
crates/project2/src/worktree.rs                                |  342 
crates/project2/src/worktree_tests.rs                          | 4451 ++-
crates/project_panel/src/project_panel.rs                      |  125 
crates/project_panel2/src/project_panel.rs                     |  126 
crates/rpc/proto/zed.proto                                     |    1 
crates/rpc2/proto/zed.proto                                    |    1 
crates/search/src/buffer_search.rs                             |    2 
crates/search/src/project_search.rs                            |   26 
crates/search/src/search.rs                                    |   24 
crates/util/src/paths.rs                                       |   32 
29 files changed, 3,422 insertions(+), 2,417 deletions(-)

Detailed changes

assets/settings/default.json 🔗

@@ -268,6 +268,19 @@
     // Whether to show warnings or not by default.
     "include_warnings": true
   },
+  // Add files or globs of files that will be excluded by Zed entirely:
+  // they will be skipped during FS scan(s), file tree and file search
+  // will lack the corresponding file entries.
+  "file_scan_exclusions": [
+    "**/.git",
+    "**/.svn",
+    "**/.hg",
+    "**/CVS",
+    "**/.DS_Store",
+    "**/Thumbs.db",
+    "**/.classpath",
+    "**/.settings"
+  ],
   // Git gutter behavior configuration.
   "git": {
     // Control whether the git gutter is shown. May take 2 values:

crates/collab/src/tests/integration_tests.rs 🔗

@@ -5052,7 +5052,7 @@ async fn test_project_search(
     let mut results = HashMap::default();
     let mut search_rx = project_b.update(cx_b, |project, cx| {
         project.search(
-            SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
             cx,
         )
     });

crates/collab/src/tests/random_project_collaboration_tests.rs 🔗

@@ -869,7 +869,8 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                 let mut search = project.update(cx, |project, cx| {
                     project.search(
-                        SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+                        SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+                            .unwrap(),
                         cx,
                     )
                 });

crates/collab2/src/tests/integration_tests.rs 🔗

@@ -4599,7 +4599,7 @@ async fn test_project_search(
     let mut results = HashMap::default();
     let mut search_rx = project_b.update(cx_b, |project, cx| {
         project.search(
-            SearchQuery::text("world", false, false, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
             cx,
         )
     });

crates/collab2/src/tests/random_project_collaboration_tests.rs 🔗

@@ -870,7 +870,8 @@ impl RandomizedTest for ProjectCollaborationTest {
 
                 let mut search = project.update(cx, |project, cx| {
                     project.search(
-                        SearchQuery::text(query, false, false, Vec::new(), Vec::new()).unwrap(),
+                        SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+                            .unwrap(),
                         cx,
                     )
                 });

crates/collab_ui/src/chat_panel/message_editor.rs 🔗

@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
 const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
 
 lazy_static! {
-    static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
-        "@[-_\\w]+",
-        false,
-        false,
-        Default::default(),
-        Default::default()
-    )
-    .unwrap();
+    static ref MENTIONS_SEARCH: SearchQuery =
+        SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
 }
 
 pub struct MessageEditor {

crates/collab_ui2/src/chat_panel/message_editor.rs 🔗

@@ -14,14 +14,8 @@ use std::{sync::Arc, time::Duration};
 const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);
 
 lazy_static! {
-    static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
-        "@[-_\\w]+",
-        false,
-        false,
-        Default::default(),
-        Default::default()
-    )
-    .unwrap();
+    static ref MENTIONS_SEARCH: SearchQuery =
+        SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
 }
 
 pub struct MessageEditor {

crates/project/src/ignore.rs 🔗

@@ -20,10 +20,6 @@ impl IgnoreStack {
         Arc::new(Self::All)
     }
 
-    pub fn is_all(&self) -> bool {
-        matches!(self, IgnoreStack::All)
-    }
-
     pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
         match self.as_ref() {
             IgnoreStack::All => self,

crates/project/src/project.rs 🔗

@@ -5548,7 +5548,16 @@ impl Project {
             .collect::<Vec<_>>();
 
         let background = cx.background().clone();
-        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+        let path_count: usize = snapshots
+            .iter()
+            .map(|s| {
+                if query.include_ignored() {
+                    s.file_count()
+                } else {
+                    s.visible_file_count()
+                }
+            })
+            .sum();
         if path_count == 0 {
             let (_, rx) = smol::channel::bounded(1024);
             return rx;
@@ -5561,8 +5570,16 @@ impl Project {
             .iter()
             .filter_map(|(_, b)| {
                 let buffer = b.upgrade(cx)?;
-                let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
-                if let Some(path) = snapshot.file().map(|file| file.path()) {
+                let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+                    let is_ignored = buffer
+                        .project_path(cx)
+                        .and_then(|path| self.entry_for_path(&path, cx))
+                        .map_or(false, |entry| entry.is_ignored);
+                    (is_ignored, buffer.snapshot())
+                });
+                if is_ignored && !query.include_ignored() {
+                    return None;
+                } else if let Some(path) = snapshot.file().map(|file| file.path()) {
                     Some((path.clone(), (buffer, snapshot)))
                 } else {
                     unnamed_files.push(buffer);
@@ -5735,7 +5752,12 @@ impl Project {
                         let mut snapshot_start_ix = 0;
                         let mut abs_path = PathBuf::new();
                         for snapshot in snapshots {
-                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+                            let snapshot_end_ix = snapshot_start_ix
+                                + if query.include_ignored() {
+                                    snapshot.file_count()
+                                } else {
+                                    snapshot.visible_file_count()
+                                };
                             if worker_end_ix <= snapshot_start_ix {
                                 break;
                             } else if worker_start_ix > snapshot_end_ix {
@@ -5748,7 +5770,7 @@ impl Project {
                                     cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
 
                                 for entry in snapshot
-                                    .files(false, start_in_snapshot)
+                                    .files(query.include_ignored(), start_in_snapshot)
                                     .take(end_in_snapshot - start_in_snapshot)
                                 {
                                     if matching_paths_tx.is_closed() {

crates/project/src/project_settings.rs 🔗

@@ -10,6 +10,8 @@ pub struct ProjectSettings {
     pub lsp: HashMap<Arc<str>, LspSettings>,
     #[serde(default)]
     pub git: GitSettings,
+    #[serde(default)]
+    pub file_scan_exclusions: Option<Vec<String>>,
 }
 
 #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]

crates/project/src/project_tests.rs 🔗

@@ -3598,7 +3598,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3623,7 +3623,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3662,6 +3662,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 Vec::new()
             )
@@ -3681,6 +3682,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.rs").unwrap()],
                 Vec::new()
             )
@@ -3703,6 +3705,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap(),
@@ -3727,6 +3730,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
                     PathMatcher::new("*.ts").unwrap(),
@@ -3774,6 +3778,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3798,6 +3803,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.rs").unwrap()],
             )
@@ -3820,6 +3826,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
@@ -3844,6 +3851,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
@@ -3885,6 +3893,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3904,6 +3913,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.ts").unwrap()],
                 vec![PathMatcher::new("*.ts").unwrap()],
             ).unwrap(),
@@ -3922,6 +3932,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()
@@ -3947,6 +3958,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()

crates/project/src/search.rs 🔗

@@ -39,6 +39,7 @@ pub enum SearchQuery {
         replacement: Option<String>,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 
@@ -48,6 +49,7 @@ pub enum SearchQuery {
         multiline: bool,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 }
@@ -57,6 +59,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
             replacement: None,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -82,6 +86,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
             multiline,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -121,6 +127,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -129,6 +136,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -156,6 +164,7 @@ impl SearchQuery {
             regex: self.is_regex(),
             whole_word: self.whole_word(),
             case_sensitive: self.case_sensitive(),
+            include_ignored: self.include_ignored(),
             files_to_include: self
                 .files_to_include()
                 .iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
         }
     }
 
+    pub fn include_ignored(&self) -> bool {
+        match self {
+            Self::Text {
+                include_ignored, ..
+            } => *include_ignored,
+            Self::Regex {
+                include_ignored, ..
+            } => *include_ignored,
+        }
+    }
+
     pub fn is_regex(&self) -> bool {
         matches!(self, Self::Regex { .. })
     }

crates/project/src/worktree.rs 🔗

@@ -1,5 +1,6 @@
 use crate::{
-    copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+    copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+    ProjectEntryId, RemoveOptions,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context, Result};
@@ -21,7 +22,10 @@ use futures::{
 };
 use fuzzy::CharBag;
 use git::{DOT_GIT, GITIGNORE};
-use gpui::{executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use gpui::{
+    executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
+};
+use itertools::Itertools;
 use language::{
     proto::{
         deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -36,6 +40,7 @@ use postage::{
     prelude::{Sink as _, Stream as _},
     watch,
 };
+use settings::SettingsStore;
 use smol::channel::{self, Sender};
 use std::{
     any::Any,
@@ -55,7 +60,10 @@ use std::{
     time::{Duration, SystemTime},
 };
 use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+    paths::{PathMatcher, HOME},
+    ResultExt,
+};
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
 pub struct WorktreeId(usize);
@@ -70,7 +78,8 @@ pub struct LocalWorktree {
     scan_requests_tx: channel::Sender<ScanRequest>,
     path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
     is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
-    _background_scanner_task: Task<()>,
+    _settings_subscription: Subscription,
+    _background_scanner_tasks: Vec<Task<()>>,
     share: Option<ShareState>,
     diagnostics: HashMap<
         Arc<Path>,
@@ -216,6 +225,7 @@ pub struct LocalSnapshot {
     /// All of the git repositories in the worktree, indexed by the project entry
     /// id of their parent directory.
     git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+    file_scan_exclusions: Vec<PathMatcher>,
 }
 
 struct BackgroundScannerState {
@@ -299,17 +309,54 @@ impl Worktree {
             .await
             .context("failed to stat worktree path")?;
 
+        let closure_fs = Arc::clone(&fs);
+        let closure_next_entry_id = Arc::clone(&next_entry_id);
+        let closure_abs_path = abs_path.to_path_buf();
         Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
+            let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
+                if let Self::Local(this) = this {
+                    let new_file_scan_exclusions =
+                        file_scan_exclusions(settings::get::<ProjectSettings>(cx));
+                    if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+                        this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+                        log::info!(
+                            "Re-scanning directories, new scan exclude files: {:?}",
+                            this.snapshot
+                                .file_scan_exclusions
+                                .iter()
+                                .map(ToString::to_string)
+                                .collect::<Vec<_>>()
+                        );
+
+                        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+                        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+                            channel::unbounded();
+                        this.scan_requests_tx = scan_requests_tx;
+                        this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+                        this._background_scanner_tasks = start_background_scan_tasks(
+                            &closure_abs_path,
+                            this.snapshot(),
+                            scan_requests_rx,
+                            path_prefixes_to_scan_rx,
+                            Arc::clone(&closure_next_entry_id),
+                            Arc::clone(&closure_fs),
+                            cx,
+                        );
+                        this.is_scanning = watch::channel_with(true);
+                    }
+                }
+            });
+
             let root_name = abs_path
                 .file_name()
                 .map_or(String::new(), |f| f.to_string_lossy().to_string());
-
             let mut snapshot = LocalSnapshot {
+                file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
                 ignores_by_parent_abs_path: Default::default(),
                 git_repositories: Default::default(),
                 snapshot: Snapshot {
                     id: WorktreeId::from_usize(cx.model_id()),
-                    abs_path: abs_path.clone(),
+                    abs_path: abs_path.to_path_buf().into(),
                     root_name: root_name.clone(),
                     root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
                     entries_by_path: Default::default(),
@@ -334,60 +381,23 @@ impl Worktree {
 
             let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
             let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
-            let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
-            cx.spawn_weak(|this, mut cx| async move {
-                while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
-                    this.update(&mut cx, |this, cx| {
-                        let this = this.as_local_mut().unwrap();
-                        match state {
-                            ScanState::Started => {
-                                *this.is_scanning.0.borrow_mut() = true;
-                            }
-                            ScanState::Updated {
-                                snapshot,
-                                changes,
-                                barrier,
-                                scanning,
-                            } => {
-                                *this.is_scanning.0.borrow_mut() = scanning;
-                                this.set_snapshot(snapshot, changes, cx);
-                                drop(barrier);
-                            }
-                        }
-                        cx.notify();
-                    });
-                }
-            })
-            .detach();
-
-            let background_scanner_task = cx.background().spawn({
-                let fs = fs.clone();
-                let snapshot = snapshot.clone();
-                let background = cx.background().clone();
-                async move {
-                    let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
-                    BackgroundScanner::new(
-                        snapshot,
-                        next_entry_id,
-                        fs,
-                        scan_states_tx,
-                        background,
-                        scan_requests_rx,
-                        path_prefixes_to_scan_rx,
-                    )
-                    .run(events)
-                    .await;
-                }
-            });
-
+            let task_snapshot = snapshot.clone();
             Worktree::Local(LocalWorktree {
                 snapshot,
                 is_scanning: watch::channel_with(true),
                 share: None,
                 scan_requests_tx,
                 path_prefixes_to_scan_tx,
-                _background_scanner_task: background_scanner_task,
+                _settings_subscription: settings_subscription,
+                _background_scanner_tasks: start_background_scan_tasks(
+                    &abs_path,
+                    task_snapshot,
+                    scan_requests_rx,
+                    path_prefixes_to_scan_rx,
+                    Arc::clone(&next_entry_id),
+                    Arc::clone(&fs),
+                    cx,
+                ),
                 diagnostics: Default::default(),
                 diagnostic_summaries: Default::default(),
                 client,
@@ -584,6 +594,76 @@ impl Worktree {
     }
 }
 
+fn start_background_scan_tasks(
+    abs_path: &Path,
+    snapshot: LocalSnapshot,
+    scan_requests_rx: channel::Receiver<ScanRequest>,
+    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+    next_entry_id: Arc<AtomicUsize>,
+    fs: Arc<dyn Fs>,
+    cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+    let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+    let background_scanner = cx.background().spawn({
+        let abs_path = abs_path.to_path_buf();
+        let background = cx.background().clone();
+        async move {
+            let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+            BackgroundScanner::new(
+                snapshot,
+                next_entry_id,
+                fs,
+                scan_states_tx,
+                background,
+                scan_requests_rx,
+                path_prefixes_to_scan_rx,
+            )
+            .run(events)
+            .await;
+        }
+    });
+    let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
+        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
+            this.update(&mut cx, |this, cx| {
+                let this = this.as_local_mut().unwrap();
+                match state {
+                    ScanState::Started => {
+                        *this.is_scanning.0.borrow_mut() = true;
+                    }
+                    ScanState::Updated {
+                        snapshot,
+                        changes,
+                        barrier,
+                        scanning,
+                    } => {
+                        *this.is_scanning.0.borrow_mut() = scanning;
+                        this.set_snapshot(snapshot, changes, cx);
+                        drop(barrier);
+                    }
+                }
+                cx.notify();
+            });
+        }
+    });
+    vec![background_scanner, scan_state_updater]
+}
+
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+    project_settings.file_scan_exclusions.as_deref().unwrap_or(&[]).iter()
+    .sorted()
+    .filter_map(|pattern| {
+        PathMatcher::new(pattern)
+            .map(Some)
+            .unwrap_or_else(|e| {
+                log::error!(
+                    "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+                );
+                None
+            })
+    })
+    .collect()
+}
+
 impl LocalWorktree {
     pub fn contains_abs_path(&self, path: &Path) -> bool {
         path.starts_with(&self.abs_path)
@@ -1481,7 +1561,7 @@ impl Snapshot {
         self.entries_by_id.get(&entry_id, &()).is_some()
     }
 
-    pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+    fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
         let entry = Entry::try_from((&self.root_char_bag, entry))?;
         let old_entry = self.entries_by_id.insert_or_replace(
             PathEntry {
@@ -2145,6 +2225,12 @@ impl LocalSnapshot {
         paths.sort_by(|a, b| a.0.cmp(b.0));
         paths
     }
+
+    fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+        self.file_scan_exclusions
+            .iter()
+            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    }
 }
 
 impl BackgroundScannerState {
@@ -2167,7 +2253,7 @@ impl BackgroundScannerState {
         let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
         let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
         let mut containing_repository = None;
-        if !ignore_stack.is_all() {
+        if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
             if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
                 if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
                     containing_repository = Some((
@@ -2378,18 +2464,30 @@ impl BackgroundScannerState {
 
         // Remove any git repositories whose .git entry no longer exists.
         let snapshot = &mut self.snapshot;
-        let mut repositories = mem::take(&mut snapshot.git_repositories);
-        let mut repository_entries = mem::take(&mut snapshot.repository_entries);
-        repositories.retain(|work_directory_id, _| {
-            snapshot
-                .entry_for_id(*work_directory_id)
+        let mut ids_to_preserve = HashSet::default();
+        for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+            let exists_in_snapshot = snapshot
+                .entry_for_id(work_directory_id)
                 .map_or(false, |entry| {
                     snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
-                })
-        });
-        repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
-        snapshot.git_repositories = repositories;
-        snapshot.repository_entries = repository_entries;
+                });
+            if exists_in_snapshot {
+                ids_to_preserve.insert(work_directory_id);
+            } else {
+                let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+                if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+                    && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+                {
+                    ids_to_preserve.insert(work_directory_id);
+                }
+            }
+        }
+        snapshot
+            .git_repositories
+            .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+        snapshot
+            .repository_entries
+            .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
     }
 
     fn build_git_repository(
@@ -3094,7 +3192,7 @@ impl BackgroundScanner {
                 let ignore_stack = state
                     .snapshot
                     .ignore_stack_for_abs_path(&root_abs_path, true);
-                if ignore_stack.is_all() {
+                if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
                     root_entry.is_ignored = true;
                     state.insert_entry(root_entry.clone(), self.fs.as_ref());
                 }
@@ -3231,14 +3329,22 @@ impl BackgroundScanner {
                         return false;
                     };
 
-                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
-                    snapshot
-                        .entry_for_path(parent)
-                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
-                });
-                if !parent_dir_is_loaded {
-                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
-                    return false;
+                if !is_git_related(&abs_path) {
+                    let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+                        snapshot
+                            .entry_for_path(parent)
+                            .map_or(false, |entry| entry.kind == EntryKind::Dir)
+                    });
+                    if !parent_dir_is_loaded {
+                        log::debug!("ignoring event {relative_path:?} within unloaded directory");
+                        return false;
+                    }
+                    if snapshot.is_abs_path_excluded(abs_path) {
+                        log::debug!(
+                            "ignoring FS event for path {relative_path:?} within excluded directory"
+                        );
+                        return false;
+                    }
                 }
 
                 relative_paths.push(relative_path);
@@ -3401,18 +3507,26 @@ impl BackgroundScanner {
     }
 
     async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
-        log::debug!("scan directory {:?}", job.path);
-
-        let mut ignore_stack = job.ignore_stack.clone();
-        let mut new_ignore = None;
-        let (root_abs_path, root_char_bag, next_entry_id) = {
-            let snapshot = &self.state.lock().snapshot;
-            (
-                snapshot.abs_path().clone(),
-                snapshot.root_char_bag,
-                self.next_entry_id.clone(),
-            )
-        };
+        let root_abs_path;
+        let mut ignore_stack;
+        let mut new_ignore;
+        let root_char_bag;
+        let next_entry_id;
+        {
+            let state = self.state.lock();
+            let snapshot = &state.snapshot;
+            root_abs_path = snapshot.abs_path().clone();
+            if snapshot.is_abs_path_excluded(&job.abs_path) {
+                log::debug!("skipping excluded directory {:?}", job.path);
+                return Ok(());
+            }
+            log::debug!("scanning directory {:?}", job.path);
+            ignore_stack = job.ignore_stack.clone();
+            new_ignore = None;
+            root_char_bag = snapshot.root_char_bag;
+            next_entry_id = self.next_entry_id.clone();
+            drop(state);
+        }
 
         let mut dotgit_path = None;
         let mut root_canonical_path = None;
@@ -3427,18 +3541,8 @@ impl BackgroundScanner {
                     continue;
                 }
             };
-
             let child_name = child_abs_path.file_name().unwrap();
             let child_path: Arc<Path> = job.path.join(child_name).into();
-            let child_metadata = match self.fs.metadata(&child_abs_path).await {
-                Ok(Some(metadata)) => metadata,
-                Ok(None) => continue,
-                Err(err) => {
-                    log::error!("error processing {:?}: {:?}", child_abs_path, err);
-                    continue;
-                }
-            };
-
             // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
             if child_name == *GITIGNORE {
                 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3482,6 +3586,26 @@ impl BackgroundScanner {
                 dotgit_path = Some(child_path.clone());
             }
 
+            {
+                let mut state = self.state.lock();
+                if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+                    let relative_path = job.path.join(child_name);
+                    log::debug!("skipping excluded child entry {relative_path:?}");
+                    state.remove_path(&relative_path);
+                    continue;
+                }
+                drop(state);
+            }
+
+            let child_metadata = match self.fs.metadata(&child_abs_path).await {
+                Ok(Some(metadata)) => metadata,
+                Ok(None) => continue,
+                Err(err) => {
+                    log::error!("error processing {child_abs_path:?}: {err:?}");
+                    continue;
+                }
+            };
+
             let mut child_entry = Entry::new(
                 child_path.clone(),
                 &child_metadata,
@@ -3662,19 +3786,16 @@ impl BackgroundScanner {
                         self.next_entry_id.as_ref(),
                         state.snapshot.root_char_bag,
                     );
-                    fs_entry.is_ignored = ignore_stack.is_all();
+                    let is_dir = fs_entry.is_dir();
+                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
                     fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
 
-                    if !fs_entry.is_ignored {
-                        if !fs_entry.is_dir() {
-                            if let Some((work_dir, repo)) =
-                                state.snapshot.local_repo_for_path(&path)
-                            {
-                                if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
-                                    let repo_path = RepoPath(repo_path.into());
-                                    let repo = repo.repo_ptr.lock();
-                                    fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
-                                }
+                    if !is_dir && !fs_entry.is_ignored {
+                        if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+                            if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+                                let repo_path = RepoPath(repo_path.into());
+                                let repo = repo.repo_ptr.lock();
+                                fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
                             }
                         }
                     }
@@ -3833,8 +3954,7 @@ impl BackgroundScanner {
                     ignore_stack.clone()
                 };
 
-                // Scan any directories that were previously ignored and weren't
-                // previously scanned.
+                // Scan any directories that were previously ignored and weren't previously scanned.
                 if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
                     let state = self.state.lock();
                     if state.should_scan_directory(&entry) {
@@ -4010,6 +4130,12 @@ impl BackgroundScanner {
     }
 }
 
+fn is_git_related(abs_path: &Path) -> bool {
+    abs_path
+        .components()
+        .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
 fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
     let mut result = root_char_bag;
     result.extend(

crates/project/src/worktree_tests.rs 🔗

@@ -1,6 +1,7 @@
 use crate::{
+    project_settings::ProjectSettings,
     worktree::{Event, Snapshot, WorktreeModelHandle},
-    Entry, EntryKind, PathChange, Worktree,
+    Entry, EntryKind, PathChange, Project, Worktree,
 };
 use anyhow::Result;
 use client::Client;
@@ -12,6 +13,7 @@ use postage::stream::Stream;
 use pretty_assertions::assert_eq;
 use rand::prelude::*;
 use serde_json::json;
+use settings::SettingsStore;
 use std::{
     env,
     fmt::Write,
@@ -23,6 +25,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
 
 #[gpui::test]
 async fn test_traversal(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -78,6 +81,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_descendent_entries(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -185,6 +189,7 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
 
 #[gpui::test(iterations = 10)]
 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -264,6 +269,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
 
 #[gpui::test]
 async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -439,6 +445,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -599,6 +606,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -722,6 +730,14 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
 
 #[gpui::test(iterations = 10)]
 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions = Some(Vec::new());
+            });
+        });
+    });
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -827,6 +843,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_write_file(cx: &mut TestAppContext) {
+    init_test(cx);
     let dir = temp_tree(json!({
         ".git": {},
         ".gitignore": "ignored-dir\n",
@@ -877,8 +894,105 @@ async fn test_write_file(cx: &mut TestAppContext) {
     });
 }
 
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+    init_test(cx);
+    let dir = temp_tree(json!({
+        ".gitignore": "**/target\n/node_modules\n",
+        "target": {
+            "index": "blah2"
+        },
+        "node_modules": {
+            ".DS_Store": "",
+            "prettier": {
+                "package.json": "{}",
+            },
+        },
+        "src": {
+            ".DS_Store": "",
+            "foo": {
+                "foo.rs": "mod another;\n",
+                "another.rs": "// another",
+            },
+            "bar": {
+                "bar.rs": "// bar",
+            },
+            "lib.rs": "mod foo;\nmod bar;\n",
+        },
+        ".DS_Store": "",
+    }));
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+            });
+        });
+    });
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "node_modules/.DS_Store",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+            &["target", "node_modules"],
+            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+        )
+    });
+
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/node_modules/**".to_string()]);
+            });
+        });
+    });
+    tree.flush_fs_events(cx).await;
+    cx.foreground().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "node_modules/prettier/package.json",
+                "node_modules/.DS_Store",
+                "node_modules",
+            ],
+            &["target"],
+            &[
+                ".gitignore",
+                "src/lib.rs",
+                "src/bar/bar.rs",
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+        )
+    });
+}
+
 #[gpui::test(iterations = 30)]
 async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -938,6 +1052,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+    init_test(cx);
     let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
 
     let fs_fake = FakeFs::new(cx.background());
@@ -1054,6 +1169,7 @@ async fn test_random_worktree_operations_during_initial_scan(
     cx: &mut TestAppContext,
     mut rng: StdRng,
 ) {
+    init_test(cx);
     let operations = env::var("OPERATIONS")
         .map(|o| o.parse().unwrap())
         .unwrap_or(5);
@@ -1143,6 +1259,7 @@ async fn test_random_worktree_operations_during_initial_scan(
 
 #[gpui::test(iterations = 100)]
 async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+    init_test(cx);
     let operations = env::var("OPERATIONS")
         .map(|o| o.parse().unwrap())
         .unwrap_or(40);
@@ -1557,6 +1674,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
 
 #[gpui::test]
 async fn test_rename_work_directory(cx: &mut TestAppContext) {
+    init_test(cx);
     let root = temp_tree(json!({
         "projects": {
             "project1": {
@@ -1627,6 +1745,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+    init_test(cx);
     let root = temp_tree(json!({
         "c.txt": "",
         "dir1": {
@@ -1747,6 +1866,15 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
 
 #[gpui::test]
 async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
+            });
+        });
+    });
     const IGNORE_RULE: &'static str = "**/target";
 
     let root = temp_tree(json!({
@@ -1935,6 +2063,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
 
 #[gpui::test]
 async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+    init_test(cx);
     let fs = FakeFs::new(cx.background());
     fs.insert_tree(
         "/root",
@@ -2139,3 +2268,44 @@ fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Sta
         .map(|status| (status.path().unwrap().to_string(), status.status()))
         .collect()
 }
+
+#[track_caller]
+fn check_worktree_entries(
+    tree: &Worktree,
+    expected_excluded_paths: &[&str],
+    expected_ignored_paths: &[&str],
+    expected_tracked_paths: &[&str],
+) {
+    for path in expected_excluded_paths {
+        let entry = tree.entry_for_path(path);
+        assert!(
+            entry.is_none(),
+            "expected path '{path}' to be excluded, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_ignored_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+        assert!(
+            entry.is_ignored,
+            "expected path '{path}' to be ignored, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_tracked_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+        assert!(
+            !entry.is_ignored,
+            "expected path '{path}' to be tracked, but got entry: {entry:?}",
+        );
+    }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| {
+        cx.set_global(SettingsStore::test(cx));
+        Project::init_settings(cx);
+    });
+}

crates/project2/src/ignore.rs 🔗

@@ -20,10 +20,6 @@ impl IgnoreStack {
         Arc::new(Self::All)
     }
 
-    pub fn is_all(&self) -> bool {
-        matches!(self, IgnoreStack::All)
-    }
-
     pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
         match self.as_ref() {
             IgnoreStack::All => self,

crates/project2/src/project2.rs 🔗

@@ -5618,7 +5618,16 @@ impl Project {
             .collect::<Vec<_>>();
 
         let background = cx.background_executor().clone();
-        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+        let path_count: usize = snapshots
+            .iter()
+            .map(|s| {
+                if query.include_ignored() {
+                    s.file_count()
+                } else {
+                    s.visible_file_count()
+                }
+            })
+            .sum();
         if path_count == 0 {
             let (_, rx) = smol::channel::bounded(1024);
             return rx;
@@ -5631,8 +5640,16 @@ impl Project {
             .iter()
             .filter_map(|(_, b)| {
                 let buffer = b.upgrade()?;
-                let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
-                if let Some(path) = snapshot.file().map(|file| file.path()) {
+                let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
+                    let is_ignored = buffer
+                        .project_path(cx)
+                        .and_then(|path| self.entry_for_path(&path, cx))
+                        .map_or(false, |entry| entry.is_ignored);
+                    (is_ignored, buffer.snapshot())
+                });
+                if is_ignored && !query.include_ignored() {
+                    return None;
+                } else if let Some(path) = snapshot.file().map(|file| file.path()) {
                     Some((path.clone(), (buffer, snapshot)))
                 } else {
                     unnamed_files.push(buffer);
@@ -5806,7 +5823,12 @@ impl Project {
                         let mut snapshot_start_ix = 0;
                         let mut abs_path = PathBuf::new();
                         for snapshot in snapshots {
-                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+                            let snapshot_end_ix = snapshot_start_ix
+                                + if query.include_ignored() {
+                                    snapshot.file_count()
+                                } else {
+                                    snapshot.visible_file_count()
+                                };
                             if worker_end_ix <= snapshot_start_ix {
                                 break;
                             } else if worker_start_ix > snapshot_end_ix {
@@ -5819,7 +5841,7 @@ impl Project {
                                     cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
 
                                 for entry in snapshot
-                                    .files(false, start_in_snapshot)
+                                    .files(query.include_ignored(), start_in_snapshot)
                                     .take(end_in_snapshot - start_in_snapshot)
                                 {
                                     if matching_paths_tx.is_closed() {

crates/project2/src/project_settings.rs 🔗

@@ -11,6 +11,8 @@ pub struct ProjectSettings {
     pub lsp: HashMap<Arc<str>, LspSettings>,
     #[serde(default)]
     pub git: GitSettings,
+    #[serde(default)]
+    pub file_scan_exclusions: Option<Vec<String>>,
 }
 
 #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]

crates/project2/src/project_tests.rs 🔗

@@ -3730,7 +3730,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3755,7 +3755,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
     assert_eq!(
         search(
             &project,
-            SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
             cx
         )
         .await
@@ -3794,6 +3794,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 Vec::new()
             )
@@ -3813,6 +3814,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.rs").unwrap()],
                 Vec::new()
             )
@@ -3835,6 +3837,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap(),
@@ -3859,6 +3862,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
                     PathMatcher::new("*.ts").unwrap(),
@@ -3906,6 +3910,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -3930,6 +3935,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![PathMatcher::new("*.rs").unwrap()],
             )
@@ -3952,6 +3958,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
@@ -3976,6 +3983,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
                 search_query,
                 false,
                 true,
+                false,
                 Vec::new(),
                 vec![
                     PathMatcher::new("*.rs").unwrap(),
@@ -4017,6 +4025,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.odd").unwrap()],
                 vec![PathMatcher::new("*.odd").unwrap()],
             )
@@ -4036,6 +4045,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![PathMatcher::new("*.ts").unwrap()],
                 vec![PathMatcher::new("*.ts").unwrap()],
             ).unwrap(),
@@ -4054,6 +4064,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()
@@ -4079,6 +4090,7 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
                 search_query,
                 false,
                 true,
+                false,
                 vec![
                     PathMatcher::new("*.ts").unwrap(),
                     PathMatcher::new("*.odd").unwrap()

crates/project2/src/search.rs 🔗

@@ -39,6 +39,7 @@ pub enum SearchQuery {
         replacement: Option<String>,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 
@@ -48,6 +49,7 @@ pub enum SearchQuery {
         multiline: bool,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         inner: SearchInputs,
     },
 }
@@ -57,6 +59,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -74,6 +77,7 @@ impl SearchQuery {
             replacement: None,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -82,6 +86,7 @@ impl SearchQuery {
         query: impl ToString,
         whole_word: bool,
         case_sensitive: bool,
+        include_ignored: bool,
         files_to_include: Vec<PathMatcher>,
         files_to_exclude: Vec<PathMatcher>,
     ) -> Result<Self> {
@@ -111,6 +116,7 @@ impl SearchQuery {
             multiline,
             whole_word,
             case_sensitive,
+            include_ignored,
             inner,
         })
     }
@@ -121,6 +127,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -129,6 +136,7 @@ impl SearchQuery {
                 message.query,
                 message.whole_word,
                 message.case_sensitive,
+                message.include_ignored,
                 deserialize_path_matches(&message.files_to_include)?,
                 deserialize_path_matches(&message.files_to_exclude)?,
             )
@@ -156,6 +164,7 @@ impl SearchQuery {
             regex: self.is_regex(),
             whole_word: self.whole_word(),
             case_sensitive: self.case_sensitive(),
+            include_ignored: self.include_ignored(),
             files_to_include: self
                 .files_to_include()
                 .iter()
@@ -336,6 +345,17 @@ impl SearchQuery {
         }
     }
 
+    pub fn include_ignored(&self) -> bool {
+        match self {
+            Self::Text {
+                include_ignored, ..
+            } => *include_ignored,
+            Self::Regex {
+                include_ignored, ..
+            } => *include_ignored,
+        }
+    }
+
     pub fn is_regex(&self) -> bool {
         matches!(self, Self::Regex { .. })
     }

crates/project2/src/worktree.rs 🔗

@@ -1,5 +1,6 @@
 use crate::{
-    copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+    copy_recursive, ignore::IgnoreStack, project_settings::ProjectSettings, DiagnosticSummary,
+    ProjectEntryId, RemoveOptions,
 };
 use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
 use anyhow::{anyhow, Context as _, Result};
@@ -25,6 +26,7 @@ use gpui::{
     AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
     Task,
 };
+use itertools::Itertools;
 use language::{
     proto::{
         deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@@ -39,6 +41,7 @@ use postage::{
     prelude::{Sink as _, Stream as _},
     watch,
 };
+use settings::{Settings, SettingsStore};
 use smol::channel::{self, Sender};
 use std::{
     any::Any,
@@ -58,7 +61,10 @@ use std::{
     time::{Duration, SystemTime},
 };
 use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt};
+use util::{
+    paths::{PathMatcher, HOME},
+    ResultExt,
+};
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
 pub struct WorktreeId(usize);
@@ -73,7 +79,7 @@ pub struct LocalWorktree {
     scan_requests_tx: channel::Sender<ScanRequest>,
     path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
     is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
-    _background_scanner_task: Task<()>,
+    _background_scanner_tasks: Vec<Task<()>>,
     share: Option<ShareState>,
     diagnostics: HashMap<
         Arc<Path>,
@@ -219,6 +225,7 @@ pub struct LocalSnapshot {
     /// All of the git repositories in the worktree, indexed by the project entry
     /// id of their parent directory.
     git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+    file_scan_exclusions: Vec<PathMatcher>,
 }
 
 struct BackgroundScannerState {
@@ -302,17 +309,56 @@ impl Worktree {
             .await
             .context("failed to stat worktree path")?;
 
+        let closure_fs = Arc::clone(&fs);
+        let closure_next_entry_id = Arc::clone(&next_entry_id);
+        let closure_abs_path = abs_path.to_path_buf();
         cx.build_model(move |cx: &mut ModelContext<Worktree>| {
+            cx.observe_global::<SettingsStore>(move |this, cx| {
+                if let Self::Local(this) = this {
+                    let new_file_scan_exclusions =
+                        file_scan_exclusions(ProjectSettings::get_global(cx));
+                    if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
+                        this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
+                        log::info!(
+                            "Re-scanning directories, new scan exclude files: {:?}",
+                            this.snapshot
+                                .file_scan_exclusions
+                                .iter()
+                                .map(ToString::to_string)
+                                .collect::<Vec<_>>()
+                        );
+
+                        let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+                        let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) =
+                            channel::unbounded();
+                        this.scan_requests_tx = scan_requests_tx;
+                        this.path_prefixes_to_scan_tx = path_prefixes_to_scan_tx;
+                        this._background_scanner_tasks = start_background_scan_tasks(
+                            &closure_abs_path,
+                            this.snapshot(),
+                            scan_requests_rx,
+                            path_prefixes_to_scan_rx,
+                            Arc::clone(&closure_next_entry_id),
+                            Arc::clone(&closure_fs),
+                            cx,
+                        );
+                        this.is_scanning = watch::channel_with(true);
+                    }
+                }
+            })
+            .detach();
+
             let root_name = abs_path
                 .file_name()
                 .map_or(String::new(), |f| f.to_string_lossy().to_string());
 
             let mut snapshot = LocalSnapshot {
+                file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
                 ignores_by_parent_abs_path: Default::default(),
                 git_repositories: Default::default(),
                 snapshot: Snapshot {
                     id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
-                    abs_path: abs_path.clone(),
+                    abs_path: abs_path.to_path_buf().into(),
                     root_name: root_name.clone(),
                     root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
                     entries_by_path: Default::default(),
@@ -337,61 +383,22 @@ impl Worktree {
 
             let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
             let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
-            let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-
-            cx.spawn(|this, mut cx| async move {
-                while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
-                    this.update(&mut cx, |this, cx| {
-                        let this = this.as_local_mut().unwrap();
-                        match state {
-                            ScanState::Started => {
-                                *this.is_scanning.0.borrow_mut() = true;
-                            }
-                            ScanState::Updated {
-                                snapshot,
-                                changes,
-                                barrier,
-                                scanning,
-                            } => {
-                                *this.is_scanning.0.borrow_mut() = scanning;
-                                this.set_snapshot(snapshot, changes, cx);
-                                drop(barrier);
-                            }
-                        }
-                        cx.notify();
-                    })
-                    .ok();
-                }
-            })
-            .detach();
-
-            let background_scanner_task = cx.background_executor().spawn({
-                let fs = fs.clone();
-                let snapshot = snapshot.clone();
-                let background = cx.background_executor().clone();
-                async move {
-                    let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
-                    BackgroundScanner::new(
-                        snapshot,
-                        next_entry_id,
-                        fs,
-                        scan_states_tx,
-                        background,
-                        scan_requests_rx,
-                        path_prefixes_to_scan_rx,
-                    )
-                    .run(events)
-                    .await;
-                }
-            });
-
+            let task_snapshot = snapshot.clone();
             Worktree::Local(LocalWorktree {
                 snapshot,
                 is_scanning: watch::channel_with(true),
                 share: None,
                 scan_requests_tx,
                 path_prefixes_to_scan_tx,
-                _background_scanner_task: background_scanner_task,
+                _background_scanner_tasks: start_background_scan_tasks(
+                    &abs_path,
+                    task_snapshot,
+                    scan_requests_rx,
+                    path_prefixes_to_scan_rx,
+                    Arc::clone(&next_entry_id),
+                    Arc::clone(&fs),
+                    cx,
+                ),
                 diagnostics: Default::default(),
                 diagnostic_summaries: Default::default(),
                 client,
@@ -584,6 +591,77 @@ impl Worktree {
     }
 }
 
+/// Spawns the two tasks that keep a local worktree in sync with the filesystem:
+/// a background scanner that watches `abs_path` and emits `ScanState` messages,
+/// and a foreground updater that applies those messages to the `Worktree` model.
+/// Returns both tasks; the caller must keep them alive (dropping them stops the
+/// scan), which is how the settings observer restarts scanning with fresh
+/// `file_scan_exclusions`.
+fn start_background_scan_tasks(
+    abs_path: &Path,
+    snapshot: LocalSnapshot,
+    scan_requests_rx: channel::Receiver<ScanRequest>,
+    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+    next_entry_id: Arc<AtomicUsize>,
+    fs: Arc<dyn Fs>,
+    cx: &mut ModelContext<'_, Worktree>,
+) -> Vec<Task<()>> {
+    let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+    // Run the scanner on the background executor; it owns the FS event stream
+    // (debounced to 100ms) and pushes state through `scan_states_tx`.
+    let background_scanner = cx.background_executor().spawn({
+        let abs_path = abs_path.to_path_buf();
+        let background = cx.background_executor().clone();
+        async move {
+            let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+            BackgroundScanner::new(
+                snapshot,
+                next_entry_id,
+                fs,
+                scan_states_tx,
+                background,
+                scan_requests_rx,
+                path_prefixes_to_scan_rx,
+            )
+            .run(events)
+            .await;
+        }
+    });
+    // Foreground task: drain scanner messages and fold them into the model.
+    // Loop ends when either the channel closes or the worktree is dropped
+    // (`this.upgrade()` fails).
+    let scan_state_updater = cx.spawn(|this, mut cx| async move {
+        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
+            this.update(&mut cx, |this, cx| {
+                let this = this.as_local_mut().unwrap();
+                match state {
+                    ScanState::Started => {
+                        *this.is_scanning.0.borrow_mut() = true;
+                    }
+                    ScanState::Updated {
+                        snapshot,
+                        changes,
+                        barrier,
+                        scanning,
+                    } => {
+                        *this.is_scanning.0.borrow_mut() = scanning;
+                        this.set_snapshot(snapshot, changes, cx);
+                        // Dropping the barrier signals any waiter that this
+                        // batch of changes has been applied.
+                        drop(barrier);
+                    }
+                }
+                cx.notify();
+            })
+            .ok();
+        }
+    });
+    vec![background_scanner, scan_state_updater]
+}
+
+/// Builds the worktree's exclusion matchers from the `file_scan_exclusions`
+/// project setting. Unset settings yield an empty list; patterns that fail to
+/// parse are logged and skipped rather than aborting the whole list.
+fn file_scan_exclusions(project_settings: &ProjectSettings) -> Vec<PathMatcher> {
+    project_settings
+        .file_scan_exclusions
+        .as_deref()
+        .unwrap_or(&[])
+        .iter()
+        // Sorted so the settings observer's `!=` comparison against the
+        // previous list is insensitive to pattern order in settings.
+        .sorted()
+        .filter_map(|pattern| {
+            PathMatcher::new(pattern)
+                .map_err(|e| {
+                    log::error!(
+                        "Skipping pattern {pattern} in `file_scan_exclusions` project settings due to parsing error: {e:#}"
+                    );
+                })
+                .ok()
+        })
+        .collect()
+}
+
 impl LocalWorktree {
     pub fn contains_abs_path(&self, path: &Path) -> bool {
         path.starts_with(&self.abs_path)
@@ -1482,7 +1560,7 @@ impl Snapshot {
         self.entries_by_id.get(&entry_id, &()).is_some()
     }
 
-    pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+    fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
         let entry = Entry::try_from((&self.root_char_bag, entry))?;
         let old_entry = self.entries_by_id.insert_or_replace(
             PathEntry {
@@ -2143,6 +2221,12 @@ impl LocalSnapshot {
         paths.sort_by(|a, b| a.0.cmp(b.0));
         paths
     }
+
+    /// Whether `abs_path` matches any of the configured `file_scan_exclusions`
+    /// patterns, i.e. should be omitted from scanning entirely.
+    fn is_abs_path_excluded(&self, abs_path: &Path) -> bool {
+        self.file_scan_exclusions
+            .iter()
+            .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+    }
 }
 
 impl BackgroundScannerState {
@@ -2165,7 +2249,7 @@ impl BackgroundScannerState {
         let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
         let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
         let mut containing_repository = None;
-        if !ignore_stack.is_all() {
+        if !ignore_stack.is_abs_path_ignored(&abs_path, true) {
             if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
                 if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
                     containing_repository = Some((
@@ -2376,18 +2460,30 @@ impl BackgroundScannerState {
 
         // Remove any git repositories whose .git entry no longer exists.
         let snapshot = &mut self.snapshot;
-        let mut repositories = mem::take(&mut snapshot.git_repositories);
-        let mut repository_entries = mem::take(&mut snapshot.repository_entries);
-        repositories.retain(|work_directory_id, _| {
-            snapshot
-                .entry_for_id(*work_directory_id)
+        let mut ids_to_preserve = HashSet::default();
+        for (&work_directory_id, entry) in snapshot.git_repositories.iter() {
+            let exists_in_snapshot = snapshot
+                .entry_for_id(work_directory_id)
                 .map_or(false, |entry| {
                     snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
-                })
-        });
-        repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
-        snapshot.git_repositories = repositories;
-        snapshot.repository_entries = repository_entries;
+                });
+            if exists_in_snapshot {
+                ids_to_preserve.insert(work_directory_id);
+            } else {
+                let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
+                if snapshot.is_abs_path_excluded(&git_dir_abs_path)
+                    && !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
+                {
+                    ids_to_preserve.insert(work_directory_id);
+                }
+            }
+        }
+        snapshot
+            .git_repositories
+            .retain(|work_directory_id, _| ids_to_preserve.contains(work_directory_id));
+        snapshot
+            .repository_entries
+            .retain(|_, entry| ids_to_preserve.contains(&entry.work_directory.0));
     }
 
     fn build_git_repository(
@@ -3085,7 +3181,7 @@ impl BackgroundScanner {
                 let ignore_stack = state
                     .snapshot
                     .ignore_stack_for_abs_path(&root_abs_path, true);
-                if ignore_stack.is_all() {
+                if ignore_stack.is_abs_path_ignored(&root_abs_path, true) {
                     root_entry.is_ignored = true;
                     state.insert_entry(root_entry.clone(), self.fs.as_ref());
                 }
@@ -3222,14 +3318,22 @@ impl BackgroundScanner {
                         return false;
                     };
 
-                let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
-                    snapshot
-                        .entry_for_path(parent)
-                        .map_or(false, |entry| entry.kind == EntryKind::Dir)
-                });
-                if !parent_dir_is_loaded {
-                    log::debug!("ignoring event {relative_path:?} within unloaded directory");
-                    return false;
+                if !is_git_related(&abs_path) {
+                    let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+                        snapshot
+                            .entry_for_path(parent)
+                            .map_or(false, |entry| entry.kind == EntryKind::Dir)
+                    });
+                    if !parent_dir_is_loaded {
+                        log::debug!("ignoring event {relative_path:?} within unloaded directory");
+                        return false;
+                    }
+                    if snapshot.is_abs_path_excluded(abs_path) {
+                        log::debug!(
+                            "ignoring FS event for path {relative_path:?} within excluded directory"
+                        );
+                        return false;
+                    }
                 }
 
                 relative_paths.push(relative_path);
@@ -3392,18 +3496,26 @@ impl BackgroundScanner {
     }
 
     async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
-        log::debug!("scan directory {:?}", job.path);
-
-        let mut ignore_stack = job.ignore_stack.clone();
-        let mut new_ignore = None;
-        let (root_abs_path, root_char_bag, next_entry_id) = {
-            let snapshot = &self.state.lock().snapshot;
-            (
-                snapshot.abs_path().clone(),
-                snapshot.root_char_bag,
-                self.next_entry_id.clone(),
-            )
-        };
+        let root_abs_path;
+        let mut ignore_stack;
+        let mut new_ignore;
+        let root_char_bag;
+        let next_entry_id;
+        {
+            let state = self.state.lock();
+            let snapshot = &state.snapshot;
+            root_abs_path = snapshot.abs_path().clone();
+            if snapshot.is_abs_path_excluded(&job.abs_path) {
+                log::debug!("skipping excluded directory {:?}", job.path);
+                return Ok(());
+            }
+            log::debug!("scanning directory {:?}", job.path);
+            ignore_stack = job.ignore_stack.clone();
+            new_ignore = None;
+            root_char_bag = snapshot.root_char_bag;
+            next_entry_id = self.next_entry_id.clone();
+            drop(state);
+        }
 
         let mut dotgit_path = None;
         let mut root_canonical_path = None;
@@ -3418,18 +3530,8 @@ impl BackgroundScanner {
                     continue;
                 }
             };
-
             let child_name = child_abs_path.file_name().unwrap();
             let child_path: Arc<Path> = job.path.join(child_name).into();
-            let child_metadata = match self.fs.metadata(&child_abs_path).await {
-                Ok(Some(metadata)) => metadata,
-                Ok(None) => continue,
-                Err(err) => {
-                    log::error!("error processing {:?}: {:?}", child_abs_path, err);
-                    continue;
-                }
-            };
-
             // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
             if child_name == *GITIGNORE {
                 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
@@ -3473,6 +3575,26 @@ impl BackgroundScanner {
                 dotgit_path = Some(child_path.clone());
             }
 
+            {
+                let mut state = self.state.lock();
+                if state.snapshot.is_abs_path_excluded(&child_abs_path) {
+                    let relative_path = job.path.join(child_name);
+                    log::debug!("skipping excluded child entry {relative_path:?}");
+                    state.remove_path(&relative_path);
+                    continue;
+                }
+                drop(state);
+            }
+
+            let child_metadata = match self.fs.metadata(&child_abs_path).await {
+                Ok(Some(metadata)) => metadata,
+                Ok(None) => continue,
+                Err(err) => {
+                    log::error!("error processing {child_abs_path:?}: {err:?}");
+                    continue;
+                }
+            };
+
             let mut child_entry = Entry::new(
                 child_path.clone(),
                 &child_metadata,
@@ -3653,19 +3775,16 @@ impl BackgroundScanner {
                         self.next_entry_id.as_ref(),
                         state.snapshot.root_char_bag,
                     );
-                    fs_entry.is_ignored = ignore_stack.is_all();
+                    let is_dir = fs_entry.is_dir();
+                    fs_entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, is_dir);
                     fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
 
-                    if !fs_entry.is_ignored {
-                        if !fs_entry.is_dir() {
-                            if let Some((work_dir, repo)) =
-                                state.snapshot.local_repo_for_path(&path)
-                            {
-                                if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
-                                    let repo_path = RepoPath(repo_path.into());
-                                    let repo = repo.repo_ptr.lock();
-                                    fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
-                                }
+                    if !is_dir && !fs_entry.is_ignored {
+                        if let Some((work_dir, repo)) = state.snapshot.local_repo_for_path(&path) {
+                            if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+                                let repo_path = RepoPath(repo_path.into());
+                                let repo = repo.repo_ptr.lock();
+                                fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
                             }
                         }
                     }
@@ -3824,8 +3943,7 @@ impl BackgroundScanner {
                     ignore_stack.clone()
                 };
 
-                // Scan any directories that were previously ignored and weren't
-                // previously scanned.
+                // Scan any directories that were previously ignored and weren't previously scanned.
                 if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
                     let state = self.state.lock();
                     if state.should_scan_directory(&entry) {
@@ -4001,6 +4119,12 @@ impl BackgroundScanner {
     }
 }
 
+/// Whether any component of `abs_path` is a `.git` or `.gitignore` entry.
+/// Such paths bypass the unloaded-directory and exclusion filtering in the
+/// FS-event handler, so git metadata changes are always processed even when
+/// `.git` itself is in `file_scan_exclusions`.
+fn is_git_related(abs_path: &Path) -> bool {
+    abs_path
+        .components()
+        .any(|c| c.as_os_str() == *DOT_GIT || c.as_os_str() == *GITIGNORE)
+}
+
 fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
     let mut result = root_char_bag;
     result.extend(

crates/project2/src/worktree_tests.rs 🔗

@@ -1,2141 +1,2310 @@
-// use crate::{
-//     worktree::{Event, Snapshot, WorktreeModelHandle},
-//     Entry, EntryKind, PathChange, Worktree,
-// };
-// use anyhow::Result;
-// use client2::Client;
-// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
-// use git::GITIGNORE;
-// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
-// use parking_lot::Mutex;
-// use postage::stream::Stream;
-// use pretty_assertions::assert_eq;
-// use rand::prelude::*;
-// use serde_json::json;
-// use std::{
-//     env,
-//     fmt::Write,
-//     mem,
-//     path::{Path, PathBuf},
-//     sync::Arc,
-// };
-// use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
-
-// #[gpui::test]
-// async fn test_traversal(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//            ".gitignore": "a/b\n",
-//            "a": {
-//                "b": "",
-//                "c": "",
-//            }
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new(".gitignore"),
-//                 Path::new("a"),
-//                 Path::new("a/c"),
-//             ]
-//         );
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new(".gitignore"),
-//                 Path::new("a"),
-//                 Path::new("a/b"),
-//                 Path::new("a/c"),
-//             ]
-//         );
-//     })
-// }
-
-// #[gpui::test]
-// async fn test_descendent_entries(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "a": "",
-//             "b": {
-//                "c": {
-//                    "d": ""
-//                },
-//                "e": {}
-//             },
-//             "f": "",
-//             "g": {
-//                 "h": {}
-//             },
-//             "i": {
-//                 "j": {
-//                     "k": ""
-//                 },
-//                 "l": {
-
-//                 }
-//             },
-//             ".gitignore": "i/j\n",
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("b"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("b/c/d"),]
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("b"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new("b"),
-//                 Path::new("b/c"),
-//                 Path::new("b/c/d"),
-//                 Path::new("b/e"),
-//             ]
-//         );
-
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("g"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             Vec::<PathBuf>::new()
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("g"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("g"), Path::new("g/h"),]
-//         );
-//     });
-
-//     // Expand gitignored directory.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("i/j").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.descendent_entries(false, false, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             Vec::<PathBuf>::new()
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(false, true, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("i/j/k")]
-//         );
-//         assert_eq!(
-//             tree.descendent_entries(true, false, Path::new("i"))
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![Path::new("i"), Path::new("i/l"),]
-//         );
-//     })
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "lib": {
-//                 "a": {
-//                     "a.txt": ""
-//                 },
-//                 "b": {
-//                     "b.txt": ""
-//                 }
-//             }
-//         }),
-//     )
-//     .await;
-//     fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
-//     fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new("lib"),
-//                 Path::new("lib/a"),
-//                 Path::new("lib/a/a.txt"),
-//                 Path::new("lib/a/lib"),
-//                 Path::new("lib/b"),
-//                 Path::new("lib/b/b.txt"),
-//                 Path::new("lib/b/lib"),
-//             ]
-//         );
-//     });
-
-//     fs.rename(
-//         Path::new("/root/lib/a/lib"),
-//         Path::new("/root/lib/a/lib-2"),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     executor.run_until_parked();
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(false)
-//                 .map(|entry| entry.path.as_ref())
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 Path::new(""),
-//                 Path::new("lib"),
-//                 Path::new("lib/a"),
-//                 Path::new("lib/a/a.txt"),
-//                 Path::new("lib/a/lib-2"),
-//                 Path::new("lib/b"),
-//                 Path::new("lib/b/b.txt"),
-//                 Path::new("lib/b/lib"),
-//             ]
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "dir1": {
-//                 "deps": {
-//                     // symlinks here
-//                 },
-//                 "src": {
-//                     "a.rs": "",
-//                     "b.rs": "",
-//                 },
-//             },
-//             "dir2": {
-//                 "src": {
-//                     "c.rs": "",
-//                     "d.rs": "",
-//                 }
-//             },
-//             "dir3": {
-//                 "deps": {},
-//                 "src": {
-//                     "e.rs": "",
-//                     "f.rs": "",
-//                 },
-//             }
-//         }),
-//     )
-//     .await;
-
-//     // These symlinks point to directories outside of the worktree's root, dir1.
-//     fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
-//         .await;
-//     fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
-//         .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root/dir1"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     let tree_updates = Arc::new(Mutex::new(Vec::new()));
-//     tree.update(cx, |_, cx| {
-//         let tree_updates = tree_updates.clone();
-//         cx.subscribe(&tree, move |_, _, event, _| {
-//             if let Event::UpdatedEntries(update) = event {
-//                 tree_updates.lock().extend(
-//                     update
-//                         .iter()
-//                         .map(|(path, _, change)| (path.clone(), *change)),
-//                 );
-//             }
-//         })
-//         .detach();
-//     });
-
-//     // The symlinked directories are not scanned by default.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             tree.entry_for_path("deps/dep-dir2").unwrap().kind,
-//             EntryKind::UnloadedDir
-//         );
-//     });
-
-//     // Expand one of the symlinked directories.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // The expanded directory's contents are loaded. Subdirectories are
-//     // not scanned yet.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("deps/dep-dir3/deps"), true),
-//                 (Path::new("deps/dep-dir3/src"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-//     });
-//     assert_eq!(
-//         mem::take(&mut *tree_updates.lock()),
-//         &[
-//             (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
-//             (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
-//             (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
-//         ]
-//     );
-
-//     // Expand a subdirectory of one of the symlinked directories.
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // The expanded subdirectory's contents are loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_external))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new("deps"), false),
-//                 (Path::new("deps/dep-dir2"), true),
-//                 (Path::new("deps/dep-dir3"), true),
-//                 (Path::new("deps/dep-dir3/deps"), true),
-//                 (Path::new("deps/dep-dir3/src"), true),
-//                 (Path::new("deps/dep-dir3/src/e.rs"), true),
-//                 (Path::new("deps/dep-dir3/src/f.rs"), true),
-//                 (Path::new("src"), false),
-//                 (Path::new("src/a.rs"), false),
-//                 (Path::new("src/b.rs"), false),
-//             ]
-//         );
-//     });
-
-//     assert_eq!(
-//         mem::take(&mut *tree_updates.lock()),
-//         &[
-//             (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
-//             (
-//                 Path::new("deps/dep-dir3/src/e.rs").into(),
-//                 PathChange::Loaded
-//             ),
-//             (
-//                 Path::new("deps/dep-dir3/src/f.rs").into(),
-//                 PathChange::Loaded
-//             )
-//         ]
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_open_gitignored_files(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "node_modules\n",
-//             "one": {
-//                 "node_modules": {
-//                     "a": {
-//                         "a1.js": "a1",
-//                         "a2.js": "a2",
-//                     },
-//                     "b": {
-//                         "b1.js": "b1",
-//                         "b2.js": "b2",
-//                     },
-//                     "c": {
-//                         "c1.js": "c1",
-//                         "c2.js": "c2",
-//                     }
-//                 },
-//             },
-//             "two": {
-//                 "x.js": "",
-//                 "y.js": "",
-//             },
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-//     });
-
-//     // Open a file that is nested inside of a gitignored directory that
-//     // has not yet been expanded.
-//     let prev_read_dir_count = fs.read_dir_call_count();
-//     let buffer = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     tree.read_with(cx, |tree, cx| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("one/node_modules/a"), true),
-//                 (Path::new("one/node_modules/b"), true),
-//                 (Path::new("one/node_modules/b/b1.js"), true),
-//                 (Path::new("one/node_modules/b/b2.js"), true),
-//                 (Path::new("one/node_modules/c"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             buffer.read(cx).file().unwrap().path().as_ref(),
-//             Path::new("one/node_modules/b/b1.js")
-//         );
-
-//         // Only the newly-expanded directories are scanned.
-//         assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
-//     });
-
-//     // Open another file in a different subdirectory of the same
-//     // gitignored directory.
-//     let prev_read_dir_count = fs.read_dir_call_count();
-//     let buffer = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
-//         })
-//         .await
-//         .unwrap();
-
-//     tree.read_with(cx, |tree, cx| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             vec![
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("one"), false),
-//                 (Path::new("one/node_modules"), true),
-//                 (Path::new("one/node_modules/a"), true),
-//                 (Path::new("one/node_modules/a/a1.js"), true),
-//                 (Path::new("one/node_modules/a/a2.js"), true),
-//                 (Path::new("one/node_modules/b"), true),
-//                 (Path::new("one/node_modules/b/b1.js"), true),
-//                 (Path::new("one/node_modules/b/b2.js"), true),
-//                 (Path::new("one/node_modules/c"), true),
-//                 (Path::new("two"), false),
-//                 (Path::new("two/x.js"), false),
-//                 (Path::new("two/y.js"), false),
-//             ]
-//         );
-
-//         assert_eq!(
-//             buffer.read(cx).file().unwrap().path().as_ref(),
-//             Path::new("one/node_modules/a/a2.js")
-//         );
-
-//         // Only the newly-expanded directory is scanned.
-//         assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
-//     });
-
-//     // No work happens when files and directories change within an unloaded directory.
-//     let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
-//     fs.create_dir("/root/one/node_modules/c/lib".as_ref())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-//     assert_eq!(
-//         fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
-//         0
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "node_modules\n",
-//             "a": {
-//                 "a.js": "",
-//             },
-//             "b": {
-//                 "b.js": "",
-//             },
-//             "node_modules": {
-//                 "c": {
-//                     "c.js": "",
-//                 },
-//                 "d": {
-//                     "d.js": "",
-//                     "e": {
-//                         "e1.js": "",
-//                         "e2.js": "",
-//                     },
-//                     "f": {
-//                         "f1.js": "",
-//                         "f2.js": "",
-//                     }
-//                 },
-//             },
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     // Open a file within the gitignored directory, forcing some of its
-//     // subdirectories to be read, but not all.
-//     let read_dir_count_1 = fs.read_dir_call_count();
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     // Those subdirectories are now loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|e| (e.path.as_ref(), e.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("a"), false),
-//                 (Path::new("a/a.js"), false),
-//                 (Path::new("b"), false),
-//                 (Path::new("b/b.js"), false),
-//                 (Path::new("node_modules"), true),
-//                 (Path::new("node_modules/c"), true),
-//                 (Path::new("node_modules/d"), true),
-//                 (Path::new("node_modules/d/d.js"), true),
-//                 (Path::new("node_modules/d/e"), true),
-//                 (Path::new("node_modules/d/f"), true),
-//             ]
-//         );
-//     });
-//     let read_dir_count_2 = fs.read_dir_call_count();
-//     assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
-
-//     // Update the gitignore so that node_modules is no longer ignored,
-//     // but a subdirectory is ignored
-//     fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
-//         .await
-//         .unwrap();
-//     cx.foreground().run_until_parked();
-
-//     // All of the directories that are no longer ignored are now loaded.
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(
-//             tree.entries(true)
-//                 .map(|e| (e.path.as_ref(), e.is_ignored))
-//                 .collect::<Vec<_>>(),
-//             &[
-//                 (Path::new(""), false),
-//                 (Path::new(".gitignore"), false),
-//                 (Path::new("a"), false),
-//                 (Path::new("a/a.js"), false),
-//                 (Path::new("b"), false),
-//                 (Path::new("b/b.js"), false),
-//                 // This directory is no longer ignored
-//                 (Path::new("node_modules"), false),
-//                 (Path::new("node_modules/c"), false),
-//                 (Path::new("node_modules/c/c.js"), false),
-//                 (Path::new("node_modules/d"), false),
-//                 (Path::new("node_modules/d/d.js"), false),
-//                 // This subdirectory is now ignored
-//                 (Path::new("node_modules/d/e"), true),
-//                 (Path::new("node_modules/d/f"), false),
-//                 (Path::new("node_modules/d/f/f1.js"), false),
-//                 (Path::new("node_modules/d/f/f2.js"), false),
-//             ]
-//         );
-//     });
-
-//     // Each of the newly-loaded directories is scanned only once.
-//     let read_dir_count_3 = fs.read_dir_call_count();
-//     assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
-//             "tree": {
-//                 ".git": {},
-//                 ".gitignore": "ignored-dir\n",
-//                 "tracked-dir": {
-//                     "tracked-file1": "",
-//                     "ancestor-ignored-file1": "",
-//                 },
-//                 "ignored-dir": {
-//                     "ignored-file1": ""
-//                 }
-//             }
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         "/root/tree".as_ref(),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.read_with(cx, |tree, _| {
-//         tree.as_local()
-//             .unwrap()
-//             .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
-//     })
-//     .recv()
-//     .await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         assert!(
-//             !tree
-//                 .entry_for_path("tracked-dir/tracked-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("ignored-dir/ignored-file1")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//     });
-
-//     fs.create_file(
-//         "/root/tree/tracked-dir/tracked-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     fs.create_file(
-//         "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-//     fs.create_file(
-//         "/root/tree/ignored-dir/ignored-file2".as_ref(),
-//         Default::default(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.foreground().run_until_parked();
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         assert!(
-//             !tree
-//                 .entry_for_path("tracked-dir/tracked-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(
-//             tree.entry_for_path("ignored-dir/ignored-file2")
-//                 .unwrap()
-//                 .is_ignored
-//         );
-//         assert!(tree.entry_for_path(".git").unwrap().is_ignored);
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_write_file(cx: &mut TestAppContext) {
-//     let dir = temp_tree(json!({
-//         ".git": {},
-//         ".gitignore": "ignored-dir\n",
-//         "tracked-dir": {},
-//         "ignored-dir": {}
-//     }));
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         dir.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     tree.flush_fs_events(cx).await;
-
-//     tree.update(cx, |tree, cx| {
-//         tree.as_local().unwrap().write_file(
-//             Path::new("tracked-dir/file.txt"),
-//             "hello".into(),
-//             Default::default(),
-//             cx,
-//         )
-//     })
-//     .await
-//     .unwrap();
-//     tree.update(cx, |tree, cx| {
-//         tree.as_local().unwrap().write_file(
-//             Path::new("ignored-dir/file.txt"),
-//             "world".into(),
-//             Default::default(),
-//             cx,
-//         )
-//     })
-//     .await
-//     .unwrap();
-
-//     tree.read_with(cx, |tree, _| {
-//         let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
-//         let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
-//         assert!(!tracked.is_ignored);
-//         assert!(ignored.is_ignored);
-//     });
-// }
-
-// #[gpui::test(iterations = 30)]
-// async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             "b": {},
-//             "c": {},
-//             "d": {},
-//         }),
-//     )
-//     .await;
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         "/root".as_ref(),
-//         true,
-//         fs,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let snapshot1 = tree.update(cx, |tree, cx| {
-//         let tree = tree.as_local_mut().unwrap();
-//         let snapshot = Arc::new(Mutex::new(tree.snapshot()));
-//         let _ = tree.observe_updates(0, cx, {
-//             let snapshot = snapshot.clone();
-//             move |update| {
-//                 snapshot.lock().apply_remote_update(update).unwrap();
-//                 async { true }
-//             }
-//         });
-//         snapshot
-//     });
-
-//     let entry = tree
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/e".as_ref(), true, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_dir());
-
-//     cx.foreground().run_until_parked();
-//     tree.read_with(cx, |tree, _| {
-//         assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
-//     });
-
-//     let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//     assert_eq!(
-//         snapshot1.lock().entries(true).collect::<Vec<_>>(),
-//         snapshot2.entries(true).collect::<Vec<_>>()
-//     );
-// }
-
-// #[gpui::test]
-// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
-//     let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-//     let fs_fake = FakeFs::new(cx.background());
-//     fs_fake
-//         .insert_tree(
-//             "/root",
-//             json!({
-//                 "a": {},
-//             }),
-//         )
-//         .await;
-
-//     let tree_fake = Worktree::local(
-//         client_fake,
-//         "/root".as_ref(),
-//         true,
-//         fs_fake,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let entry = tree_fake
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_fake.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-//     });
-
-//     let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
-
-//     let fs_real = Arc::new(RealFs);
-//     let temp_root = temp_tree(json!({
-//         "a": {}
-//     }));
-
-//     let tree_real = Worktree::local(
-//         client_real,
-//         temp_root.path(),
-//         true,
-//         fs_real,
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/d.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
-//     });
-
-//     // Test smallest change
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("a/b/c/e.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
-//     });
-
-//     // Test largest change
-//     let entry = tree_real
-//         .update(cx, |tree, cx| {
-//             tree.as_local_mut()
-//                 .unwrap()
-//                 .create_entry("d/e/f/g.txt".as_ref(), false, cx)
-//         })
-//         .await
-//         .unwrap();
-//     assert!(entry.is_file());
-
-//     cx.foreground().run_until_parked();
-//     tree_real.read_with(cx, |tree, _| {
-//         assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
-//         assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
-//         assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
-//         assert!(tree.entry_for_path("d/").unwrap().is_dir());
-//     });
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_operations_during_initial_scan(
-//     cx: &mut TestAppContext,
-//     mut rng: StdRng,
-// ) {
-//     let operations = env::var("OPERATIONS")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(5);
-//     let initial_entries = env::var("INITIAL_ENTRIES")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(20);
-
-//     let root_dir = Path::new("/test");
-//     let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-//     fs.as_fake().insert_tree(root_dir, json!({})).await;
-//     for _ in 0..initial_entries {
-//         randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//     }
-//     log::info!("generated initial tree");
-
-//     let worktree = Worktree::local(
-//         build_client(cx),
-//         root_dir,
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
-//     let updates = Arc::new(Mutex::new(Vec::new()));
-//     worktree.update(cx, |tree, cx| {
-//         check_worktree_change_events(tree, cx);
-
-//         let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-//             let updates = updates.clone();
-//             move |update| {
-//                 updates.lock().push(update);
-//                 async { true }
-//             }
-//         });
-//     });
-
-//     for _ in 0..operations {
-//         worktree
-//             .update(cx, |worktree, cx| {
-//                 randomly_mutate_worktree(worktree, &mut rng, cx)
-//             })
-//             .await
-//             .log_err();
-//         worktree.read_with(cx, |tree, _| {
-//             tree.as_local().unwrap().snapshot().check_invariants(true)
-//         });
-
-//         if rng.gen_bool(0.6) {
-//             snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
-//         }
-//     }
-
-//     worktree
-//         .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//         .await;
-
-//     cx.foreground().run_until_parked();
-
-//     let final_snapshot = worktree.read_with(cx, |tree, _| {
-//         let tree = tree.as_local().unwrap();
-//         let snapshot = tree.snapshot();
-//         snapshot.check_invariants(true);
-//         snapshot
-//     });
-
-//     for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
-//         let mut updated_snapshot = snapshot.clone();
-//         for update in updates.lock().iter() {
-//             if update.scan_id >= updated_snapshot.scan_id() as u64 {
-//                 updated_snapshot
-//                     .apply_remote_update(update.clone())
-//                     .unwrap();
-//             }
-//         }
-
-//         assert_eq!(
-//             updated_snapshot.entries(true).collect::<Vec<_>>(),
-//             final_snapshot.entries(true).collect::<Vec<_>>(),
-//             "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
-//         );
-//     }
-// }
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
-//     let operations = env::var("OPERATIONS")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(40);
-//     let initial_entries = env::var("INITIAL_ENTRIES")
-//         .map(|o| o.parse().unwrap())
-//         .unwrap_or(20);
-
-//     let root_dir = Path::new("/test");
-//     let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
-//     fs.as_fake().insert_tree(root_dir, json!({})).await;
-//     for _ in 0..initial_entries {
-//         randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//     }
-//     log::info!("generated initial tree");
-
-//     let worktree = Worktree::local(
-//         build_client(cx),
-//         root_dir,
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let updates = Arc::new(Mutex::new(Vec::new()));
-//     worktree.update(cx, |tree, cx| {
-//         check_worktree_change_events(tree, cx);
-
-//         let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
-//             let updates = updates.clone();
-//             move |update| {
-//                 updates.lock().push(update);
-//                 async { true }
-//             }
-//         });
-//     });
-
-//     worktree
-//         .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//         .await;
-
-//     fs.as_fake().pause_events();
-//     let mut snapshots = Vec::new();
-//     let mut mutations_len = operations;
-//     while mutations_len > 1 {
-//         if rng.gen_bool(0.2) {
-//             worktree
-//                 .update(cx, |worktree, cx| {
-//                     randomly_mutate_worktree(worktree, &mut rng, cx)
-//                 })
-//                 .await
-//                 .log_err();
-//         } else {
-//             randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
-//         }
-
-//         let buffered_event_count = fs.as_fake().buffered_event_count();
-//         if buffered_event_count > 0 && rng.gen_bool(0.3) {
-//             let len = rng.gen_range(0..=buffered_event_count);
-//             log::info!("flushing {} events", len);
-//             fs.as_fake().flush_events(len);
-//         } else {
-//             randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
-//             mutations_len -= 1;
-//         }
-
-//         cx.foreground().run_until_parked();
-//         if rng.gen_bool(0.2) {
-//             log::info!("storing snapshot {}", snapshots.len());
-//             let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//             snapshots.push(snapshot);
-//         }
-//     }
-
-//     log::info!("quiescing");
-//     fs.as_fake().flush_events(usize::MAX);
-//     cx.foreground().run_until_parked();
-
-//     let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//     snapshot.check_invariants(true);
-//     let expanded_paths = snapshot
-//         .expanded_entries()
-//         .map(|e| e.path.clone())
-//         .collect::<Vec<_>>();
-
-//     {
-//         let new_worktree = Worktree::local(
-//             build_client(cx),
-//             root_dir,
-//             true,
-//             fs.clone(),
-//             Default::default(),
-//             &mut cx.to_async(),
-//         )
-//         .await
-//         .unwrap();
-//         new_worktree
-//             .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
-//             .await;
-//         new_worktree
-//             .update(cx, |tree, _| {
-//                 tree.as_local_mut()
-//                     .unwrap()
-//                     .refresh_entries_for_paths(expanded_paths)
-//             })
-//             .recv()
-//             .await;
-//         let new_snapshot =
-//             new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-//         assert_eq!(
-//             snapshot.entries_without_ids(true),
-//             new_snapshot.entries_without_ids(true)
-//         );
-//     }
-
-//     for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
-//         for update in updates.lock().iter() {
-//             if update.scan_id >= prev_snapshot.scan_id() as u64 {
-//                 prev_snapshot.apply_remote_update(update.clone()).unwrap();
-//             }
-//         }
-
-//         assert_eq!(
-//             prev_snapshot
-//                 .entries(true)
-//                 .map(ignore_pending_dir)
-//                 .collect::<Vec<_>>(),
-//             snapshot
-//                 .entries(true)
-//                 .map(ignore_pending_dir)
-//                 .collect::<Vec<_>>(),
-//             "wrong updates after snapshot {i}: {updates:#?}",
-//         );
-//     }
-
-//     fn ignore_pending_dir(entry: &Entry) -> Entry {
-//         let mut entry = entry.clone();
-//         if entry.kind.is_dir() {
-//             entry.kind = EntryKind::Dir
-//         }
-//         entry
-//     }
-// }
-
-// // The worktree's `UpdatedEntries` event can be used to follow along with
-// // all changes to the worktree's snapshot.
-// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
-//     let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
-//     cx.subscribe(&cx.handle(), move |tree, _, event, _| {
-//         if let Event::UpdatedEntries(changes) = event {
-//             for (path, _, change_type) in changes.iter() {
-//                 let entry = tree.entry_for_path(&path).cloned();
-//                 let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
-//                     Ok(ix) | Err(ix) => ix,
-//                 };
-//                 match change_type {
-//                     PathChange::Added => entries.insert(ix, entry.unwrap()),
-//                     PathChange::Removed => drop(entries.remove(ix)),
-//                     PathChange::Updated => {
-//                         let entry = entry.unwrap();
-//                         let existing_entry = entries.get_mut(ix).unwrap();
-//                         assert_eq!(existing_entry.path, entry.path);
-//                         *existing_entry = entry;
-//                     }
-//                     PathChange::AddedOrUpdated | PathChange::Loaded => {
-//                         let entry = entry.unwrap();
-//                         if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
-//                             *entries.get_mut(ix).unwrap() = entry;
-//                         } else {
-//                             entries.insert(ix, entry);
-//                         }
-//                     }
-//                 }
-//             }
-
-//             let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
-//             assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
-//         }
-//     })
-//     .detach();
-// }
-
-// fn randomly_mutate_worktree(
-//     worktree: &mut Worktree,
-//     rng: &mut impl Rng,
-//     cx: &mut ModelContext<Worktree>,
-// ) -> Task<Result<()>> {
-//     log::info!("mutating worktree");
-//     let worktree = worktree.as_local_mut().unwrap();
-//     let snapshot = worktree.snapshot();
-//     let entry = snapshot.entries(false).choose(rng).unwrap();
-
-//     match rng.gen_range(0_u32..100) {
-//         0..=33 if entry.path.as_ref() != Path::new("") => {
-//             log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
-//             worktree.delete_entry(entry.id, cx).unwrap()
-//         }
-//         ..=66 if entry.path.as_ref() != Path::new("") => {
-//             let other_entry = snapshot.entries(false).choose(rng).unwrap();
-//             let new_parent_path = if other_entry.is_dir() {
-//                 other_entry.path.clone()
-//             } else {
-//                 other_entry.path.parent().unwrap().into()
-//             };
-//             let mut new_path = new_parent_path.join(random_filename(rng));
-//             if new_path.starts_with(&entry.path) {
-//                 new_path = random_filename(rng).into();
-//             }
-
-//             log::info!(
-//                 "renaming entry {:?} ({}) to {:?}",
-//                 entry.path,
-//                 entry.id.0,
-//                 new_path
-//             );
-//             let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
-//             cx.foreground().spawn(async move {
-//                 task.await?;
-//                 Ok(())
-//             })
-//         }
-//         _ => {
-//             let task = if entry.is_dir() {
-//                 let child_path = entry.path.join(random_filename(rng));
-//                 let is_dir = rng.gen_bool(0.3);
-//                 log::info!(
-//                     "creating {} at {:?}",
-//                     if is_dir { "dir" } else { "file" },
-//                     child_path,
-//                 );
-//                 worktree.create_entry(child_path, is_dir, cx)
-//             } else {
-//                 log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
-//                 worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
-//             };
-//             cx.foreground().spawn(async move {
-//                 task.await?;
-//                 Ok(())
-//             })
-//         }
-//     }
-// }
-
-// async fn randomly_mutate_fs(
-//     fs: &Arc<dyn Fs>,
-//     root_path: &Path,
-//     insertion_probability: f64,
-//     rng: &mut impl Rng,
-// ) {
-//     log::info!("mutating fs");
-//     let mut files = Vec::new();
-//     let mut dirs = Vec::new();
-//     for path in fs.as_fake().paths(false) {
-//         if path.starts_with(root_path) {
-//             if fs.is_file(&path).await {
-//                 files.push(path);
-//             } else {
-//                 dirs.push(path);
-//             }
-//         }
-//     }
-
-//     if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
-//         let path = dirs.choose(rng).unwrap();
-//         let new_path = path.join(random_filename(rng));
-
-//         if rng.gen() {
-//             log::info!(
-//                 "creating dir {:?}",
-//                 new_path.strip_prefix(root_path).unwrap()
-//             );
-//             fs.create_dir(&new_path).await.unwrap();
-//         } else {
-//             log::info!(
-//                 "creating file {:?}",
-//                 new_path.strip_prefix(root_path).unwrap()
-//             );
-//             fs.create_file(&new_path, Default::default()).await.unwrap();
-//         }
-//     } else if rng.gen_bool(0.05) {
-//         let ignore_dir_path = dirs.choose(rng).unwrap();
-//         let ignore_path = ignore_dir_path.join(&*GITIGNORE);
-
-//         let subdirs = dirs
-//             .iter()
-//             .filter(|d| d.starts_with(&ignore_dir_path))
-//             .cloned()
-//             .collect::<Vec<_>>();
-//         let subfiles = files
-//             .iter()
-//             .filter(|d| d.starts_with(&ignore_dir_path))
-//             .cloned()
-//             .collect::<Vec<_>>();
-//         let files_to_ignore = {
-//             let len = rng.gen_range(0..=subfiles.len());
-//             subfiles.choose_multiple(rng, len)
-//         };
-//         let dirs_to_ignore = {
-//             let len = rng.gen_range(0..subdirs.len());
-//             subdirs.choose_multiple(rng, len)
-//         };
-
-//         let mut ignore_contents = String::new();
-//         for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
-//             writeln!(
-//                 ignore_contents,
-//                 "{}",
-//                 path_to_ignore
-//                     .strip_prefix(&ignore_dir_path)
-//                     .unwrap()
-//                     .to_str()
-//                     .unwrap()
-//             )
-//             .unwrap();
-//         }
-//         log::info!(
-//             "creating gitignore {:?} with contents:\n{}",
-//             ignore_path.strip_prefix(&root_path).unwrap(),
-//             ignore_contents
-//         );
-//         fs.save(
-//             &ignore_path,
-//             &ignore_contents.as_str().into(),
-//             Default::default(),
-//         )
-//         .await
-//         .unwrap();
-//     } else {
-//         let old_path = {
-//             let file_path = files.choose(rng);
-//             let dir_path = dirs[1..].choose(rng);
-//             file_path.into_iter().chain(dir_path).choose(rng).unwrap()
-//         };
-
-//         let is_rename = rng.gen();
-//         if is_rename {
-//             let new_path_parent = dirs
-//                 .iter()
-//                 .filter(|d| !d.starts_with(old_path))
-//                 .choose(rng)
-//                 .unwrap();
-
-//             let overwrite_existing_dir =
-//                 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
-//             let new_path = if overwrite_existing_dir {
-//                 fs.remove_dir(
-//                     &new_path_parent,
-//                     RemoveOptions {
-//                         recursive: true,
-//                         ignore_if_not_exists: true,
-//                     },
-//                 )
-//                 .await
-//                 .unwrap();
-//                 new_path_parent.to_path_buf()
-//             } else {
-//                 new_path_parent.join(random_filename(rng))
-//             };
-
-//             log::info!(
-//                 "renaming {:?} to {}{:?}",
-//                 old_path.strip_prefix(&root_path).unwrap(),
-//                 if overwrite_existing_dir {
-//                     "overwrite "
-//                 } else {
-//                     ""
-//                 },
-//                 new_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.rename(
-//                 &old_path,
-//                 &new_path,
-//                 fs::RenameOptions {
-//                     overwrite: true,
-//                     ignore_if_exists: true,
-//                 },
-//             )
-//             .await
-//             .unwrap();
-//         } else if fs.is_file(&old_path).await {
-//             log::info!(
-//                 "deleting file {:?}",
-//                 old_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.remove_file(old_path, Default::default()).await.unwrap();
-//         } else {
-//             log::info!(
-//                 "deleting dir {:?}",
-//                 old_path.strip_prefix(&root_path).unwrap()
-//             );
-//             fs.remove_dir(
-//                 &old_path,
-//                 RemoveOptions {
-//                     recursive: true,
-//                     ignore_if_not_exists: true,
-//                 },
-//             )
-//             .await
-//             .unwrap();
-//         }
-//     }
-// }
-
-// fn random_filename(rng: &mut impl Rng) -> String {
-//     (0..6)
-//         .map(|_| rng.sample(rand::distributions::Alphanumeric))
-//         .map(char::from)
-//         .collect()
-// }
-
-// #[gpui::test]
-// async fn test_rename_work_directory(cx: &mut TestAppContext) {
-//     let root = temp_tree(json!({
-//         "projects": {
-//             "project1": {
-//                 "a": "",
-//                 "b": "",
-//             }
-//         },
-
-//     }));
-//     let root_path = root.path();
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root_path,
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     let repo = git_init(&root_path.join("projects/project1"));
-//     git_add("a", &repo);
-//     git_commit("init", &repo);
-//     std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     tree.flush_fs_events(cx).await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         let (work_dir, _) = tree.repositories().next().unwrap();
-//         assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project1/a")),
-//             Some(GitFileStatus::Modified)
-//         );
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project1/b")),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     std::fs::rename(
-//         root_path.join("projects/project1"),
-//         root_path.join("projects/project2"),
-//     )
-//     .ok();
-//     tree.flush_fs_events(cx).await;
-
-//     cx.read(|cx| {
-//         let tree = tree.read(cx);
-//         let (work_dir, _) = tree.repositories().next().unwrap();
-//         assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project2/a")),
-//             Some(GitFileStatus::Modified)
-//         );
-//         assert_eq!(
-//             tree.status_for_file(Path::new("projects/project2/b")),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_git_repository_for_path(cx: &mut TestAppContext) {
-//     let root = temp_tree(json!({
-//         "c.txt": "",
-//         "dir1": {
-//             ".git": {},
-//             "deps": {
-//                 "dep1": {
-//                     ".git": {},
-//                     "src": {
-//                         "a.txt": ""
-//                     }
-//                 }
-//             },
-//             "src": {
-//                 "b.txt": ""
-//             }
-//         },
-//     }));
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     tree.flush_fs_events(cx).await;
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let tree = tree.as_local().unwrap();
-
-//         assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
-
-//         let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
-//         assert_eq!(
-//             entry
-//                 .work_directory(tree)
-//                 .map(|directory| directory.as_ref().to_owned()),
-//             Some(Path::new("dir1").to_owned())
-//         );
-
-//         let entry = tree
-//             .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
-//             .unwrap();
-//         assert_eq!(
-//             entry
-//                 .work_directory(tree)
-//                 .map(|directory| directory.as_ref().to_owned()),
-//             Some(Path::new("dir1/deps/dep1").to_owned())
-//         );
-
-//         let entries = tree.files(false, 0);
-
-//         let paths_with_repos = tree
-//             .entries_with_repositories(entries)
-//             .map(|(entry, repo)| {
-//                 (
-//                     entry.path.as_ref(),
-//                     repo.and_then(|repo| {
-//                         repo.work_directory(&tree)
-//                             .map(|work_directory| work_directory.0.to_path_buf())
-//                     }),
-//                 )
-//             })
-//             .collect::<Vec<_>>();
-
-//         assert_eq!(
-//             paths_with_repos,
-//             &[
-//                 (Path::new("c.txt"), None),
-//                 (
-//                     Path::new("dir1/deps/dep1/src/a.txt"),
-//                     Some(Path::new("dir1/deps/dep1").into())
-//                 ),
-//                 (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
-//             ]
-//         );
-//     });
-
-//     let repo_update_events = Arc::new(Mutex::new(vec![]));
-//     tree.update(cx, |_, cx| {
-//         let repo_update_events = repo_update_events.clone();
-//         cx.subscribe(&tree, move |_, _, event, _| {
-//             if let Event::UpdatedGitRepositories(update) = event {
-//                 repo_update_events.lock().push(update.clone());
-//             }
-//         })
-//         .detach();
-//     });
-
-//     std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
-//     tree.flush_fs_events(cx).await;
-
-//     assert_eq!(
-//         repo_update_events.lock()[0]
-//             .iter()
-//             .map(|e| e.0.clone())
-//             .collect::<Vec<Arc<Path>>>(),
-//         vec![Path::new("dir1").into()]
-//     );
-
-//     std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
-//     tree.flush_fs_events(cx).await;
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let tree = tree.as_local().unwrap();
-
-//         assert!(tree
-//             .repository_for_path("dir1/src/b.txt".as_ref())
-//             .is_none());
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-//     const IGNORE_RULE: &'static str = "**/target";
-
-//     let root = temp_tree(json!({
-//         "project": {
-//             "a.txt": "a",
-//             "b.txt": "bb",
-//             "c": {
-//                 "d": {
-//                     "e.txt": "eee"
-//                 }
-//             },
-//             "f.txt": "ffff",
-//             "target": {
-//                 "build_file": "???"
-//             },
-//             ".gitignore": IGNORE_RULE
-//         },
-
-//     }));
-
-//     const A_TXT: &'static str = "a.txt";
-//     const B_TXT: &'static str = "b.txt";
-//     const E_TXT: &'static str = "c/d/e.txt";
-//     const F_TXT: &'static str = "f.txt";
-//     const DOTGITIGNORE: &'static str = ".gitignore";
-//     const BUILD_FILE: &'static str = "target/build_file";
-//     let project_path = Path::new("project");
-
-//     // Set up git repository before creating the worktree.
-//     let work_dir = root.path().join("project");
-//     let mut repo = git_init(work_dir.as_path());
-//     repo.add_ignore_rule(IGNORE_RULE).unwrap();
-//     git_add(A_TXT, &repo);
-//     git_add(E_TXT, &repo);
-//     git_add(DOTGITIGNORE, &repo);
-//     git_commit("Initial commit", &repo);
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         root.path(),
-//         true,
-//         Arc::new(RealFs),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-//     deterministic.run_until_parked();
-
-//     // Check that the right git state is observed on startup
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(snapshot.repositories().count(), 1);
-//         let (dir, _) = snapshot.repositories().next().unwrap();
-//         assert_eq!(dir.as_ref(), Path::new("project"));
-
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(B_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(F_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     // Modify a file in the working copy.
-//     std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // The worktree detects that the file's git status has changed.
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(A_TXT)),
-//             Some(GitFileStatus::Modified)
-//         );
-//     });
-
-//     // Create a commit in the git repository.
-//     git_add(A_TXT, &repo);
-//     git_add(B_TXT, &repo);
-//     git_commit("Committing modified and added", &repo);
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // The worktree detects that the files' git status have changed.
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(F_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
-//         assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-//     });
-
-//     // Modify files in the working copy and perform git operations on other files.
-//     git_reset(0, &repo);
-//     git_remove_index(Path::new(B_TXT), &repo);
-//     git_stash(&mut repo);
-//     std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
-//     std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     // Check that more complex repo changes are tracked
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-
-//         assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(B_TXT)),
-//             Some(GitFileStatus::Added)
-//         );
-//         assert_eq!(
-//             snapshot.status_for_file(project_path.join(E_TXT)),
-//             Some(GitFileStatus::Modified)
-//         );
-//     });
-
-//     std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
-//     std::fs::remove_dir_all(work_dir.join("c")).unwrap();
-//     std::fs::write(
-//         work_dir.join(DOTGITIGNORE),
-//         [IGNORE_RULE, "f.txt"].join("\n"),
-//     )
-//     .unwrap();
-
-//     git_add(Path::new(DOTGITIGNORE), &repo);
-//     git_commit("Committing modified git ignore", &repo);
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     let mut renamed_dir_name = "first_directory/second_directory";
-//     const RENAMED_FILE: &'static str = "rf.txt";
-
-//     std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
-//     std::fs::write(
-//         work_dir.join(renamed_dir_name).join(RENAMED_FILE),
-//         "new-contents",
-//     )
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-//         assert_eq!(
-//             snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-
-//     renamed_dir_name = "new_first_directory/second_directory";
-
-//     std::fs::rename(
-//         work_dir.join("first_directory"),
-//         work_dir.join("new_first_directory"),
-//     )
-//     .unwrap();
-
-//     tree.flush_fs_events(cx).await;
-//     deterministic.run_until_parked();
-
-//     tree.read_with(cx, |tree, _cx| {
-//         let snapshot = tree.snapshot();
-
-//         assert_eq!(
-//             snapshot.status_for_file(
-//                 project_path
-//                     .join(Path::new(renamed_dir_name))
-//                     .join(RENAMED_FILE)
-//             ),
-//             Some(GitFileStatus::Added)
-//         );
-//     });
-// }
-
-// #[gpui::test]
-// async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
-//     let fs = FakeFs::new(cx.background());
-//     fs.insert_tree(
-//         "/root",
-//         json!({
-//             ".git": {},
-//             "a": {
-//                 "b": {
-//                     "c1.txt": "",
-//                     "c2.txt": "",
-//                 },
-//                 "d": {
-//                     "e1.txt": "",
-//                     "e2.txt": "",
-//                     "e3.txt": "",
-//                 }
-//             },
-//             "f": {
-//                 "no-status.txt": ""
-//             },
-//             "g": {
-//                 "h1.txt": "",
-//                 "h2.txt": ""
-//             },
-
-//         }),
-//     )
-//     .await;
-
-//     fs.set_status_for_repo_via_git_operation(
-//         &Path::new("/root/.git"),
-//         &[
-//             (Path::new("a/b/c1.txt"), GitFileStatus::Added),
-//             (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
-//             (Path::new("g/h2.txt"), GitFileStatus::Conflict),
-//         ],
-//     );
-
-//     let tree = Worktree::local(
-//         build_client(cx),
-//         Path::new("/root"),
-//         true,
-//         fs.clone(),
-//         Default::default(),
-//         &mut cx.to_async(),
-//     )
-//     .await
-//     .unwrap();
-
-//     cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-//         .await;
-
-//     cx.foreground().run_until_parked();
-//     let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new(""), Some(GitFileStatus::Conflict)),
-//             (Path::new("a"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/b"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f"), None),
-//             (Path::new("f/no-status.txt"), None),
-//             (Path::new("g"), Some(GitFileStatus::Conflict)),
-//             (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
-//         ],
-//     );
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new("a/b"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d"), Some(GitFileStatus::Modified)),
-//             (Path::new("a/d/e1.txt"), None),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f"), None),
-//             (Path::new("f/no-status.txt"), None),
-//             (Path::new("g"), Some(GitFileStatus::Conflict)),
-//         ],
-//     );
-
-//     check_propagated_statuses(
-//         &snapshot,
-//         &[
-//             (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
-//             (Path::new("a/b/c2.txt"), None),
-//             (Path::new("a/d/e1.txt"), None),
-//             (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
-//             (Path::new("f/no-status.txt"), None),
-//         ],
-//     );
-
-//     #[track_caller]
-//     fn check_propagated_statuses(
-//         snapshot: &Snapshot,
-//         expected_statuses: &[(&Path, Option<GitFileStatus>)],
-//     ) {
-//         let mut entries = expected_statuses
-//             .iter()
-//             .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
-//             .collect::<Vec<_>>();
-//         snapshot.propagate_git_statuses(&mut entries);
-//         assert_eq!(
-//             entries
-//                 .iter()
-//                 .map(|e| (e.path.as_ref(), e.git_status))
-//                 .collect::<Vec<_>>(),
-//             expected_statuses
-//         );
-//     }
-// }
-
-// fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
-//     let http_client = FakeHttpClient::with_404_response();
-//     cx.read(|cx| Client::new(http_client, cx))
-// }
-
-// #[track_caller]
-// fn git_init(path: &Path) -> git2::Repository {
-//     git2::Repository::init(path).expect("Failed to initialize git repository")
-// }
-
-// #[track_caller]
-// fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
-//     let path = path.as_ref();
-//     let mut index = repo.index().expect("Failed to get index");
-//     index.add_path(path).expect("Failed to add a.txt");
-//     index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_remove_index(path: &Path, repo: &git2::Repository) {
-//     let mut index = repo.index().expect("Failed to get index");
-//     index.remove_path(path).expect("Failed to add a.txt");
-//     index.write().expect("Failed to write index");
-// }
-
-// #[track_caller]
-// fn git_commit(msg: &'static str, repo: &git2::Repository) {
-//     use git2::Signature;
-
-//     let signature = Signature::now("test", "test@zed.dev").unwrap();
-//     let oid = repo.index().unwrap().write_tree().unwrap();
-//     let tree = repo.find_tree(oid).unwrap();
-//     if let Some(head) = repo.head().ok() {
-//         let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
-
-//         let parent_commit = parent_obj.as_commit().unwrap();
-
-//         repo.commit(
-//             Some("HEAD"),
-//             &signature,
-//             &signature,
-//             msg,
-//             &tree,
-//             &[parent_commit],
-//         )
-//         .expect("Failed to commit with parent");
-//     } else {
-//         repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
-//             .expect("Failed to commit");
-//     }
-// }
-
-// #[track_caller]
-// fn git_stash(repo: &mut git2::Repository) {
-//     use git2::Signature;
-
-//     let signature = Signature::now("test", "test@zed.dev").unwrap();
-//     repo.stash_save(&signature, "N/A", None)
-//         .expect("Failed to stash");
-// }
-
-// #[track_caller]
-// fn git_reset(offset: usize, repo: &git2::Repository) {
-//     let head = repo.head().expect("Couldn't get repo head");
-//     let object = head.peel(git2::ObjectType::Commit).unwrap();
-//     let commit = object.as_commit().unwrap();
-//     let new_head = commit
-//         .parents()
-//         .inspect(|parnet| {
-//             parnet.message();
-//         })
-//         .skip(offset)
-//         .next()
-//         .expect("Not enough history");
-//     repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
-//         .expect("Could not reset");
-// }
-
-// #[allow(dead_code)]
-// #[track_caller]
-// fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
-//     repo.statuses(None)
-//         .unwrap()
-//         .iter()
-//         .map(|status| (status.path().unwrap().to_string(), status.status()))
-//         .collect()
-// }
+use crate::{
+    project_settings::ProjectSettings,
+    worktree::{Event, Snapshot, WorktreeModelHandle},
+    Entry, EntryKind, PathChange, Project, Worktree,
+};
+use anyhow::Result;
+use client::Client;
+use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
+use git::GITIGNORE;
+use gpui::{ModelContext, Task, TestAppContext};
+use parking_lot::Mutex;
+use postage::stream::Stream;
+use pretty_assertions::assert_eq;
+use rand::prelude::*;
+use serde_json::json;
+use settings::SettingsStore;
+use std::{
+    env,
+    fmt::Write,
+    mem,
+    path::{Path, PathBuf},
+    sync::Arc,
+};
+use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
+
+// Verifies that worktree traversal honors the `include_ignored` flag:
+// the gitignored entry "a/b" is omitted by `entries(false)` but listed by `entries(true)`.
+#[gpui::test]
+async fn test_traversal(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    // "a/b" is gitignored; "a/c" is a sibling that is not.
+    fs.insert_tree(
+        "/root",
+        json!({
+           ".gitignore": "a/b\n",
+           "a": {
+               "b": "",
+               "c": "",
+           }
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    // Wait for the initial background scan to complete before inspecting entries.
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        // Without ignored entries, "a/b" must be absent from the traversal.
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/c"),
+            ]
+        );
+        // With ignored entries included, "a/b" appears in sorted order.
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new(".gitignore"),
+                Path::new("a"),
+                Path::new("a/b"),
+                Path::new("a/c"),
+            ]
+        );
+    })
+}
+
+// Exercises `descendent_entries(include_dirs, include_ignored, path)`:
+// files-only vs. files+dirs listings under a subtree, and how gitignored
+// descendants ("i/j" per the .gitignore below) are surfaced only after the
+// ignored directory is explicitly expanded and `include_ignored` is set.
+#[gpui::test]
+async fn test_descendent_entries(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "a": "",
+            "b": {
+               "c": {
+                   "d": ""
+               },
+               "e": {}
+            },
+            "f": "",
+            "g": {
+                "h": {}
+            },
+            "i": {
+                "j": {
+                    "k": ""
+                },
+                "l": {
+
+                }
+            },
+            ".gitignore": "i/j\n",
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    // Let the initial scan finish before querying descendants.
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        // Files only under "b": just the single file "b/c/d".
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("b/c/d"),]
+        );
+        // Including directories also yields "b" itself and its subdirs, even empty "b/e".
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("b"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new("b"),
+                Path::new("b/c"),
+                Path::new("b/c/d"),
+                Path::new("b/e"),
+            ]
+        );
+
+        // "g" contains no files, so the files-only listing is empty...
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        // ...but with directories included, "g" and its empty child "g/h" are listed.
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("g"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("g"), Path::new("g/h"),]
+        );
+    });
+
+    // Expand gitignored directory.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("i/j").into()])
+    })
+    .recv()
+    .await;
+
+    tree.read_with(cx, |tree, _| {
+        // With `include_ignored == false`, the ignored subtree "i/j" stays hidden.
+        assert_eq!(
+            tree.descendent_entries(false, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            Vec::<PathBuf>::new()
+        );
+        // With `include_ignored == true`, the expanded ignored file "i/j/k" is visible.
+        assert_eq!(
+            tree.descendent_entries(false, true, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i/j/k")]
+        );
+        // Directory listing without ignored entries: "i" and its non-ignored child "i/l",
+        // but not the ignored "i/j".
+        assert_eq!(
+            tree.descendent_entries(true, false, Path::new("i"))
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![Path::new("i"), Path::new("i/l"),]
+        );
+    })
+}
+
+// Two symlinks ("lib/a/lib" and "lib/b/lib") each point back at their parent,
+// forming a cycle. The scanner must terminate, record the symlinks as entries
+// without following them, and handle a later rename of one of them.
+#[gpui::test(iterations = 10)]
+async fn test_circular_symlinks(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "lib": {
+                "a": {
+                    "a.txt": ""
+                },
+                "b": {
+                    "b.txt": ""
+                }
+            }
+        }),
+    )
+    .await;
+    fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+    fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        // The symlink entries appear once each; the cycle is not traversed.
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+
+    // Renaming one of the cyclic symlinks should be picked up by the rescan.
+    fs.rename(
+        Path::new("/root/lib/a/lib"),
+        Path::new("/root/lib/a/lib-2"),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(false)
+                .map(|entry| entry.path.as_ref())
+                .collect::<Vec<_>>(),
+            vec![
+                Path::new(""),
+                Path::new("lib"),
+                Path::new("lib/a"),
+                Path::new("lib/a/a.txt"),
+                Path::new("lib/a/lib-2"),
+                Path::new("lib/b"),
+                Path::new("lib/b/b.txt"),
+                Path::new("lib/b/lib"),
+            ]
+        );
+    });
+}
+
+// Symlinks inside the worktree that resolve to directories outside its root
+// are marked `is_external` and left unloaded (`EntryKind::UnloadedDir`) until
+// explicitly expanded; expansion loads only one directory level at a time and
+// emits `Event::UpdatedEntries` with `PathChange::Loaded` for the new entries.
+#[gpui::test]
+async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "dir1": {
+                "deps": {
+                    // symlinks here
+                },
+                "src": {
+                    "a.rs": "",
+                    "b.rs": "",
+                },
+            },
+            "dir2": {
+                "src": {
+                    "c.rs": "",
+                    "d.rs": "",
+                }
+            },
+            "dir3": {
+                "deps": {},
+                "src": {
+                    "e.rs": "",
+                    "f.rs": "",
+                },
+            }
+        }),
+    )
+    .await;
+
+    // These symlinks point to directories outside of the worktree's root, dir1.
+    fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
+        .await;
+    fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
+        .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root/dir1"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    // Record every (path, change) pair emitted via Event::UpdatedEntries so
+    // the notifications can be asserted after each expansion below.
+    let tree_updates = Arc::new(Mutex::new(Vec::new()));
+    tree.update(cx, |_, cx| {
+        let tree_updates = tree_updates.clone();
+        cx.subscribe(&tree, move |_, _, event, _| {
+            if let Event::UpdatedEntries(update) = event {
+                tree_updates.lock().extend(
+                    update
+                        .iter()
+                        .map(|(path, _, change)| (path.clone(), *change)),
+                );
+            }
+        })
+        .detach();
+    });
+
+    // The symlinked directories are not scanned by default.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+
+        assert_eq!(
+            tree.entry_for_path("deps/dep-dir2").unwrap().kind,
+            EntryKind::UnloadedDir
+        );
+    });
+
+    // Expand one of the symlinked directories.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
+    })
+    .recv()
+    .await;
+
+    // The expanded directory's contents are loaded. Subdirectories are
+    // not scanned yet.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("deps/dep-dir3/deps"), true),
+                (Path::new("deps/dep-dir3/src"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+    });
+    assert_eq!(
+        mem::take(&mut *tree_updates.lock()),
+        &[
+            (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
+            (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
+            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
+        ]
+    );
+
+    // Expand a subdirectory of one of the symlinked directories.
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
+    })
+    .recv()
+    .await;
+
+    // The expanded subdirectory's contents are loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_external))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new("deps"), false),
+                (Path::new("deps/dep-dir2"), true),
+                (Path::new("deps/dep-dir3"), true),
+                (Path::new("deps/dep-dir3/deps"), true),
+                (Path::new("deps/dep-dir3/src"), true),
+                (Path::new("deps/dep-dir3/src/e.rs"), true),
+                (Path::new("deps/dep-dir3/src/f.rs"), true),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+            ]
+        );
+    });
+
+    assert_eq!(
+        mem::take(&mut *tree_updates.lock()),
+        &[
+            (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
+            (
+                Path::new("deps/dep-dir3/src/e.rs").into(),
+                PathChange::Loaded
+            ),
+            (
+                Path::new("deps/dep-dir3/src/f.rs").into(),
+                PathChange::Loaded
+            )
+        ]
+    );
+}
+
+// Opening a buffer nested inside an unexpanded gitignored directory lazily
+// loads only the directories on the path to that file (tracked via the fake
+// FS's read_dir call counter), and FS changes inside still-unloaded ignored
+// directories cause no scanning work at all.
+#[gpui::test]
+async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "node_modules\n",
+            "one": {
+                "node_modules": {
+                    "a": {
+                        "a1.js": "a1",
+                        "a2.js": "a2",
+                    },
+                    "b": {
+                        "b1.js": "b1",
+                        "b2.js": "b2",
+                    },
+                    "c": {
+                        "c1.js": "c1",
+                        "c2.js": "c2",
+                    }
+                },
+            },
+            "two": {
+                "x.js": "",
+                "y.js": "",
+            },
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        // After the initial scan, node_modules is known but its contents
+        // have not been read.
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+    });
+
+    // Open a file that is nested inside of a gitignored directory that
+    // has not yet been expanded.
+    let prev_read_dir_count = fs.read_dir_call_count();
+    let buffer = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
+        })
+        .await
+        .unwrap();
+
+    tree.read_with(cx, |tree, cx| {
+        // node_modules and node_modules/b were loaded; siblings "a" and "c"
+        // are listed but their contents remain unscanned.
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("one/node_modules/a"), true),
+                (Path::new("one/node_modules/b"), true),
+                (Path::new("one/node_modules/b/b1.js"), true),
+                (Path::new("one/node_modules/b/b2.js"), true),
+                (Path::new("one/node_modules/c"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+
+        assert_eq!(
+            buffer.read(cx).file().unwrap().path().as_ref(),
+            Path::new("one/node_modules/b/b1.js")
+        );
+
+        // Only the newly-expanded directories are scanned.
+        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
+    });
+
+    // Open another file in a different subdirectory of the same
+    // gitignored directory.
+    let prev_read_dir_count = fs.read_dir_call_count();
+    let buffer = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
+        })
+        .await
+        .unwrap();
+
+    tree.read_with(cx, |tree, cx| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            vec![
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("one"), false),
+                (Path::new("one/node_modules"), true),
+                (Path::new("one/node_modules/a"), true),
+                (Path::new("one/node_modules/a/a1.js"), true),
+                (Path::new("one/node_modules/a/a2.js"), true),
+                (Path::new("one/node_modules/b"), true),
+                (Path::new("one/node_modules/b/b1.js"), true),
+                (Path::new("one/node_modules/b/b2.js"), true),
+                (Path::new("one/node_modules/c"), true),
+                (Path::new("two"), false),
+                (Path::new("two/x.js"), false),
+                (Path::new("two/y.js"), false),
+            ]
+        );
+
+        assert_eq!(
+            buffer.read(cx).file().unwrap().path().as_ref(),
+            Path::new("one/node_modules/a/a2.js")
+        );
+
+        // Only the newly-expanded directory is scanned.
+        assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
+    });
+
+    // No work happens when files and directories change within an unloaded directory.
+    let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
+    fs.create_dir("/root/one/node_modules/c/lib".as_ref())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+    assert_eq!(
+        fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
+        0
+    );
+}
+
+// When a .gitignore change un-ignores a directory tree, every directory that
+// becomes unignored is loaded eagerly (each scanned exactly once), while a
+// subdirectory that becomes newly ignored ("node_modules/d/e") is left
+// unexpanded.
+#[gpui::test]
+async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "node_modules\n",
+            "a": {
+                "a.js": "",
+            },
+            "b": {
+                "b.js": "",
+            },
+            "node_modules": {
+                "c": {
+                    "c.js": "",
+                },
+                "d": {
+                    "d.js": "",
+                    "e": {
+                        "e1.js": "",
+                        "e2.js": "",
+                    },
+                    "f": {
+                        "f1.js": "",
+                        "f2.js": "",
+                    }
+                },
+            },
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    // Open a file within the gitignored directory, forcing some of its
+    // subdirectories to be read, but not all.
+    let read_dir_count_1 = fs.read_dir_call_count();
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
+    })
+    .recv()
+    .await;
+
+    // Those subdirectories are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                (Path::new("node_modules"), true),
+                (Path::new("node_modules/c"), true),
+                (Path::new("node_modules/d"), true),
+                (Path::new("node_modules/d/d.js"), true),
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), true),
+            ]
+        );
+    });
+    // Only node_modules and node_modules/d were read to reach d.js.
+    let read_dir_count_2 = fs.read_dir_call_count();
+    assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
+
+    // Update the gitignore so that node_modules is no longer ignored,
+    // but a subdirectory is ignored
+    fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+        .await
+        .unwrap();
+    cx.executor().run_until_parked();
+
+    // All of the directories that are no longer ignored are now loaded.
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(
+            tree.entries(true)
+                .map(|e| (e.path.as_ref(), e.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("a"), false),
+                (Path::new("a/a.js"), false),
+                (Path::new("b"), false),
+                (Path::new("b/b.js"), false),
+                // This directory is no longer ignored
+                (Path::new("node_modules"), false),
+                (Path::new("node_modules/c"), false),
+                (Path::new("node_modules/c/c.js"), false),
+                (Path::new("node_modules/d"), false),
+                (Path::new("node_modules/d/d.js"), false),
+                // This subdirectory is now ignored
+                (Path::new("node_modules/d/e"), true),
+                (Path::new("node_modules/d/f"), false),
+                (Path::new("node_modules/d/f/f1.js"), false),
+                (Path::new("node_modules/d/f/f2.js"), false),
+            ]
+        );
+    });
+
+    // Each of the newly-loaded directories is scanned only once.
+    let read_dir_count_3 = fs.read_dir_call_count();
+    assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
+}
+
+// Verifies that .gitignore rules (from the worktree root and from an ancestor
+// directory above it) are applied both during the initial scan and to files
+// created afterwards, and that ".git" itself is always treated as ignored.
+// `file_scan_exclusions` is cleared first so the ".git" directory is actually
+// present in the worktree for the final assertion.
+#[gpui::test(iterations = 10)]
+async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions = Some(Vec::new());
+            });
+        });
+    });
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
+            "tree": {
+                ".git": {},
+                ".gitignore": "ignored-dir\n",
+                "tracked-dir": {
+                    "tracked-file1": "",
+                    "ancestor-ignored-file1": "",
+                },
+                "ignored-dir": {
+                    "ignored-file1": ""
+                }
+            }
+        }),
+    )
+    .await;
+
+    // The worktree root is /root/tree, so /root/.gitignore is an
+    // ancestor gitignore.
+    let tree = Worktree::local(
+        build_client(cx),
+        "/root/tree".as_ref(),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.read_with(cx, |tree, _| {
+        tree.as_local()
+            .unwrap()
+            .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
+    })
+    .recv()
+    .await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file1")
+                .unwrap()
+                .is_ignored
+        );
+    });
+
+    // Create new files after the initial scan; the rescan must classify
+    // them with the same ignore rules.
+    fs.create_file(
+        "/root/tree/tracked-dir/tracked-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    fs.create_file(
+        "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+    fs.create_file(
+        "/root/tree/ignored-dir/ignored-file2".as_ref(),
+        Default::default(),
+    )
+    .await
+    .unwrap();
+
+    cx.executor().run_until_parked();
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        assert!(
+            !tree
+                .entry_for_path("tracked-dir/tracked-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(
+            tree.entry_for_path("ignored-dir/ignored-file2")
+                .unwrap()
+                .is_ignored
+        );
+        assert!(tree.entry_for_path(".git").unwrap().is_ignored);
+    });
+}
+
+// Uses the real filesystem (hence allow_parking) to check that files created
+// via `write_file` get entries in the worktree and inherit the ignored status
+// of their parent directory.
+#[gpui::test]
+async fn test_write_file(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let dir = temp_tree(json!({
+        ".git": {},
+        ".gitignore": "ignored-dir\n",
+        "tracked-dir": {},
+        "ignored-dir": {}
+    }));
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("tracked-dir/file.txt"),
+            "hello".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+    tree.update(cx, |tree, cx| {
+        tree.as_local().unwrap().write_file(
+            Path::new("ignored-dir/file.txt"),
+            "world".into(),
+            Default::default(),
+            cx,
+        )
+    })
+    .await
+    .unwrap();
+
+    tree.read_with(cx, |tree, _| {
+        // The new files exist as entries and carry their parent's
+        // ignored status.
+        let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
+        let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
+        assert!(!tracked.is_ignored);
+        assert!(ignored.is_ignored);
+    });
+}
+
+// Exercises the new `file_scan_exclusions` project setting on a real
+// filesystem: excluded paths must be absent from the worktree entirely
+// (distinct from gitignored paths, which are present but marked ignored),
+// and changing the setting at runtime must rescan accordingly.
+#[gpui::test]
+async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let dir = temp_tree(json!({
+        ".gitignore": "**/target\n/node_modules\n",
+        "target": {
+            "index": "blah2"
+        },
+        "node_modules": {
+            ".DS_Store": "",
+            "prettier": {
+                "package.json": "{}",
+            },
+        },
+        "src": {
+            ".DS_Store": "",
+            "foo": {
+                "foo.rs": "mod another;\n",
+                "another.rs": "// another",
+            },
+            "bar": {
+                "bar.rs": "// bar",
+            },
+            "lib.rs": "mod foo;\nmod bar;\n",
+        },
+        ".DS_Store": "",
+    }));
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
+            });
+        });
+    });
+
+    let tree = Worktree::local(
+        build_client(cx),
+        dir.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+    // check_worktree_entries args: (excluded, gitignored, tracked) paths.
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "node_modules/.DS_Store",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+            &["target", "node_modules"],
+            &["src/lib.rs", "src/bar/bar.rs", ".gitignore"],
+        )
+    });
+
+    // Change the exclusions at runtime: previously-excluded paths must
+    // reappear and newly-excluded ones must vanish.
+    cx.update(|cx| {
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                project_settings.file_scan_exclusions =
+                    Some(vec!["**/node_modules/**".to_string()]);
+            });
+        });
+    });
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        check_worktree_entries(
+            tree,
+            &[
+                "node_modules/prettier/package.json",
+                "node_modules/.DS_Store",
+                "node_modules",
+            ],
+            &["target"],
+            &[
+                ".gitignore",
+                "src/lib.rs",
+                "src/bar/bar.rs",
+                "src/foo/foo.rs",
+                "src/foo/another.rs",
+                "src/.DS_Store",
+                ".DS_Store",
+            ],
+        )
+    });
+}
+
+// Creates an entry while the initial scan may still be in flight (30
+// iterations to shake out races) and checks that a remote observer applying
+// the streamed updates converges to the same snapshot as the local worktree.
+#[gpui::test(iterations = 30)]
+async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "b": {},
+            "c": {},
+            "d": {},
+        }),
+    )
+    .await;
+
+    let tree = Worktree::local(
+        build_client(cx),
+        "/root".as_ref(),
+        true,
+        fs,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    // Mirror the worktree into a second snapshot by applying the update
+    // stream, as a remote collaborator would.
+    let snapshot1 = tree.update(cx, |tree, cx| {
+        let tree = tree.as_local_mut().unwrap();
+        let snapshot = Arc::new(Mutex::new(tree.snapshot()));
+        let _ = tree.observe_updates(0, cx, {
+            let snapshot = snapshot.clone();
+            move |update| {
+                snapshot.lock().apply_remote_update(update).unwrap();
+                async { true }
+            }
+        });
+        snapshot
+    });
+
+    // Note: the initial scan has not necessarily finished at this point.
+    let entry = tree
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/e".as_ref(), true, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_dir());
+
+    cx.executor().run_until_parked();
+    tree.read_with(cx, |tree, _| {
+        assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
+    });
+
+    // The mirrored snapshot must match the authoritative one exactly.
+    let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    assert_eq!(
+        snapshot1.lock().entries(true).collect::<Vec<_>>(),
+        snapshot2.entries(true).collect::<Vec<_>>()
+    );
+}
+
+// `create_entry` must create any missing parent directories ("mkdir -p"
+// behavior). Verified against both the fake filesystem and the real one,
+// including the smallest delta (one new file in an existing dir) and the
+// largest (an entirely new directory chain).
+#[gpui::test]
+async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let fs_fake = FakeFs::new(cx.background_executor.clone());
+    fs_fake
+        .insert_tree(
+            "/root",
+            json!({
+                "a": {},
+            }),
+        )
+        .await;
+
+    let tree_fake = Worktree::local(
+        client_fake,
+        "/root".as_ref(),
+        true,
+        fs_fake,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    // "a" exists, but "a/b" and "a/b/c" must be created implicitly.
+    let entry = tree_fake
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_fake.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+    });
+
+    // Repeat the same scenario against the real filesystem.
+    let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+    let fs_real = Arc::new(RealFs);
+    let temp_root = temp_tree(json!({
+        "a": {}
+    }));
+
+    let tree_real = Worktree::local(
+        client_real,
+        temp_root.path(),
+        true,
+        fs_real,
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+        assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+    });
+
+    // Test smallest change
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("a/b/c/e.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
+    });
+
+    // Test largest change
+    let entry = tree_real
+        .update(cx, |tree, cx| {
+            tree.as_local_mut()
+                .unwrap()
+                .create_entry("d/e/f/g.txt".as_ref(), false, cx)
+        })
+        .await
+        .unwrap();
+    assert!(entry.is_file());
+
+    cx.executor().run_until_parked();
+    tree_real.read_with(cx, |tree, _| {
+        assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
+        assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
+        assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
+        assert!(tree.entry_for_path("d/").unwrap().is_dir());
+    });
+}
+
+// Randomized property test: mutate the worktree while its *initial* scan is
+// still in flight, then verify that (a) snapshot invariants hold after every
+// mutation and (b) replaying the observed updates onto each intermediate
+// snapshot converges on the final snapshot.
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_operations_during_initial_scan(
+    cx: &mut TestAppContext,
+    mut rng: StdRng,
+) {
+    init_test(cx);
+    // Both knobs are overridable via env vars for longer soak runs.
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(5);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    // Populate a fake filesystem with a random initial tree.
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let worktree = Worktree::local(
+        build_client(cx),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    // Capture a baseline snapshot (scan may still be incomplete) and record
+    // every update the worktree emits to a simulated remote observer.
+    let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    // Mutate the worktree concurrently with its initial scan, checking
+    // snapshot invariants after each mutation.
+    for _ in 0..operations {
+        worktree
+            .update(cx, |worktree, cx| {
+                randomly_mutate_worktree(worktree, &mut rng, cx)
+            })
+            .await
+            .log_err();
+        worktree.read_with(cx, |tree, _| {
+            tree.as_local().unwrap().snapshot().check_invariants(true)
+        });
+
+        if rng.gen_bool(0.6) {
+            snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
+        }
+    }
+
+    // Let the initial scan finish and all events settle.
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
+    cx.executor().run_until_parked();
+
+    let final_snapshot = worktree.read_with(cx, |tree, _| {
+        let tree = tree.as_local().unwrap();
+        let snapshot = tree.snapshot();
+        snapshot.check_invariants(true);
+        snapshot
+    });
+
+    // Each stored snapshot, after applying the updates issued at or after its
+    // scan id, must match the final snapshot entry-for-entry.
+    for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
+        let mut updated_snapshot = snapshot.clone();
+        for update in updates.lock().iter() {
+            if update.scan_id >= updated_snapshot.scan_id() as u64 {
+                updated_snapshot
+                    .apply_remote_update(update.clone())
+                    .unwrap();
+            }
+        }
+
+        assert_eq!(
+            updated_snapshot.entries(true).collect::<Vec<_>>(),
+            final_snapshot.entries(true).collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
+        );
+    }
+}
+
+// Randomized property test: after the initial scan completes, interleave
+// worktree mutations with raw filesystem mutations (with fs events paused and
+// flushed in random batches), then verify that a fresh rescan and the
+// incremental update stream both converge on the same state.
+#[gpui::test(iterations = 100)]
+async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+    init_test(cx);
+    let operations = env::var("OPERATIONS")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(40);
+    let initial_entries = env::var("INITIAL_ENTRIES")
+        .map(|o| o.parse().unwrap())
+        .unwrap_or(20);
+
+    let root_dir = Path::new("/test");
+    let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
+    fs.as_fake().insert_tree(root_dir, json!({})).await;
+    for _ in 0..initial_entries {
+        randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+    }
+    log::info!("generated initial tree");
+
+    let worktree = Worktree::local(
+        build_client(cx),
+        root_dir,
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    // Record every update emitted to a simulated remote observer.
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    worktree.update(cx, |tree, cx| {
+        check_worktree_change_events(tree, cx);
+
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    worktree
+        .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+        .await;
+
+    // Pause fs event delivery so events can be flushed in random-sized batches.
+    fs.as_fake().pause_events();
+    let mut snapshots = Vec::new();
+    let mut mutations_len = operations;
+    while mutations_len > 1 {
+        if rng.gen_bool(0.2) {
+            worktree
+                .update(cx, |worktree, cx| {
+                    randomly_mutate_worktree(worktree, &mut rng, cx)
+                })
+                .await
+                .log_err();
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+        }
+
+        // Either flush a random prefix of buffered fs events, or mutate again.
+        let buffered_event_count = fs.as_fake().buffered_event_count();
+        if buffered_event_count > 0 && rng.gen_bool(0.3) {
+            let len = rng.gen_range(0..=buffered_event_count);
+            log::info!("flushing {} events", len);
+            fs.as_fake().flush_events(len);
+        } else {
+            randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+            mutations_len -= 1;
+        }
+
+        cx.executor().run_until_parked();
+        if rng.gen_bool(0.2) {
+            log::info!("storing snapshot {}", snapshots.len());
+            let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+            snapshots.push(snapshot);
+        }
+    }
+
+    // Deliver all remaining events and let the worktree settle.
+    log::info!("quiescing");
+    fs.as_fake().flush_events(usize::MAX);
+    cx.executor().run_until_parked();
+
+    let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+    snapshot.check_invariants(true);
+    let expanded_paths = snapshot
+        .expanded_entries()
+        .map(|e| e.path.clone())
+        .collect::<Vec<_>>();
+
+    // A brand-new worktree scanning the same fs (with the same entries
+    // expanded) must produce identical entries, modulo entry ids.
+    {
+        let new_worktree = Worktree::local(
+            build_client(cx),
+            root_dir,
+            true,
+            fs.clone(),
+            Default::default(),
+            &mut cx.to_async(),
+        )
+        .await
+        .unwrap();
+        new_worktree
+            .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+            .await;
+        new_worktree
+            .update(cx, |tree, _| {
+                tree.as_local_mut()
+                    .unwrap()
+                    .refresh_entries_for_paths(expanded_paths)
+            })
+            .recv()
+            .await;
+        let new_snapshot =
+            new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+        assert_eq!(
+            snapshot.entries_without_ids(true),
+            new_snapshot.entries_without_ids(true)
+        );
+    }
+
+    // Replaying the update stream onto each stored snapshot must also
+    // reproduce the final snapshot.
+    for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
+        for update in updates.lock().iter() {
+            if update.scan_id >= prev_snapshot.scan_id() as u64 {
+                prev_snapshot.apply_remote_update(update.clone()).unwrap();
+            }
+        }
+
+        assert_eq!(
+            prev_snapshot
+                .entries(true)
+                .map(ignore_pending_dir)
+                .collect::<Vec<_>>(),
+            snapshot
+                .entries(true)
+                .map(ignore_pending_dir)
+                .collect::<Vec<_>>(),
+            "wrong updates after snapshot {i}: {updates:#?}",
+        );
+    }
+
+    // Normalize directory kinds so pending-dir states don't cause spurious
+    // mismatches in the comparison above.
+    fn ignore_pending_dir(entry: &Entry) -> Entry {
+        let mut entry = entry.clone();
+        if entry.kind.is_dir() {
+            entry.kind = EntryKind::Dir
+        }
+        entry
+    }
+}
+
+// The worktree's `UpdatedEntries` event can be used to follow along with
+// all changes to the worktree's snapshot.
+//
+// This subscribes to the worktree and maintains a mirror of its entries by
+// applying each reported change; after every event batch the mirror must
+// exactly match the worktree's current entries.
+fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
+    let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
+    cx.subscribe(&cx.handle(), move |tree, _, event, _| {
+        if let Event::UpdatedEntries(changes) = event {
+            for (path, _, change_type) in changes.iter() {
+                let entry = tree.entry_for_path(&path).cloned();
+                // Ok => the path already exists at `ix`; Err => `ix` is its
+                // sorted insertion point. Either way `ix` is where the change
+                // applies.
+                let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
+                    Ok(ix) | Err(ix) => ix,
+                };
+                match change_type {
+                    PathChange::Added => entries.insert(ix, entry.unwrap()),
+                    PathChange::Removed => drop(entries.remove(ix)),
+                    PathChange::Updated => {
+                        let entry = entry.unwrap();
+                        let existing_entry = entries.get_mut(ix).unwrap();
+                        assert_eq!(existing_entry.path, entry.path);
+                        *existing_entry = entry;
+                    }
+                    // May be either an insert or an in-place update; decide by
+                    // whether the entry at `ix` already has this path.
+                    PathChange::AddedOrUpdated | PathChange::Loaded => {
+                        let entry = entry.unwrap();
+                        if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
+                            *entries.get_mut(ix).unwrap() = entry;
+                        } else {
+                            entries.insert(ix, entry);
+                        }
+                    }
+                }
+            }
+
+            // The mirror built from events must equal the real entry list.
+            let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
+            assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
+        }
+    })
+    .detach();
+}
+
+// Performs one random mutation through the worktree's own API:
+// ~1/3 delete an entry, ~1/3 rename an entry, otherwise create a child entry
+// (or overwrite a file). Deletion/rename of the root ("") is excluded by the
+// match guards. NOTE(review): the exact RNG call order here is load-bearing
+// for seeded reproducibility — don't reorder draws.
+fn randomly_mutate_worktree(
+    worktree: &mut Worktree,
+    rng: &mut impl Rng,
+    cx: &mut ModelContext<Worktree>,
+) -> Task<Result<()>> {
+    log::info!("mutating worktree");
+    let worktree = worktree.as_local_mut().unwrap();
+    let snapshot = worktree.snapshot();
+    let entry = snapshot.entries(false).choose(rng).unwrap();
+
+    match rng.gen_range(0_u32..100) {
+        0..=33 if entry.path.as_ref() != Path::new("") => {
+            log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
+            worktree.delete_entry(entry.id, cx).unwrap()
+        }
+        ..=66 if entry.path.as_ref() != Path::new("") => {
+            // Pick a destination directory (or the parent of a picked file).
+            let other_entry = snapshot.entries(false).choose(rng).unwrap();
+            let new_parent_path = if other_entry.is_dir() {
+                other_entry.path.clone()
+            } else {
+                other_entry.path.parent().unwrap().into()
+            };
+            let mut new_path = new_parent_path.join(random_filename(rng));
+            // Avoid moving an entry into itself; fall back to a root-level name.
+            if new_path.starts_with(&entry.path) {
+                new_path = random_filename(rng).into();
+            }
+
+            log::info!(
+                "renaming entry {:?} ({}) to {:?}",
+                entry.path,
+                entry.id.0,
+                new_path
+            );
+            let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+            cx.background_executor().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+        _ => {
+            let task = if entry.is_dir() {
+                let child_path = entry.path.join(random_filename(rng));
+                let is_dir = rng.gen_bool(0.3);
+                log::info!(
+                    "creating {} at {:?}",
+                    if is_dir { "dir" } else { "file" },
+                    child_path,
+                );
+                worktree.create_entry(child_path, is_dir, cx)
+            } else {
+                log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
+                worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
+            };
+            cx.background_executor().spawn(async move {
+                task.await?;
+                Ok(())
+            })
+        }
+    }
+}
+
+// Performs one random mutation directly on the (fake) filesystem under
+// `root_path`: with `insertion_probability` create a file/dir, with ~5%
+// probability write a random .gitignore, otherwise rename or delete an
+// existing path. NOTE(review): RNG call order is load-bearing for seeded
+// reproducibility — don't reorder draws.
+async fn randomly_mutate_fs(
+    fs: &Arc<dyn Fs>,
+    root_path: &Path,
+    insertion_probability: f64,
+    rng: &mut impl Rng,
+) {
+    log::info!("mutating fs");
+    // Partition existing paths under the root into files and dirs.
+    let mut files = Vec::new();
+    let mut dirs = Vec::new();
+    for path in fs.as_fake().paths(false) {
+        if path.starts_with(root_path) {
+            if fs.is_file(&path).await {
+                files.push(path);
+            } else {
+                dirs.push(path);
+            }
+        }
+    }
+
+    // Force an insertion when the tree is effectively empty (only the root
+    // dir exists), otherwise insert with the requested probability.
+    if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+        let path = dirs.choose(rng).unwrap();
+        let new_path = path.join(random_filename(rng));
+
+        if rng.gen() {
+            log::info!(
+                "creating dir {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_dir(&new_path).await.unwrap();
+        } else {
+            log::info!(
+                "creating file {:?}",
+                new_path.strip_prefix(root_path).unwrap()
+            );
+            fs.create_file(&new_path, Default::default()).await.unwrap();
+        }
+    } else if rng.gen_bool(0.05) {
+        // Write a .gitignore in a random dir, ignoring a random subset of the
+        // files and dirs beneath it (paths relative to the ignore dir).
+        let ignore_dir_path = dirs.choose(rng).unwrap();
+        let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+        let subdirs = dirs
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let subfiles = files
+            .iter()
+            .filter(|d| d.starts_with(&ignore_dir_path))
+            .cloned()
+            .collect::<Vec<_>>();
+        let files_to_ignore = {
+            let len = rng.gen_range(0..=subfiles.len());
+            subfiles.choose_multiple(rng, len)
+        };
+        let dirs_to_ignore = {
+            let len = rng.gen_range(0..subdirs.len());
+            subdirs.choose_multiple(rng, len)
+        };
+
+        let mut ignore_contents = String::new();
+        for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+            writeln!(
+                ignore_contents,
+                "{}",
+                path_to_ignore
+                    .strip_prefix(&ignore_dir_path)
+                    .unwrap()
+                    .to_str()
+                    .unwrap()
+            )
+            .unwrap();
+        }
+        log::info!(
+            "creating gitignore {:?} with contents:\n{}",
+            ignore_path.strip_prefix(&root_path).unwrap(),
+            ignore_contents
+        );
+        fs.save(
+            &ignore_path,
+            &ignore_contents.as_str().into(),
+            Default::default(),
+        )
+        .await
+        .unwrap();
+    } else {
+        // Pick any file or any non-root dir (dirs[0] is the root itself).
+        let old_path = {
+            let file_path = files.choose(rng);
+            let dir_path = dirs[1..].choose(rng);
+            file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+        };
+
+        let is_rename = rng.gen();
+        if is_rename {
+            // Choose a destination dir that isn't inside the moved path.
+            let new_path_parent = dirs
+                .iter()
+                .filter(|d| !d.starts_with(old_path))
+                .choose(rng)
+                .unwrap();
+
+            // Sometimes replace an existing directory wholesale (remove it
+            // first, then rename onto its path).
+            let overwrite_existing_dir =
+                !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+            let new_path = if overwrite_existing_dir {
+                fs.remove_dir(
+                    &new_path_parent,
+                    RemoveOptions {
+                        recursive: true,
+                        ignore_if_not_exists: true,
+                    },
+                )
+                .await
+                .unwrap();
+                new_path_parent.to_path_buf()
+            } else {
+                new_path_parent.join(random_filename(rng))
+            };
+
+            log::info!(
+                "renaming {:?} to {}{:?}",
+                old_path.strip_prefix(&root_path).unwrap(),
+                if overwrite_existing_dir {
+                    "overwrite "
+                } else {
+                    ""
+                },
+                new_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.rename(
+                &old_path,
+                &new_path,
+                fs::RenameOptions {
+                    overwrite: true,
+                    ignore_if_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        } else if fs.is_file(&old_path).await {
+            log::info!(
+                "deleting file {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_file(old_path, Default::default()).await.unwrap();
+        } else {
+            log::info!(
+                "deleting dir {:?}",
+                old_path.strip_prefix(&root_path).unwrap()
+            );
+            fs.remove_dir(
+                &old_path,
+                RemoveOptions {
+                    recursive: true,
+                    ignore_if_not_exists: true,
+                },
+            )
+            .await
+            .unwrap();
+        }
+    }
+}
+
+// Produces a random 6-character alphanumeric file name.
+fn random_filename(rng: &mut impl Rng) -> String {
+    let mut name = String::with_capacity(6);
+    for _ in 0..6 {
+        let byte = rng.sample(rand::distributions::Alphanumeric);
+        name.push(char::from(byte));
+    }
+    name
+}
+
+// Verifies that when a git repository's work directory is renamed on the real
+// filesystem, the worktree re-associates the repository and its file statuses
+// with the new path.
+#[gpui::test]
+async fn test_rename_work_directory(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    let root = temp_tree(json!({
+        "projects": {
+            "project1": {
+                "a": "",
+                "b": "",
+            }
+        },
+
+    }));
+    let root_path = root.path();
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root_path,
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    // Commit "a", then modify it; "b" stays untracked/added.
+    let repo = git_init(&root_path.join("projects/project1"));
+    git_add("a", &repo);
+    git_commit("init", &repo);
+    std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    tree.flush_fs_events(cx).await;
+
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project1/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    // Rename the entire work directory and let the fs events propagate.
+    std::fs::rename(
+        root_path.join("projects/project1"),
+        root_path.join("projects/project2"),
+    )
+    .ok();
+    tree.flush_fs_events(cx).await;
+
+    // Statuses must now be reported under the renamed work directory.
+    cx.read(|cx| {
+        let tree = tree.read(cx);
+        let (work_dir, _) = tree.repositories().next().unwrap();
+        assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/a")),
+            Some(GitFileStatus::Modified)
+        );
+        assert_eq!(
+            tree.status_for_file(Path::new("projects/project2/b")),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+// Verifies repository-to-path resolution with nested repositories: each file
+// maps to its innermost enclosing .git work directory, repository events fire
+// when .git contents change, and removing .git drops the association.
+#[gpui::test]
+async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    // Outer repo at dir1, inner repo at dir1/deps/dep1; c.txt is outside both.
+    let root = temp_tree(json!({
+        "c.txt": "",
+        "dir1": {
+            ".git": {},
+            "deps": {
+                "dep1": {
+                    ".git": {},
+                    "src": {
+                        "a.txt": ""
+                    }
+                }
+            },
+            "src": {
+                "b.txt": ""
+            }
+        },
+    }));
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+        let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1").to_owned())
+        );
+
+        // Nested repo wins over the outer one for paths beneath it.
+        let entry = tree
+            .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+            .unwrap();
+        assert_eq!(
+            entry
+                .work_directory(tree)
+                .map(|directory| directory.as_ref().to_owned()),
+            Some(Path::new("dir1/deps/dep1").to_owned())
+        );
+
+        let entries = tree.files(false, 0);
+
+        let paths_with_repos = tree
+            .entries_with_repositories(entries)
+            .map(|(entry, repo)| {
+                (
+                    entry.path.as_ref(),
+                    repo.and_then(|repo| {
+                        repo.work_directory(&tree)
+                            .map(|work_directory| work_directory.0.to_path_buf())
+                    }),
+                )
+            })
+            .collect::<Vec<_>>();
+
+        assert_eq!(
+            paths_with_repos,
+            &[
+                (Path::new("c.txt"), None),
+                (
+                    Path::new("dir1/deps/dep1/src/a.txt"),
+                    Some(Path::new("dir1/deps/dep1").into())
+                ),
+                (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+            ]
+        );
+    });
+
+    // Collect UpdatedGitRepositories events for the assertions below.
+    let repo_update_events = Arc::new(Mutex::new(vec![]));
+    tree.update(cx, |_, cx| {
+        let repo_update_events = repo_update_events.clone();
+        cx.subscribe(&tree, move |_, _, event, _| {
+            if let Event::UpdatedGitRepositories(update) = event {
+                repo_update_events.lock().push(update.clone());
+            }
+        })
+        .detach();
+    });
+
+    // Touching a file inside .git must produce a repository update for dir1.
+    std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+    tree.flush_fs_events(cx).await;
+
+    assert_eq!(
+        repo_update_events.lock()[0]
+            .iter()
+            .map(|e| e.0.clone())
+            .collect::<Vec<Arc<Path>>>(),
+        vec![Path::new("dir1").into()]
+    );
+
+    // Deleting .git removes the repository association entirely.
+    std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+    tree.flush_fs_events(cx).await;
+
+    tree.read_with(cx, |tree, _cx| {
+        let tree = tree.as_local().unwrap();
+
+        assert!(tree
+            .repository_for_path("dir1/src/b.txt".as_ref())
+            .is_none());
+    });
+}
+
+// End-to-end git status tracking against a real repository: initial statuses,
+// modifications, commits, resets/stashes, .gitignore changes, and renames of
+// directories containing status-carrying files.
+#[gpui::test]
+async fn test_git_status(cx: &mut TestAppContext) {
+    init_test(cx);
+    cx.executor().allow_parking();
+    const IGNORE_RULE: &'static str = "**/target";
+
+    let root = temp_tree(json!({
+        "project": {
+            "a.txt": "a",
+            "b.txt": "bb",
+            "c": {
+                "d": {
+                    "e.txt": "eee"
+                }
+            },
+            "f.txt": "ffff",
+            "target": {
+                "build_file": "???"
+            },
+            ".gitignore": IGNORE_RULE
+        },
+
+    }));
+
+    const A_TXT: &'static str = "a.txt";
+    const B_TXT: &'static str = "b.txt";
+    const E_TXT: &'static str = "c/d/e.txt";
+    const F_TXT: &'static str = "f.txt";
+    const DOTGITIGNORE: &'static str = ".gitignore";
+    const BUILD_FILE: &'static str = "target/build_file";
+    let project_path = Path::new("project");
+
+    // Set up git repository before creating the worktree.
+    let work_dir = root.path().join("project");
+    let mut repo = git_init(work_dir.as_path());
+    repo.add_ignore_rule(IGNORE_RULE).unwrap();
+    git_add(A_TXT, &repo);
+    git_add(E_TXT, &repo);
+    git_add(DOTGITIGNORE, &repo);
+    git_commit("Initial commit", &repo);
+
+    let tree = Worktree::local(
+        build_client(cx),
+        root.path(),
+        true,
+        Arc::new(RealFs),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+    cx.executor().run_until_parked();
+
+    // Check that the right git state is observed on startup
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(snapshot.repositories().count(), 1);
+        let (dir, _) = snapshot.repositories().next().unwrap();
+        assert_eq!(dir.as_ref(), Path::new("project"));
+
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    // Modify a file in the working copy.
+    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // The worktree detects that the file's git status has changed.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(A_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    // Create a commit in the git repository.
+    git_add(A_TXT, &repo);
+    git_add(B_TXT, &repo);
+    git_commit("Committing modified and added", &repo);
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // The worktree detects that the files' git status have changed.
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(F_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+    });
+
+    // Modify files in the working copy and perform git operations on other files.
+    git_reset(0, &repo);
+    git_remove_index(Path::new(B_TXT), &repo);
+    git_stash(&mut repo);
+    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+    // BUILD_FILE is under "target", which the .gitignore rule excludes.
+    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    // Check that more complex repo changes are tracked
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(B_TXT)),
+            Some(GitFileStatus::Added)
+        );
+        assert_eq!(
+            snapshot.status_for_file(project_path.join(E_TXT)),
+            Some(GitFileStatus::Modified)
+        );
+    });
+
+    // Delete files/dirs and extend the ignore rules, then commit the new
+    // .gitignore so f.txt becomes ignored.
+    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+    std::fs::write(
+        work_dir.join(DOTGITIGNORE),
+        [IGNORE_RULE, "f.txt"].join("\n"),
+    )
+    .unwrap();
+
+    git_add(Path::new(DOTGITIGNORE), &repo);
+    git_commit("Committing modified git ignore", &repo);
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    let mut renamed_dir_name = "first_directory/second_directory";
+    const RENAMED_FILE: &'static str = "rf.txt";
+
+    // A new file in a new nested directory shows up as Added.
+    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+    std::fs::write(
+        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+        "new-contents",
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+        assert_eq!(
+            snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
+            Some(GitFileStatus::Added)
+        );
+    });
+
+    renamed_dir_name = "new_first_directory/second_directory";
+
+    // Renaming the ancestor directory must carry the status along.
+    std::fs::rename(
+        work_dir.join("first_directory"),
+        work_dir.join("new_first_directory"),
+    )
+    .unwrap();
+
+    tree.flush_fs_events(cx).await;
+    cx.executor().run_until_parked();
+
+    tree.read_with(cx, |tree, _cx| {
+        let snapshot = tree.snapshot();
+
+        assert_eq!(
+            snapshot.status_for_file(
+                project_path
+                    .join(Path::new(renamed_dir_name))
+                    .join(RENAMED_FILE)
+            ),
+            Some(GitFileStatus::Added)
+        );
+    });
+}
+
+// Verifies that git statuses propagate from files up to their ancestor
+// directories (Conflict > Modified > Added precedence is implied by the
+// expected values below), for several different query subsets.
+#[gpui::test]
+async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".git": {},
+            "a": {
+                "b": {
+                    "c1.txt": "",
+                    "c2.txt": "",
+                },
+                "d": {
+                    "e1.txt": "",
+                    "e2.txt": "",
+                    "e3.txt": "",
+                }
+            },
+            "f": {
+                "no-status.txt": ""
+            },
+            "g": {
+                "h1.txt": "",
+                "h2.txt": ""
+            },
+
+        }),
+    )
+    .await;
+
+    // Seed one status per subtree: Added, Modified, and Conflict.
+    fs.set_status_for_repo_via_git_operation(
+        &Path::new("/root/.git"),
+        &[
+            (Path::new("a/b/c1.txt"), GitFileStatus::Added),
+            (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
+            (Path::new("g/h2.txt"), GitFileStatus::Conflict),
+        ],
+    );
+
+    let tree = Worktree::local(
+        build_client(cx),
+        Path::new("/root"),
+        true,
+        fs.clone(),
+        Default::default(),
+        &mut cx.to_async(),
+    )
+    .await
+    .unwrap();
+
+    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+        .await;
+
+    cx.executor().run_until_parked();
+    let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+    // Full set including the root and all intermediate directories.
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new(""), Some(GitFileStatus::Conflict)),
+            (Path::new("a"), Some(GitFileStatus::Modified)),
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+            (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    // Subset without the root entry.
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d"), Some(GitFileStatus::Modified)),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f"), None),
+            (Path::new("f/no-status.txt"), None),
+            (Path::new("g"), Some(GitFileStatus::Conflict)),
+        ],
+    );
+
+    // Files only — no directory entries in the query set.
+    check_propagated_statuses(
+        &snapshot,
+        &[
+            (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+            (Path::new("a/b/c2.txt"), None),
+            (Path::new("a/d/e1.txt"), None),
+            (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+            (Path::new("f/no-status.txt"), None),
+        ],
+    );
+
+    // Looks up each requested entry, runs status propagation over the set,
+    // and asserts the resulting (path, status) pairs.
+    #[track_caller]
+    fn check_propagated_statuses(
+        snapshot: &Snapshot,
+        expected_statuses: &[(&Path, Option<GitFileStatus>)],
+    ) {
+        let mut entries = expected_statuses
+            .iter()
+            .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+            .collect::<Vec<_>>();
+        snapshot.propagate_git_statuses(&mut entries);
+        assert_eq!(
+            entries
+                .iter()
+                .map(|e| (e.path.as_ref(), e.git_status))
+                .collect::<Vec<_>>(),
+            expected_statuses
+        );
+    }
+}
+
+fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
+    let http_client = FakeHttpClient::with_404_response();
+    cx.read(|cx| Client::new(http_client, cx))
+}
+
+#[track_caller]
+fn git_init(path: &Path) -> git2::Repository {
+    git2::Repository::init(path).expect("Failed to initialize git repository")
+}
+
+#[track_caller]
+fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+    let path = path.as_ref();
+    let mut index = repo.index().expect("Failed to get index");
+    index.add_path(path).expect("Failed to add path to index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_remove_index(path: &Path, repo: &git2::Repository) {
+    let mut index = repo.index().expect("Failed to get index");
+    index.remove_path(path).expect("Failed to remove path from index");
+    index.write().expect("Failed to write index");
+}
+
+#[track_caller]
+fn git_commit(msg: &'static str, repo: &git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    let oid = repo.index().unwrap().write_tree().unwrap();
+    let tree = repo.find_tree(oid).unwrap();
+    if let Ok(head) = repo.head() {
+        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+        let parent_commit = parent_obj.as_commit().unwrap();
+
+        repo.commit(
+            Some("HEAD"),
+            &signature,
+            &signature,
+            msg,
+            &tree,
+            &[parent_commit],
+        )
+        .expect("Failed to commit with parent");
+    } else {
+        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+            .expect("Failed to commit");
+    }
+}
+
+#[track_caller]
+fn git_stash(repo: &mut git2::Repository) {
+    use git2::Signature;
+
+    let signature = Signature::now("test", "test@zed.dev").unwrap();
+    repo.stash_save(&signature, "N/A", None)
+        .expect("Failed to stash");
+}
+
+#[track_caller]
+fn git_reset(offset: usize, repo: &git2::Repository) {
+    let head = repo.head().expect("Couldn't get repo head");
+    let object = head.peel(git2::ObjectType::Commit).unwrap();
+    let commit = object.as_commit().unwrap();
+    let new_head = commit
+        .parents()
+        .inspect(|parent| {
+            parent.message();
+        })
+        .nth(offset)
+        .expect("Not enough history");
+    repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+        .expect("Could not reset");
+}
+
+#[allow(dead_code)]
+#[track_caller]
+fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
+    repo.statuses(None)
+        .unwrap()
+        .iter()
+        .map(|status| (status.path().unwrap().to_string(), status.status()))
+        .collect()
+}
+
+#[track_caller]
+fn check_worktree_entries(
+    tree: &Worktree,
+    expected_excluded_paths: &[&str],
+    expected_ignored_paths: &[&str],
+    expected_tracked_paths: &[&str],
+) {
+    for path in expected_excluded_paths {
+        let entry = tree.entry_for_path(path);
+        assert!(
+            entry.is_none(),
+            "expected path '{path}' to be excluded, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_ignored_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected ignored path '{path}'"));
+        assert!(
+            entry.is_ignored,
+            "expected path '{path}' to be ignored, but got entry: {entry:?}",
+        );
+    }
+    for path in expected_tracked_paths {
+        let entry = tree
+            .entry_for_path(path)
+            .unwrap_or_else(|| panic!("Missing entry for expected tracked path '{path}'"));
+        assert!(
+            !entry.is_ignored,
+            "expected path '{path}' to be tracked, but got entry: {entry:?}",
+        );
+    }
+}
+
+fn init_test(cx: &mut gpui::TestAppContext) {
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        Project::init_settings(cx);
+    });
+}

crates/project_panel/src/project_panel.rs 🔗

@@ -1732,7 +1732,7 @@ mod tests {
     use super::*;
     use gpui::{AnyWindowHandle, TestAppContext, ViewHandle, WindowHandle};
     use pretty_assertions::assert_eq;
-    use project::FakeFs;
+    use project::{project_settings::ProjectSettings, FakeFs};
     use serde_json::json;
     use settings::SettingsStore;
     use std::{
@@ -1832,6 +1832,123 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions =
+                        Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+                });
+            });
+        });
+
+        let fs = FakeFs::new(cx.background());
+        fs.insert_tree(
+            "/root1",
+            json!({
+                ".dockerignore": "",
+                ".git": {
+                    "HEAD": "",
+                },
+                "a": {
+                    "0": { "q": "", "r": "", "s": "" },
+                    "1": { "t": "", "u": "" },
+                    "2": { "v": "", "w": "", "x": "", "y": "" },
+                },
+                "b": {
+                    "3": { "Q": "" },
+                    "4": { "R": "", "S": "", "T": "", "U": "" },
+                },
+                "C": {
+                    "5": {},
+                    "6": { "V": "", "W": "" },
+                    "7": { "X": "" },
+                    "8": { "Y": {}, "Z": "" }
+                }
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root2",
+            json!({
+                "d": {
+                    "4": ""
+                },
+                "e": {}
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+        let workspace = cx
+            .add_window(|cx| Workspace::test_new(project.clone(), cx))
+            .root(cx);
+        let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    > b",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root1/b", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b  <== selected",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/d", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d  <== selected",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/e", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d",
+                "    v e  <== selected",
+            ]
+        );
+    }
+
     #[gpui::test(iterations = 30)]
     async fn test_editing_files(cx: &mut gpui::TestAppContext) {
         init_test(cx);
@@ -2929,6 +3046,12 @@ mod tests {
             workspace::init_settings(cx);
             client::init_settings(cx);
             Project::init_settings(cx);
+
+            cx.update_global::<SettingsStore, _, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions = Some(Vec::new());
+                });
+            });
         });
     }
 

crates/project_panel2/src/project_panel.rs 🔗

@@ -1571,7 +1571,7 @@ mod tests {
     use super::*;
     use gpui::{TestAppContext, View, VisualTestContext, WindowHandle};
     use pretty_assertions::assert_eq;
-    use project::FakeFs;
+    use project::{project_settings::ProjectSettings, FakeFs};
     use serde_json::json;
     use settings::SettingsStore;
     use std::{
@@ -1672,6 +1672,124 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_exclusions_in_visible_list(cx: &mut gpui::TestAppContext) {
+        init_test(cx);
+        cx.update(|cx| {
+            cx.update_global::<SettingsStore, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions =
+                        Some(vec!["**/.git".to_string(), "**/4/**".to_string()]);
+                });
+            });
+        });
+
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            "/root1",
+            json!({
+                ".dockerignore": "",
+                ".git": {
+                    "HEAD": "",
+                },
+                "a": {
+                    "0": { "q": "", "r": "", "s": "" },
+                    "1": { "t": "", "u": "" },
+                    "2": { "v": "", "w": "", "x": "", "y": "" },
+                },
+                "b": {
+                    "3": { "Q": "" },
+                    "4": { "R": "", "S": "", "T": "", "U": "" },
+                },
+                "C": {
+                    "5": {},
+                    "6": { "V": "", "W": "" },
+                    "7": { "X": "" },
+                    "8": { "Y": {}, "Z": "" }
+                }
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root2",
+            json!({
+                "d": {
+                    "4": ""
+                },
+                "e": {}
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
+        let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
+        let cx = &mut VisualTestContext::from_window(*workspace, cx);
+        let panel = workspace
+            .update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
+            .unwrap();
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    > b",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root1/b", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b  <== selected",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    > d",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/d", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d  <== selected",
+                "    > e",
+            ]
+        );
+
+        toggle_expand_dir(&panel, "root2/e", cx);
+        assert_eq!(
+            visible_entries_as_strings(&panel, 0..50, cx),
+            &[
+                "v root1",
+                "    > a",
+                "    v b",
+                "        > 3",
+                "    > C",
+                "      .dockerignore",
+                "v root2",
+                "    v d",
+                "    v e  <== selected",
+            ]
+        );
+    }
+
     #[gpui::test(iterations = 30)]
     async fn test_editing_files(cx: &mut gpui::TestAppContext) {
         init_test(cx);
@@ -2792,6 +2910,12 @@ mod tests {
             workspace::init_settings(cx);
             client::init_settings(cx);
             Project::init_settings(cx);
+
+            cx.update_global::<SettingsStore, _>(|store, cx| {
+                store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+                    project_settings.file_scan_exclusions = Some(Vec::new());
+                });
+            });
         });
     }
 

crates/rpc/proto/zed.proto 🔗

@@ -884,6 +884,7 @@ message SearchProject {
     bool case_sensitive = 5;
     string files_to_include = 6;
     string files_to_exclude = 7;
+    bool include_ignored = 8;
 }
 
 message SearchProjectResponse {

crates/rpc2/proto/zed.proto 🔗

@@ -884,6 +884,7 @@ message SearchProject {
     bool case_sensitive = 5;
     string files_to_include = 6;
     string files_to_exclude = 7;
+    bool include_ignored = 8;
 }
 
 message SearchProjectResponse {

crates/search/src/buffer_search.rs 🔗

@@ -805,6 +805,7 @@ impl BufferSearchBar {
                         query,
                         self.search_options.contains(SearchOptions::WHOLE_WORD),
                         self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                        false,
                         Vec::new(),
                         Vec::new(),
                     ) {
@@ -820,6 +821,7 @@ impl BufferSearchBar {
                         query,
                         self.search_options.contains(SearchOptions::WHOLE_WORD),
                         self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                        false,
                         Vec::new(),
                         Vec::new(),
                     ) {

crates/search/src/project_search.rs 🔗

@@ -4,7 +4,7 @@ use crate::{
     search_bar::{render_nav_button, render_option_button_icon, render_search_mode_button},
     ActivateRegexMode, ActivateSemanticMode, ActivateTextMode, CycleMode, NextHistoryQuery,
     PreviousHistoryQuery, ReplaceAll, ReplaceNext, SearchOptions, SelectNextMatch, SelectPrevMatch,
-    ToggleCaseSensitive, ToggleReplace, ToggleWholeWord,
+    ToggleCaseSensitive, ToggleIncludeIgnored, ToggleReplace, ToggleWholeWord,
 };
 use anyhow::{Context, Result};
 use collections::HashMap;
@@ -85,6 +85,7 @@ pub fn init(cx: &mut AppContext) {
     cx.capture_action(ProjectSearchView::replace_next);
     add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
     add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
+    add_toggle_option_action::<ToggleIncludeIgnored>(SearchOptions::INCLUDE_IGNORED, cx);
     add_toggle_filters_action::<ToggleFilters>(cx);
 }
 
@@ -1192,6 +1193,7 @@ impl ProjectSearchView {
                     text,
                     self.search_options.contains(SearchOptions::WHOLE_WORD),
                     self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                    self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
                     included_files,
                     excluded_files,
                 ) {
@@ -1210,6 +1212,7 @@ impl ProjectSearchView {
                 text,
                 self.search_options.contains(SearchOptions::WHOLE_WORD),
                 self.search_options.contains(SearchOptions::CASE_SENSITIVE),
+                self.search_options.contains(SearchOptions::INCLUDE_IGNORED),
                 included_files,
                 excluded_files,
             ) {
@@ -1764,6 +1767,17 @@ impl View for ProjectSearchBar {
                 render_option_button_icon("icons/word_search.svg", SearchOptions::WHOLE_WORD, cx)
             });
 
+            let mut include_ignored = is_semantic_disabled.then(|| {
+                render_option_button_icon(
+                    // TODO proper icon
+                    "icons/case_insensitive.svg",
+                    SearchOptions::INCLUDE_IGNORED,
+                    cx,
+                )
+            });
+            // TODO not implemented yet
+            let _ = include_ignored.take();
+
             let search_button_for_mode = |mode, side, cx: &mut ViewContext<ProjectSearchBar>| {
                 let is_active = if let Some(search) = self.active_project_search.as_ref() {
                     let search = search.read(cx);
@@ -1879,7 +1893,15 @@ impl View for ProjectSearchBar {
                 .with_children(search.filters_enabled.then(|| {
                     Flex::row()
                         .with_child(
-                            ChildView::new(&search.included_files_editor, cx)
+                            Flex::row()
+                                .with_child(
+                                    ChildView::new(&search.included_files_editor, cx)
+                                        .contained()
+                                        .constrained()
+                                        .with_height(theme.search.search_bar_row_height)
+                                        .flex(1., true),
+                                )
+                                .with_children(include_ignored)
                                 .contained()
                                 .with_style(include_container_style)
                                 .constrained()

crates/search/src/search.rs 🔗

@@ -29,6 +29,7 @@ actions!(
         CycleMode,
         ToggleWholeWord,
         ToggleCaseSensitive,
+        ToggleIncludeIgnored,
         ToggleReplace,
         SelectNextMatch,
         SelectPrevMatch,
@@ -49,31 +50,35 @@ bitflags! {
         const NONE = 0b000;
         const WHOLE_WORD = 0b001;
         const CASE_SENSITIVE = 0b010;
+        const INCLUDE_IGNORED = 0b100;
     }
 }
 
 impl SearchOptions {
     pub fn label(&self) -> &'static str {
         match *self {
-            SearchOptions::WHOLE_WORD => "Match Whole Word",
-            SearchOptions::CASE_SENSITIVE => "Match Case",
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => "Match Whole Word",
+            Self::CASE_SENSITIVE => "Match Case",
+            Self::INCLUDE_IGNORED => "Include Ignored",
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
     pub fn icon(&self) -> &'static str {
         match *self {
-            SearchOptions::WHOLE_WORD => "icons/word_search.svg",
-            SearchOptions::CASE_SENSITIVE => "icons/case_insensitive.svg",
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => "icons/word_search.svg",
+            Self::CASE_SENSITIVE => "icons/case_insensitive.svg",
+            Self::INCLUDE_IGNORED => "icons/case_insensitive.svg",
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
     pub fn to_toggle_action(&self) -> Box<dyn Action> {
         match *self {
-            SearchOptions::WHOLE_WORD => Box::new(ToggleWholeWord),
-            SearchOptions::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
-            _ => panic!("{:?} is not a named SearchOption", self),
+            Self::WHOLE_WORD => Box::new(ToggleWholeWord),
+            Self::CASE_SENSITIVE => Box::new(ToggleCaseSensitive),
+            Self::INCLUDE_IGNORED => Box::new(ToggleIncludeIgnored),
+            _ => panic!("{self:?} is not a named SearchOption"),
         }
     }
 
@@ -85,6 +90,7 @@ impl SearchOptions {
         let mut options = SearchOptions::NONE;
         options.set(SearchOptions::WHOLE_WORD, query.whole_word());
         options.set(SearchOptions::CASE_SENSITIVE, query.case_sensitive());
+        options.set(SearchOptions::INCLUDE_IGNORED, query.include_ignored());
         options
     }
 

crates/util/src/paths.rs 🔗

@@ -202,6 +202,14 @@ impl std::fmt::Display for PathMatcher {
     }
 }
 
+impl PartialEq for PathMatcher {
+    fn eq(&self, other: &Self) -> bool {
+        self.maybe_path.eq(&other.maybe_path)
+    }
+}
+
+impl Eq for PathMatcher {}
+
 impl PathMatcher {
     pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
         Ok(PathMatcher {
@@ -211,7 +219,19 @@ impl PathMatcher {
     }
 
     pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
-        other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+        other.as_ref().starts_with(&self.maybe_path)
+            || self.glob.is_match(&other)
+            || self.check_with_end_separator(other.as_ref())
+    }
+
+    fn check_with_end_separator(&self, path: &Path) -> bool {
+        let path_str = path.to_string_lossy();
+        let separator = std::path::MAIN_SEPARATOR_STR;
+        if path_str.ends_with(separator) {
+            self.glob.is_match(path)
+        } else {
+            self.glob.is_match(path_str.to_string() + separator)
+        }
     }
 }
 
@@ -388,4 +408,14 @@ mod tests {
         let path = Path::new("/a/b/c/.eslintrc.js");
         assert_eq!(path.extension_or_hidden_file_name(), Some("js"));
     }
+
+    #[test]
+    fn edge_of_glob() {
+        let path = Path::new("/work/node_modules");
+        let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+        assert!(
+            path_matcher.is_match(path),
+            "Path matcher {path_matcher} should match {path:?}"
+        );
+    }
 }