Scan any external/ignored directories needed for LSP watchers

Authored by Max Brunsfeld and Nathan Sobo

Also, don't include "external" files in project searches. Treat them
the same as ignored files.

Co-authored-by: Nathan Sobo <nathan@zed.dev>
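
For context, the problem case is a language server registering a file watcher whose
glob points into a gitignored directory (for example, build output under target/).
Using the lsp types that appear in the diff below, such a registration might look
roughly like this; the concrete path and pattern are illustrative rather than taken
from any particular server:

    // Hypothetical registration. Before this change, "target" was never scanned
    // because it is gitignored, so changes under it could not be reported back
    // to the server that asked to watch them.
    let _options = lsp::DidChangeWatchedFilesRegistrationOptions {
        watchers: vec![lsp::FileSystemWatcher {
            glob_pattern: lsp::GlobPattern::String(
                "/the-root/target/debug/**/*.rs".to_string(),
            ),
            kind: None,
        }],
    };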

Change summary

crates/project/src/project.rs        |  69 +++++++++--
crates/project/src/project_tests.rs  | 124 +++++++++++++++++++--
crates/project/src/worktree.rs       | 165 +++++++++++++++++++----------
crates/project/src/worktree_tests.rs |   6 
4 files changed, 273 insertions(+), 91 deletions(-)

Detailed changes

crates/project/src/project.rs

@@ -64,7 +64,7 @@ use std::{
     mem,
     num::NonZeroU32,
     ops::Range,
-    path::{Component, Path, PathBuf},
+    path::{self, Component, Path, PathBuf},
     rc::Rc,
     str,
     sync::{
@@ -3116,23 +3116,44 @@ impl Project {
             for watcher in params.watchers {
                 for worktree in &self.worktrees {
                     if let Some(worktree) = worktree.upgrade(cx) {
-                        let worktree = worktree.read(cx);
-                        if let Some(abs_path) = worktree.abs_path().to_str() {
-                            if let Some(suffix) = match &watcher.glob_pattern {
-                                lsp::GlobPattern::String(s) => s,
-                                lsp::GlobPattern::Relative(rp) => &rp.pattern,
-                            }
-                            .strip_prefix(abs_path)
-                            .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
-                            {
-                                if let Some(glob) = Glob::new(suffix).log_err() {
-                                    builders
-                                        .entry(worktree.id())
-                                        .or_insert_with(|| GlobSetBuilder::new())
-                                        .add(glob);
+                        let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
+                            if let Some(abs_path) = tree.abs_path().to_str() {
+                                let relative_glob_pattern = match &watcher.glob_pattern {
+                                    lsp::GlobPattern::String(s) => s
+                                        .strip_prefix(abs_path)
+                                        .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
+                                    lsp::GlobPattern::Relative(rp) => {
+                                        let base_uri = match &rp.base_uri {
+                                            lsp::OneOf::Left(workspace_folder) => {
+                                                &workspace_folder.uri
+                                            }
+                                            lsp::OneOf::Right(base_uri) => base_uri,
+                                        };
+                                        base_uri.to_file_path().ok().and_then(|file_path| {
+                                            (file_path.to_str() == Some(abs_path))
+                                                .then_some(rp.pattern.as_str())
+                                        })
+                                    }
+                                };
+                                if let Some(relative_glob_pattern) = relative_glob_pattern {
+                                    let literal_prefix =
+                                        glob_literal_prefix(&relative_glob_pattern);
+                                    tree.as_local_mut()
+                                        .unwrap()
+                                        .add_path_prefix_to_scan(Path::new(literal_prefix).into());
+                                    if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
+                                        builders
+                                            .entry(tree.id())
+                                            .or_insert_with(|| GlobSetBuilder::new())
+                                            .add(glob);
+                                    }
+                                    return true;
                                 }
-                                break;
                             }
+                            false
+                        });
+                        if glob_is_inside_worktree {
+                            break;
                         }
                     }
                 }
@@ -7105,6 +7126,22 @@ impl Project {
     }
 }
 
+fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
+    let mut literal_end = 0;
+    for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
+        if part.contains(&['*', '?', '{', '}']) {
+            break;
+        } else {
+            if i > 0 {
+                // Account for the separator prior to this part
+                literal_end += path::MAIN_SEPARATOR.len_utf8();
+            }
+            literal_end += part.len();
+        }
+    }
+    &glob[..literal_end]
+}
+
 impl WorktreeHandle {
     pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
         match self {
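
To make the registration handling above concrete: for a watcher glob of
"/the-root/target/y/**/*.rs" (as in the test below) and a worktree rooted at
"/the-root", the new code strips the worktree prefix to get the relative pattern
"target/y/**/*.rs", and glob_literal_prefix reduces that to "target/y", which is
handed to add_path_prefix_to_scan so the ignored directory gets loaded. A minimal
sketch of that derivation, reusing glob_literal_prefix from the hunk above (the
helper name relative_pattern_and_prefix is made up for illustration):

    // Sketch only: derive the worktree-relative pattern and its literal prefix
    // from an absolute watcher glob, returning None when the glob lies outside
    // the worktree (the "glob_is_inside_worktree" case above).
    fn relative_pattern_and_prefix<'a>(
        glob: &'a str,
        worktree_abs_path: &str,
    ) -> Option<(&'a str, &'a str)> {
        let relative = glob
            .strip_prefix(worktree_abs_path)?
            .strip_prefix(std::path::MAIN_SEPARATOR)?;
        Some((relative, glob_literal_prefix(relative)))
    }

    // relative_pattern_and_prefix("/the-root/target/y/**/*.rs", "/the-root")
    //     == Some(("target/y/**/*.rs", "target/y"))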

crates/project/src/project_tests.rs

@@ -535,8 +535,28 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     fs.insert_tree(
         "/the-root",
         json!({
-            "a.rs": "",
-            "b.rs": "",
+            ".gitignore": "target\n",
+            "src": {
+                "a.rs": "",
+                "b.rs": "",
+            },
+            "target": {
+                "x": {
+                    "out": {
+                        "x.rs": ""
+                    }
+                },
+                "y": {
+                    "out": {
+                        "y.rs": "",
+                    }
+                },
+                "z": {
+                    "out": {
+                        "z.rs": ""
+                    }
+                }
+            }
         }),
     )
     .await;
@@ -550,11 +570,32 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     // Start the language server by opening a buffer with a compatible file extension.
     let _buffer = project
         .update(cx, |project, cx| {
-            project.open_local_buffer("/the-root/a.rs", cx)
+            project.open_local_buffer("/the-root/src/a.rs", cx)
         })
         .await
         .unwrap();
 
+    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
+    project.read_with(cx, |project, cx| {
+        let worktree = project.worktrees(cx).next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+            ]
+        );
+    });
+
     // Keep track of the FS events reported to the language server.
     let fake_server = fake_servers.next().await.unwrap();
     let file_changes = Arc::new(Mutex::new(Vec::new()));
@@ -565,12 +606,20 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
                 method: "workspace/didChangeWatchedFiles".to_string(),
                 register_options: serde_json::to_value(
                     lsp::DidChangeWatchedFilesRegistrationOptions {
-                        watchers: vec![lsp::FileSystemWatcher {
-                            glob_pattern: lsp::GlobPattern::String(
-                                "/the-root/*.{rs,c}".to_string(),
-                            ),
-                            kind: None,
-                        }],
+                        watchers: vec![
+                            lsp::FileSystemWatcher {
+                                glob_pattern: lsp::GlobPattern::String(
+                                    "/the-root/src/*.{rs,c}".to_string(),
+                                ),
+                                kind: None,
+                            },
+                            lsp::FileSystemWatcher {
+                                glob_pattern: lsp::GlobPattern::String(
+                                    "/the-root/target/y/**/*.rs".to_string(),
+                                ),
+                                kind: None,
+                            },
+                        ],
                     },
                 )
                 .ok(),
@@ -588,17 +637,50 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
     });
 
     cx.foreground().run_until_parked();
-    assert_eq!(file_changes.lock().len(), 0);
+    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
+
+    // Now the language server has asked us to watch an ignored directory path,
+    // so we recursively load it.
+    project.read_with(cx, |project, cx| {
+        let worktree = project.worktrees(cx).next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+                (Path::new("target/x"), true),
+                (Path::new("target/y"), true),
+                (Path::new("target/y/out"), true),
+                (Path::new("target/y/out/y.rs"), true),
+                (Path::new("target/z"), true),
+            ]
+        );
+    });
 
     // Perform some file system mutations, three of which match the watched patterns,
     // and two of which do not.
-    fs.create_file("/the-root/c.rs".as_ref(), Default::default())
+    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
         .await
         .unwrap();
-    fs.create_file("/the-root/d.txt".as_ref(), Default::default())
+    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
         .await
         .unwrap();
-    fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
+    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
         .await
         .unwrap();
 
@@ -608,11 +690,15 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
         &*file_changes.lock(),
         &[
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
+                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                 typ: lsp::FileChangeType::DELETED,
             },
             lsp::FileEvent {
-                uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
+                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
+                typ: lsp::FileChangeType::CREATED,
+            },
+            lsp::FileEvent {
+                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                 typ: lsp::FileChangeType::CREATED,
             },
         ]
@@ -3846,6 +3932,14 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
     );
 }
 
+#[test]
+fn test_glob_literal_prefix() {
+    assert_eq!(glob_literal_prefix("**/*.js"), "");
+    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
+    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
+    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
+}
+
 async fn search(
     project: &ModelHandle<Project>,
     query: SearchQuery,
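
A quick way to see why exactly three of the five file system mutations above are
reported is to run the two watcher globs, relative to the worktree root, through
globset directly (a standalone check, not part of the test):

    use globset::{Glob, GlobSetBuilder};

    fn main() -> Result<(), globset::Error> {
        let mut builder = GlobSetBuilder::new();
        builder.add(Glob::new("src/*.{rs,c}")?);
        builder.add(Glob::new("target/y/**/*.rs")?);
        let watched = builder.build()?;

        assert!(watched.is_match("src/c.rs")); // created -> CREATED event
        assert!(watched.is_match("src/b.rs")); // removed -> DELETED event
        assert!(watched.is_match("target/y/out/y2.rs")); // created -> CREATED event
        assert!(!watched.is_match("src/d.txt")); // no watcher matches
        assert!(!watched.is_match("target/x/out/x2.rs")); // no watcher matches
        Ok(())
    }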

crates/project/src/worktree.rs

@@ -68,6 +68,7 @@ pub enum Worktree {
 pub struct LocalWorktree {
     snapshot: LocalSnapshot,
     scan_requests_tx: channel::Sender<ScanRequest>,
+    path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
     is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
     _background_scanner_task: Task<()>,
     share: Option<ShareState>,
@@ -219,8 +220,9 @@ pub struct LocalSnapshot {
 
 struct BackgroundScannerState {
     snapshot: LocalSnapshot,
-    expanded_dirs: HashSet<ProjectEntryId>,
-    expanded_paths: HashSet<Arc<Path>>,
+    scanned_dirs: HashSet<ProjectEntryId>,
+    path_prefixes_to_scan: HashSet<Arc<Path>>,
+    paths_to_scan: HashSet<Arc<Path>>,
     /// The ids of all of the entries that were removed from the snapshot
     /// as part of the current update. These entry ids may be re-used
     /// if the same inode is discovered at a new path, or if the given
@@ -331,6 +333,7 @@ impl Worktree {
             }
 
             let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+            let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
             let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
 
             cx.spawn_weak(|this, mut cx| async move {
@@ -371,6 +374,7 @@ impl Worktree {
                         scan_states_tx,
                         background,
                         scan_requests_rx,
+                        path_prefixes_to_scan_rx,
                     )
                     .run(events)
                     .await;
@@ -382,6 +386,7 @@ impl Worktree {
                 is_scanning: watch::channel_with(true),
                 share: None,
                 scan_requests_tx,
+                path_prefixes_to_scan_tx,
                 _background_scanner_task: background_scanner_task,
                 diagnostics: Default::default(),
                 diagnostic_summaries: Default::default(),
@@ -1147,6 +1152,10 @@ impl LocalWorktree {
         rx
     }
 
+    pub fn add_path_prefix_to_scan(&self, path_prefix: Arc<Path>) {
+        self.path_prefixes_to_scan_tx.try_send(path_prefix).ok();
+    }
+
     fn refresh_entry(
         &self,
         path: Arc<Path>,
@@ -1566,7 +1575,7 @@ impl Snapshot {
     }
 
     pub fn visible_file_count(&self) -> usize {
-        self.entries_by_path.summary().visible_file_count
+        self.entries_by_path.summary().non_ignored_file_count
     }
 
     fn traverse_from_offset(
@@ -2067,7 +2076,7 @@ impl LocalSnapshot {
         for entry in self.entries_by_path.cursor::<()>() {
             if entry.is_file() {
                 assert_eq!(files.next().unwrap().inode, entry.inode);
-                if !entry.is_ignored {
+                if !entry.is_ignored && !entry.is_external {
                     assert_eq!(visible_files.next().unwrap().inode, entry.inode);
                 }
             }
@@ -2129,12 +2138,17 @@ impl LocalSnapshot {
 }
 
 impl BackgroundScannerState {
-    fn is_path_expanded(&self, path: &Path) -> bool {
-        self.expanded_paths.iter().any(|p| p.starts_with(path))
+    fn should_scan_directory(&self, entry: &Entry) -> bool {
+        (!entry.is_external && !entry.is_ignored)
+            || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
             || self
-                .snapshot
-                .entry_for_path(&path)
-                .map_or(false, |entry| self.expanded_dirs.contains(&entry.id))
+                .paths_to_scan
+                .iter()
+                .any(|p| p.starts_with(&entry.path))
+            || self
+                .path_prefixes_to_scan
+                .iter()
+                .any(|p| entry.path.starts_with(p))
     }
 
     fn reuse_entry_id(&mut self, entry: &mut Entry) {
@@ -2192,17 +2206,16 @@ impl BackgroundScannerState {
                 .insert(abs_parent_path, (ignore, false));
         }
 
-        self.expanded_dirs.insert(parent_entry.id);
+        self.scanned_dirs.insert(parent_entry.id);
         let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
         let mut entries_by_id_edits = Vec::new();
         let mut dotgit_path = None;
 
-        for mut entry in entries {
+        for entry in entries {
             if entry.path.file_name() == Some(&DOT_GIT) {
                 dotgit_path = Some(entry.path.clone());
             }
 
-            self.reuse_entry_id(&mut entry);
             entries_by_id_edits.push(Edit::Insert(PathEntry {
                 id: entry.id,
                 path: entry.path.clone(),
@@ -2677,7 +2690,20 @@ pub struct Entry {
     pub inode: u64,
     pub mtime: SystemTime,
     pub is_symlink: bool,
+
+    /// Whether this entry is ignored by Git.
+    ///
+    /// We only scan ignored entries once the directory is expanded and
+    /// exclude them from searches.
     pub is_ignored: bool,
+
+    /// Whether this entry's canonical path is outside of the worktree.
+    /// This means the entry is only accessible from the worktree root via a
+    /// symlink.
+    ///
+    /// We only scan entries outside of the worktree once the symlinked
+    /// directory is expanded. External entries are treated like gitignored
+    /// entries in that they are not included in searches.
     pub is_external: bool,
     pub git_status: Option<GitFileStatus>,
 }
@@ -2772,15 +2798,19 @@ impl sum_tree::Item for Entry {
     type Summary = EntrySummary;
 
     fn summary(&self) -> Self::Summary {
-        let visible_count = if self.is_ignored { 0 } else { 1 };
+        let non_ignored_count = if self.is_ignored || self.is_external {
+            0
+        } else {
+            1
+        };
         let file_count;
-        let visible_file_count;
+        let non_ignored_file_count;
         if self.is_file() {
             file_count = 1;
-            visible_file_count = visible_count;
+            non_ignored_file_count = non_ignored_count;
         } else {
             file_count = 0;
-            visible_file_count = 0;
+            non_ignored_file_count = 0;
         }
 
         let mut statuses = GitStatuses::default();
@@ -2796,9 +2826,9 @@ impl sum_tree::Item for Entry {
         EntrySummary {
             max_path: self.path.clone(),
             count: 1,
-            visible_count,
+            non_ignored_count,
             file_count,
-            visible_file_count,
+            non_ignored_file_count,
             statuses,
         }
     }
@@ -2816,9 +2846,9 @@ impl sum_tree::KeyedItem for Entry {
 pub struct EntrySummary {
     max_path: Arc<Path>,
     count: usize,
-    visible_count: usize,
+    non_ignored_count: usize,
     file_count: usize,
-    visible_file_count: usize,
+    non_ignored_file_count: usize,
     statuses: GitStatuses,
 }
 
@@ -2827,9 +2857,9 @@ impl Default for EntrySummary {
         Self {
             max_path: Arc::from(Path::new("")),
             count: 0,
-            visible_count: 0,
+            non_ignored_count: 0,
             file_count: 0,
-            visible_file_count: 0,
+            non_ignored_file_count: 0,
             statuses: Default::default(),
         }
     }
@@ -2841,9 +2871,9 @@ impl sum_tree::Summary for EntrySummary {
     fn add_summary(&mut self, rhs: &Self, _: &()) {
         self.max_path = rhs.max_path.clone();
         self.count += rhs.count;
-        self.visible_count += rhs.visible_count;
+        self.non_ignored_count += rhs.non_ignored_count;
         self.file_count += rhs.file_count;
-        self.visible_file_count += rhs.visible_file_count;
+        self.non_ignored_file_count += rhs.non_ignored_file_count;
         self.statuses += rhs.statuses;
     }
 }
@@ -2912,6 +2942,7 @@ struct BackgroundScanner {
     status_updates_tx: UnboundedSender<ScanState>,
     executor: Arc<executor::Background>,
     scan_requests_rx: channel::Receiver<ScanRequest>,
+    path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     next_entry_id: Arc<AtomicUsize>,
     phase: BackgroundScannerPhase,
 }
@@ -2931,20 +2962,23 @@ impl BackgroundScanner {
         status_updates_tx: UnboundedSender<ScanState>,
         executor: Arc<executor::Background>,
         scan_requests_rx: channel::Receiver<ScanRequest>,
+        path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     ) -> Self {
         Self {
             fs,
             status_updates_tx,
             executor,
             scan_requests_rx,
+            path_prefixes_to_scan_rx,
             next_entry_id,
             state: Mutex::new(BackgroundScannerState {
                 prev_snapshot: snapshot.snapshot.clone(),
                 snapshot,
-                expanded_dirs: Default::default(),
+                scanned_dirs: Default::default(),
+                path_prefixes_to_scan: Default::default(),
+                paths_to_scan: Default::default(),
                 removed_entry_ids: Default::default(),
                 changed_paths: Default::default(),
-                expanded_paths: Default::default(),
             }),
             phase: BackgroundScannerPhase::InitialScan,
         }
@@ -2952,7 +2986,7 @@ impl BackgroundScanner {
 
     async fn run(
         &mut self,
-        mut events_rx: Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>,
+        mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>,
     ) {
         use futures::FutureExt as _;
 
@@ -3014,9 +3048,9 @@ impl BackgroundScanner {
         // For these events, update events cannot be as precise, because we didn't
         // have the previous state loaded yet.
         self.phase = BackgroundScannerPhase::EventsReceivedDuringInitialScan;
-        if let Poll::Ready(Some(events)) = futures::poll!(events_rx.next()) {
+        if let Poll::Ready(Some(events)) = futures::poll!(fs_events_rx.next()) {
             let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
-            while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
+            while let Poll::Ready(Some(more_events)) = futures::poll!(fs_events_rx.next()) {
                 paths.extend(more_events.into_iter().map(|e| e.path));
             }
             self.process_events(paths).await;
@@ -3035,10 +3069,26 @@ impl BackgroundScanner {
                     }
                 }
 
-                events = events_rx.next().fuse() => {
+                path_prefix = self.path_prefixes_to_scan_rx.recv().fuse() => {
+                    let Ok(path_prefix) = path_prefix else { break };
+
+                    self.forcibly_load_paths(&[path_prefix.clone()]).await;
+
+                    let abs_path =
+                    {
+                        let mut state = self.state.lock();
+                        state.path_prefixes_to_scan.insert(path_prefix.clone());
+                        state.snapshot.abs_path.join(path_prefix)
+                    };
+                    if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() {
+                        self.process_events(vec![abs_path]).await;
+                    }
+                }
+
+                events = fs_events_rx.next().fuse() => {
                     let Some(events) = events else { break };
                     let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
-                    while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
+                    while let Poll::Ready(Some(more_events)) = futures::poll!(fs_events_rx.next()) {
                         paths.extend(more_events.into_iter().map(|e| e.path));
                     }
                     self.process_events(paths.clone()).await;
@@ -3050,7 +3100,7 @@ impl BackgroundScanner {
     async fn process_scan_request(&self, request: ScanRequest, scanning: bool) -> bool {
         log::debug!("rescanning paths {:?}", request.relative_paths);
 
-        let root_path = self.expand_paths(&request.relative_paths).await;
+        let root_path = self.forcibly_load_paths(&request.relative_paths).await;
         let root_canonical_path = match self.fs.canonicalize(&root_path).await {
             Ok(path) => path,
             Err(err) => {
@@ -3108,14 +3158,14 @@ impl BackgroundScanner {
             state.reload_repositories(&paths, self.fs.as_ref());
             state.snapshot.completed_scan_id = state.snapshot.scan_id;
             for (_, entry_id) in mem::take(&mut state.removed_entry_ids) {
-                state.expanded_dirs.remove(&entry_id);
+                state.scanned_dirs.remove(&entry_id);
             }
         }
 
         self.send_status_update(false, None);
     }
 
-    async fn expand_paths(&self, paths: &[Arc<Path>]) -> Arc<Path> {
+    async fn forcibly_load_paths(&self, paths: &[Arc<Path>]) -> Arc<Path> {
         let root_path;
         let (scan_job_tx, mut scan_job_rx) = channel::unbounded();
         {
@@ -3140,7 +3190,7 @@ impl BackgroundScanner {
                                     is_external: entry.is_external,
                                 })
                                 .unwrap();
-                            state.expanded_paths.insert(path.clone());
+                            state.paths_to_scan.insert(path.clone());
                             break;
                         }
                     }
@@ -3151,7 +3201,7 @@ impl BackgroundScanner {
         while let Some(job) = scan_job_rx.next().await {
             self.scan_dir(&job).await.log_err();
         }
-        self.state.lock().expanded_paths.clear();
+        self.state.lock().paths_to_scan.clear();
         root_path
     }
 
@@ -3414,18 +3464,19 @@ impl BackgroundScanner {
         let mut state = self.state.lock();
         let mut new_jobs = new_jobs.into_iter();
         for entry in &mut new_entries {
+            state.reuse_entry_id(entry);
+
             if entry.is_dir() {
                 let new_job = new_jobs.next().expect("missing scan job for entry");
-                if (entry.is_external || entry.is_ignored)
-                    && entry.kind == EntryKind::PendingDir
-                    && !state.is_path_expanded(&entry.path)
-                {
+                if state.should_scan_directory(&entry) {
+                    if let Some(new_job) = new_job {
+                        job.scan_queue
+                            .try_send(new_job)
+                            .expect("channel is unbounded");
+                    }
+                } else {
                     log::debug!("defer scanning directory {:?} {:?}", entry.path, entry.kind);
                     entry.kind = EntryKind::UnloadedDir;
-                } else if let Some(new_job) = new_job {
-                    job.scan_queue
-                        .try_send(new_job)
-                        .expect("channel is unbounded");
                 }
             }
         }
@@ -3865,14 +3916,14 @@ impl BackgroundScanner {
                         old_paths.next(&());
                     }
                     (None, Some(new_entry)) => {
+                        let is_newly_loaded = self.phase == InitialScan
+                            || last_newly_loaded_dir_path
+                                .as_ref()
+                                .map_or(false, |dir| new_entry.path.starts_with(&dir));
                         changes.push((
                             new_entry.path.clone(),
                             new_entry.id,
-                            if self.phase == InitialScan {
-                                Loaded
-                            } else {
-                                Added
-                            },
+                            if is_newly_loaded { Loaded } else { Added },
                         ));
                         new_paths.next(&());
                     }
@@ -3975,9 +4026,9 @@ impl WorktreeHandle for ModelHandle<Worktree> {
 struct TraversalProgress<'a> {
     max_path: &'a Path,
     count: usize,
-    visible_count: usize,
+    non_ignored_count: usize,
     file_count: usize,
-    visible_file_count: usize,
+    non_ignored_file_count: usize,
 }
 
 impl<'a> TraversalProgress<'a> {
@@ -3985,8 +4036,8 @@ impl<'a> TraversalProgress<'a> {
         match (include_ignored, include_dirs) {
             (true, true) => self.count,
             (true, false) => self.file_count,
-            (false, true) => self.visible_count,
-            (false, false) => self.visible_file_count,
+            (false, true) => self.non_ignored_count,
+            (false, false) => self.non_ignored_file_count,
         }
     }
 }
@@ -3995,9 +4046,9 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
     fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
         self.max_path = summary.max_path.as_ref();
         self.count += summary.count;
-        self.visible_count += summary.visible_count;
+        self.non_ignored_count += summary.non_ignored_count;
         self.file_count += summary.file_count;
-        self.visible_file_count += summary.visible_file_count;
+        self.non_ignored_file_count += summary.non_ignored_file_count;
     }
 }
 
@@ -4006,9 +4057,9 @@ impl<'a> Default for TraversalProgress<'a> {
         Self {
             max_path: Path::new(""),
             count: 0,
-            visible_count: 0,
+            non_ignored_count: 0,
             file_count: 0,
-            visible_file_count: 0,
+            non_ignored_file_count: 0,
         }
     }
 }
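
The decision that ties the scanner changes together is
BackgroundScannerState::should_scan_directory above: gitignored and external
directories stay unloaded unless they have been scanned before, were explicitly
requested, or fall under a prefix that a language server asked to watch. A toy
illustration of the prefix rule with stand-in types (the real state also tracks
snapshots, entry ids, and fs events):

    use std::{collections::HashSet, path::Path, sync::Arc};

    // Stand-in for the scanner state; only the prefix rule is modeled here.
    struct ScanPolicy {
        path_prefixes_to_scan: HashSet<Arc<Path>>,
    }

    impl ScanPolicy {
        fn should_scan(&self, path: &Path, is_ignored: bool, is_external: bool) -> bool {
            (!is_ignored && !is_external)
                || self
                    .path_prefixes_to_scan
                    .iter()
                    .any(|prefix| path.starts_with(prefix))
        }
    }

    fn main() {
        let mut policy = ScanPolicy {
            path_prefixes_to_scan: HashSet::new(),
        };
        // A gitignored directory is skipped until some watcher needs it...
        assert!(!policy.should_scan(Path::new("target/y/out"), true, false));
        // ...and becomes eligible once "target/y" is registered as a prefix to scan.
        policy
            .path_prefixes_to_scan
            .insert(Arc::from(Path::new("target/y")));
        assert!(policy.should_scan(Path::new("target/y/out"), true, false));
    }

This is also why the worktree_tests.rs call sites below switch from entries(false)
to entries(true): external entries now count as ignored for traversal purposes
(non_ignored_count), so the test has to include ignored entries to keep seeing the
symlinked, not-yet-scanned directories.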

crates/project/src/worktree_tests.rs

@@ -332,7 +332,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
     // The symlinked directories are not scanned by default.
     tree.read_with(cx, |tree, _| {
         assert_eq!(
-            tree.entries(false)
+            tree.entries(true)
                 .map(|entry| (entry.path.as_ref(), entry.is_external))
                 .collect::<Vec<_>>(),
             vec![
@@ -365,7 +365,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
     // not scanned yet.
     tree.read_with(cx, |tree, _| {
         assert_eq!(
-            tree.entries(false)
+            tree.entries(true)
                 .map(|entry| (entry.path.as_ref(), entry.is_external))
                 .collect::<Vec<_>>(),
             vec![
@@ -402,7 +402,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
     // The expanded subdirectory's contents are loaded.
     tree.read_with(cx, |tree, _| {
         assert_eq!(
-            tree.entries(false)
+            tree.entries(true)
                 .map(|entry| (entry.path.as_ref(), entry.is_external))
                 .collect::<Vec<_>>(),
             vec![