crates/project/src/project.rs
@@ -557,6 +557,7 @@ enum SearchMatchCandidate {
     },
     Path {
         worktree_id: WorktreeId,
+        is_ignored: bool,
         path: Arc<Path>,
     },
 }
@@ -5743,8 +5744,76 @@ impl Project {
                 .log_err();
         }

+        // TODO kb parallelize directory traversal
         background
             .scoped(|scope| {
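+                // Scan candidate paths in parallel: each worker takes a contiguous
+                // slice of the paths across all worktree snapshots.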
+                for worker_ix in 0..workers {
+                    let worker_start_ix = worker_ix * paths_per_worker;
+                    let worker_end_ix = worker_start_ix + paths_per_worker;
+                    let unnamed_buffers = opened_buffers.clone();
+                    scope.spawn(async move {
+                        let mut snapshot_start_ix = 0;
+                        let mut abs_path = PathBuf::new();
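+                        // Map this worker's global path range onto each snapshot's files.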
+                        for snapshot in snapshots {
+                            let snapshot_end_ix = snapshot_start_ix
+                                + if query.include_ignored() {
+                                    snapshot.file_count()
+                                } else {
+                                    snapshot.visible_file_count()
+                                };
+                            if worker_end_ix <= snapshot_start_ix {
+                                break;
+                            } else if worker_start_ix > snapshot_end_ix {
+                                snapshot_start_ix = snapshot_end_ix;
+                                continue;
+                            } else {
+                                let start_in_snapshot =
+                                    worker_start_ix.saturating_sub(snapshot_start_ix);
+                                let end_in_snapshot =
+                                    cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
+
+                                for entry in snapshot
+                                    .files(query.include_ignored(), start_in_snapshot)
+                                    .take(end_in_snapshot - start_in_snapshot)
+                                {
+                                    if matching_paths_tx.is_closed() {
+                                        break;
+                                    }
+                                    if unnamed_buffers.contains_key(&entry.path) {
+                                        continue;
+                                    }
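+                                    // Check the path against the query's file filters
+                                    // before reading the file contents from disk.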
+                                    let matches = if query.file_matches(Some(&entry.path)) {
+                                        abs_path.clear();
+                                        abs_path.push(&snapshot.abs_path());
+                                        abs_path.push(&entry.path);
+                                        if let Some(file) = fs.open_sync(&abs_path).await.log_err()
+                                        {
+                                            query.detect(file).unwrap_or(false)
+                                        } else {
+                                            false
+                                        }
+                                    } else {
+                                        false
+                                    };
+
+                                    if matches {
+                                        let project_path = SearchMatchCandidate::Path {
+                                            worktree_id: snapshot.id(),
+                                            path: entry.path.clone(),
+                                            is_ignored: entry.is_ignored,
+                                        };
+                                        if matching_paths_tx.send(project_path).await.is_err() {
+                                            break;
+                                        }
+                                    }
+                                }
+
+                                snapshot_start_ix = snapshot_end_ix;
+                            }
+                        }
+                    });
+                }
+
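+                // When ignored files are included, entries inside ignored
+                // directories are found by walking them on disk.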
                 if query.include_ignored() {
                     scope.spawn(async move {
                         for snapshot in snapshots {
@@ -5784,8 +5853,7 @@ impl Project {
                                             {
                                                 while let Some(subfile) = subfiles.next().await {
                                                     if let Some(subfile) = subfile.log_err() {
-                                                        ignored_paths_to_process
-                                                            .push_front(subfile);
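+                                                        // Queue nested entries so the contents of
+                                                        // ignored directories are processed too.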
+                                                        ignored_paths_to_process.push_back(subfile);
                                                     }
                                                 }
                                             }
@@ -5807,7 +5875,14 @@ impl Project {
                                             if matches {
                                                 let project_path = SearchMatchCandidate::Path {
                                                     worktree_id: snapshot.id(),
-                                                    path: ignored_entry.path.clone(),
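+                                                    // Ignored entries are reached via absolute paths,
+                                                    // so store the path relative to the worktree root.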
+                                                    path: Arc::from(
+                                                        ignored_abs_path
+                                                            .strip_prefix(snapshot.abs_path())
+                                                            .expect(
+                                                                "scanning worktree-related files",
+                                                            ),
+                                                    ),
+                                                    is_ignored: true,
                                                 };
                                                 if matching_paths_tx
                                                     .send(project_path)
@@ -5824,72 +5899,6 @@ impl Project {
                         }
                     });
                 }
-
-                for worker_ix in 0..workers {
-                    let worker_start_ix = worker_ix * paths_per_worker;
-                    let worker_end_ix = worker_start_ix + paths_per_worker;
-                    let unnamed_buffers = opened_buffers.clone();
-                    scope.spawn(async move {
-                        let mut snapshot_start_ix = 0;
-                        let mut abs_path = PathBuf::new();
-                        for snapshot in snapshots {
-                            let snapshot_end_ix = snapshot_start_ix
-                                + if query.include_ignored() {
-                                    snapshot.file_count()
-                                } else {
-                                    snapshot.visible_file_count()
-                                };
-                            if worker_end_ix <= snapshot_start_ix {
-                                break;
-                            } else if worker_start_ix > snapshot_end_ix {
-                                snapshot_start_ix = snapshot_end_ix;
-                                continue;
-                            } else {
-                                let start_in_snapshot =
-                                    worker_start_ix.saturating_sub(snapshot_start_ix);
-                                let end_in_snapshot =
-                                    cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
-
-                                for entry in snapshot
-                                    .files(query.include_ignored(), start_in_snapshot)
-                                    .take(end_in_snapshot - start_in_snapshot)
-                                {
-                                    if matching_paths_tx.is_closed() {
-                                        break;
-                                    }
-                                    if unnamed_buffers.contains_key(&entry.path) {
-                                        continue;
-                                    }
-                                    let matches = if query.file_matches(Some(&entry.path)) {
-                                        abs_path.clear();
-                                        abs_path.push(&snapshot.abs_path());
-                                        abs_path.push(&entry.path);
-                                        if let Some(file) = fs.open_sync(&abs_path).await.log_err()
-                                        {
-                                            query.detect(file).unwrap_or(false)
-                                        } else {
-                                            false
-                                        }
-                                    } else {
-                                        false
-                                    };
-
-                                    if matches {
-                                        let project_path = SearchMatchCandidate::Path {
-                                            worktree_id: snapshot.id(),
-                                            path: entry.path.clone(),
-                                        };
-                                        if matching_paths_tx.send(project_path).await.is_err() {
-                                            break;
-                                        }
-                                    }
-                                }
-
-                                snapshot_start_ix = snapshot_end_ix;
-                            }
-                        }
-                    });
-                }
             })
             .await;
     }
@@ -5998,11 +6007,24 @@ impl Project {
         let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
         let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
         cx.spawn(|this, cx| async move {
-            let mut buffers = vec![];
+            let mut buffers = Vec::new();
+            let mut ignored_buffers = Vec::new();
             while let Some(entry) = matching_paths_rx.next().await {
-                buffers.push(entry);
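+                // Collect matches in ignored files separately so they can be
+                // sorted and appended after the regular results.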
+                if matches!(
+                    entry,
+                    SearchMatchCandidate::Path {
+                        is_ignored: true,
+                        ..
+                    }
+                ) {
+                    ignored_buffers.push(entry);
+                } else {
+                    buffers.push(entry);
+                }
             }
             buffers.sort_by_key(|candidate| candidate.path());
+            ignored_buffers.sort_by_key(|candidate| candidate.path());
+            buffers.extend(ignored_buffers);
             let matching_paths = buffers.clone();
             let _ = sorted_buffers_tx.send(buffers);
             for (index, candidate) in matching_paths.into_iter().enumerate() {
@@ -6014,7 +6036,9 @@ impl Project {
                 cx.spawn(|mut cx| async move {
                     let buffer = match candidate {
                         SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
-                        SearchMatchCandidate::Path { worktree_id, path } => this
+                        SearchMatchCandidate::Path {
+                            worktree_id, path, ..
+                        } => this
                             .update(&mut cx, |this, cx| {
                                 this.open_buffer((worktree_id, path), cx)
                             })