1use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
2
3use super::{
4 fs::{self, Fs},
5 ignore::IgnoreStack,
6 DiagnosticSummary,
7};
8use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
9use anyhow::{anyhow, Context, Result};
10use client::{proto, Client};
11use clock::ReplicaId;
12use collections::{HashMap, VecDeque};
13use futures::{
14 channel::{
15 mpsc::{self, UnboundedSender},
16 oneshot,
17 },
18 Stream, StreamExt,
19};
20use fuzzy::CharBag;
21use git2::Repository;
22use gpui::{
23 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
24 Task,
25};
26use language::{
27 proto::{deserialize_version, serialize_line_ending, serialize_version},
28 Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope,
29};
30use lazy_static::lazy_static;
31use parking_lot::Mutex;
32use postage::{
33 prelude::{Sink as _, Stream as _},
34 watch,
35};
36use settings::Settings;
37use smol::channel::{self, Sender};
38use std::{
39 any::Any,
40 cmp::{self, Ordering},
41 convert::TryFrom,
42 ffi::{OsStr, OsString},
43 fmt,
44 future::Future,
45 mem,
46 ops::{Deref, DerefMut},
47 os::unix::prelude::{OsStrExt, OsStringExt},
48 path::{Path, PathBuf},
49 sync::{atomic::AtomicUsize, Arc},
50 task::Poll,
51 time::{Duration, SystemTime},
52};
53use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
54use util::{ResultExt, TryFutureExt};
55
56lazy_static! {
57 static ref DOT_GIT: &'static OsStr = OsStr::new(".git");
58 static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
59}
60
61#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
62pub struct WorktreeId(usize);
63
64#[allow(clippy::large_enum_variant)]
65pub enum Worktree {
66 Local(LocalWorktree),
67 Remote(RemoteWorktree),
68}
69
70pub struct LocalWorktree {
71 snapshot: LocalSnapshot,
72 background_snapshot: Arc<Mutex<LocalSnapshot>>,
73 last_scan_state_rx: watch::Receiver<ScanState>,
74 _background_scanner_task: Option<Task<()>>,
75 poll_task: Option<Task<()>>,
76 share: Option<ShareState>,
77 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
78 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
79 client: Arc<Client>,
80 fs: Arc<dyn Fs>,
81 visible: bool,
82}
83
84pub struct RemoteWorktree {
85 pub snapshot: Snapshot,
86 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
87 project_id: u64,
88 client: Arc<Client>,
89 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
90 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
91 replica_id: ReplicaId,
92 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
93 visible: bool,
94}
95
96#[derive(Clone)]
97pub struct Snapshot {
98 id: WorktreeId,
99 root_name: String,
100 root_char_bag: CharBag,
101 entries_by_path: SumTree<Entry>,
102 entries_by_id: SumTree<PathEntry>,
103 scan_id: usize,
104 is_complete: bool,
105}
106
107//
108
109// 'GitResolver'
110// File paths <-> Repository Paths -> git_repository_path() -> First .git in an ancestor in a path
111// Repository Paths <-> Repository Pointers -> git_repository_open()
112// fs.watch() ^
113//
114// Folder: where all the git magic happens
115// .git IT
116// OR it can be a file that points somewhere else
117
118// 1. Walk through the file tree, looking for .git files or folders
119// 2. When we discover them, open and save a libgit2 pointer to the repository
120// 2a. Use git_repository_path() to start a watch on the repository (if not already watched)
121//
122// File paths -> Git repository == Ancestor check (is there a .git in an ancestor folder)
123// Git repository -> Files == Descendent check (subtracting out any intersecting .git folders)
124
125#[derive(Clone)]
126pub struct LocalSnapshot {
127 abs_path: Arc<Path>,
128 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
129 git_repositories: Vec<GitRepositoryState>,
130 removed_entry_ids: HashMap<u64, ProjectEntryId>,
131 next_entry_id: Arc<AtomicUsize>,
132 snapshot: Snapshot,
133 extension_counts: HashMap<OsString, usize>,
134}
135
136#[derive(Clone)]
137pub struct GitRepositoryState {
138 content_path: Arc<Path>,
139 git_dir_path: Arc<Path>,
140 scan_id: usize,
141 repository: Arc<Mutex<git2::Repository>>,
142}
143
144impl Deref for LocalSnapshot {
145 type Target = Snapshot;
146
147 fn deref(&self) -> &Self::Target {
148 &self.snapshot
149 }
150}
151
152impl DerefMut for LocalSnapshot {
153 fn deref_mut(&mut self) -> &mut Self::Target {
154 &mut self.snapshot
155 }
156}
157
#[derive(Clone, Debug)]
enum ScanState {
    /// The worktree has finished scanning and is up to date.
    Idle,
    /// The worktree is performing its initial scan of the filesystem.
    Initializing,
    /// The worktree is updating in response to filesystem events.
    Updating,
    /// Scanning failed with the given error.
    Err(Arc<anyhow::Error>),
}
167
168struct ShareState {
169 project_id: u64,
170 snapshots_tx: watch::Sender<LocalSnapshot>,
171 _maintain_remote_snapshot: Option<Task<Option<()>>>,
172}
173
174pub enum Event {
175 UpdatedEntries,
176 UpdatedGitRepositories(Vec<GitRepositoryState>),
177}
178
179impl Entity for Worktree {
180 type Event = Event;
181}
182
183impl Worktree {
184 pub async fn local(
185 client: Arc<Client>,
186 path: impl Into<Arc<Path>>,
187 visible: bool,
188 fs: Arc<dyn Fs>,
189 next_entry_id: Arc<AtomicUsize>,
190 cx: &mut AsyncAppContext,
191 ) -> Result<ModelHandle<Self>> {
192 let (tree, scan_states_tx) =
193 LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
194 tree.update(cx, |tree, cx| {
195 let tree = tree.as_local_mut().unwrap();
196 let abs_path = tree.abs_path().clone();
197 let background_snapshot = tree.background_snapshot.clone();
198 let background = cx.background().clone();
199 tree._background_scanner_task = Some(cx.background().spawn(async move {
200 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
201 let scanner =
202 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
203 scanner.run(events).await;
204 }));
205 });
206 Ok(tree)
207 }
208
209 pub fn remote(
210 project_remote_id: u64,
211 replica_id: ReplicaId,
212 worktree: proto::WorktreeMetadata,
213 client: Arc<Client>,
214 cx: &mut MutableAppContext,
215 ) -> ModelHandle<Self> {
216 let remote_id = worktree.id;
217 let root_char_bag: CharBag = worktree
218 .root_name
219 .chars()
220 .map(|c| c.to_ascii_lowercase())
221 .collect();
222 let root_name = worktree.root_name.clone();
223 let visible = worktree.visible;
224 let snapshot = Snapshot {
225 id: WorktreeId(remote_id as usize),
226 root_name,
227 root_char_bag,
228 entries_by_path: Default::default(),
229 entries_by_id: Default::default(),
230 scan_id: 0,
231 is_complete: false,
232 };
233
234 let (updates_tx, mut updates_rx) = mpsc::unbounded();
235 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
236 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
237 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
238 Worktree::Remote(RemoteWorktree {
239 project_id: project_remote_id,
240 replica_id,
241 snapshot: snapshot.clone(),
242 background_snapshot: background_snapshot.clone(),
243 updates_tx: Some(updates_tx),
244 snapshot_subscriptions: Default::default(),
245 client: client.clone(),
246 diagnostic_summaries: Default::default(),
247 visible,
248 })
249 });
250
251 cx.background()
252 .spawn(async move {
253 while let Some(update) = updates_rx.next().await {
254 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
255 log::error!("error applying worktree update: {}", error);
256 }
257 snapshot_updated_tx.send(()).await.ok();
258 }
259 })
260 .detach();
261
262 cx.spawn(|mut cx| {
263 let this = worktree_handle.downgrade();
264 async move {
265 while (snapshot_updated_rx.recv().await).is_some() {
266 if let Some(this) = this.upgrade(&cx) {
267 this.update(&mut cx, |this, cx| {
268 this.poll_snapshot(cx);
269 let this = this.as_remote_mut().unwrap();
270 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
271 if this.observed_snapshot(*scan_id) {
272 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
273 let _ = tx.send(());
274 } else {
275 break;
276 }
277 }
278 });
279 } else {
280 break;
281 }
282 }
283 }
284 })
285 .detach();
286
287 worktree_handle
288 }
289
290 pub fn as_local(&self) -> Option<&LocalWorktree> {
291 if let Worktree::Local(worktree) = self {
292 Some(worktree)
293 } else {
294 None
295 }
296 }
297
298 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
299 if let Worktree::Remote(worktree) = self {
300 Some(worktree)
301 } else {
302 None
303 }
304 }
305
306 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
307 if let Worktree::Local(worktree) = self {
308 Some(worktree)
309 } else {
310 None
311 }
312 }
313
314 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
315 if let Worktree::Remote(worktree) = self {
316 Some(worktree)
317 } else {
318 None
319 }
320 }
321
322 pub fn is_local(&self) -> bool {
323 matches!(self, Worktree::Local(_))
324 }
325
326 pub fn is_remote(&self) -> bool {
327 !self.is_local()
328 }
329
330 pub fn snapshot(&self) -> Snapshot {
331 match self {
332 Worktree::Local(worktree) => worktree.snapshot().snapshot,
333 Worktree::Remote(worktree) => worktree.snapshot(),
334 }
335 }
336
337 pub fn scan_id(&self) -> usize {
338 match self {
339 Worktree::Local(worktree) => worktree.snapshot.scan_id,
340 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
341 }
342 }
343
344 pub fn is_visible(&self) -> bool {
345 match self {
346 Worktree::Local(worktree) => worktree.visible,
347 Worktree::Remote(worktree) => worktree.visible,
348 }
349 }
350
351 pub fn replica_id(&self) -> ReplicaId {
352 match self {
353 Worktree::Local(_) => 0,
354 Worktree::Remote(worktree) => worktree.replica_id,
355 }
356 }
357
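    /// Iterates over the worktree-relative paths that currently have diagnostics,
    /// along with a summary of their error and warning counts.
    ///
    /// A minimal sketch, assuming a hypothetical `worktree: &Worktree` that already has
    /// diagnostics:
    ///
    /// ```ignore
    /// for (path, summary) in worktree.diagnostic_summaries() {
    ///     // `summary` carries per-path error and warning counts.
    ///     println!("{}: {} errors", path.display(), summary.error_count);
    /// }
    /// ```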
358 pub fn diagnostic_summaries(
359 &self,
360 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
361 match self {
362 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
363 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
364 }
365 .iter()
366 .map(|(path, summary)| (path.0.clone(), *summary))
367 }
368
369 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
370 match self {
371 Self::Local(worktree) => worktree.poll_snapshot(false, cx),
372 Self::Remote(worktree) => worktree.poll_snapshot(cx),
373 };
374 }
375}
376
377impl LocalWorktree {
378 async fn create(
379 client: Arc<Client>,
380 path: impl Into<Arc<Path>>,
381 visible: bool,
382 fs: Arc<dyn Fs>,
383 next_entry_id: Arc<AtomicUsize>,
384 cx: &mut AsyncAppContext,
385 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
386 let abs_path = path.into();
387 let path: Arc<Path> = Arc::from(Path::new(""));
388
        // Populate the snapshot's "root name", which is used for fuzzy matching.
391 let root_name = abs_path
392 .file_name()
393 .map_or(String::new(), |f| f.to_string_lossy().to_string());
394 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
395 let metadata = fs
396 .metadata(&abs_path)
397 .await
398 .context("failed to stat worktree path")?;
399
400 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
401 let (mut last_scan_state_tx, last_scan_state_rx) =
402 watch::channel_with(ScanState::Initializing);
403 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
404 let mut snapshot = LocalSnapshot {
405 abs_path,
406 ignores_by_parent_abs_path: Default::default(),
407 git_repositories: Default::default(),
408 removed_entry_ids: Default::default(),
409 next_entry_id,
410 snapshot: Snapshot {
411 id: WorktreeId::from_usize(cx.model_id()),
412 root_name: root_name.clone(),
413 root_char_bag,
414 entries_by_path: Default::default(),
415 entries_by_id: Default::default(),
416 scan_id: 0,
417 is_complete: true,
418 },
419 extension_counts: Default::default(),
420 };
421 if let Some(metadata) = metadata {
422 let entry = Entry::new(
423 path,
424 &metadata,
425 &snapshot.next_entry_id,
426 snapshot.root_char_bag,
427 );
428 snapshot.insert_entry(entry, fs.as_ref());
429 }
430
431 let tree = Self {
432 snapshot: snapshot.clone(),
433 background_snapshot: Arc::new(Mutex::new(snapshot)),
434 last_scan_state_rx,
435 _background_scanner_task: None,
436 share: None,
437 poll_task: None,
438 diagnostics: Default::default(),
439 diagnostic_summaries: Default::default(),
440 client,
441 fs,
442 visible,
443 };
444
445 cx.spawn_weak(|this, mut cx| async move {
446 while let Some(scan_state) = scan_states_rx.next().await {
447 if let Some(this) = this.upgrade(&cx) {
448 last_scan_state_tx.blocking_send(scan_state).ok();
449 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
450 } else {
451 break;
452 }
453 }
454 })
455 .detach();
456
457 Worktree::Local(tree)
458 });
459
460 Ok((tree, scan_states_tx))
461 }
462
463 pub fn contains_abs_path(&self, path: &Path) -> bool {
464 path.starts_with(&self.abs_path)
465 }
466
467 fn absolutize(&self, path: &Path) -> PathBuf {
468 if path.file_name().is_some() {
469 self.abs_path.join(path)
470 } else {
471 self.abs_path.to_path_buf()
472 }
473 }
474
475 pub(crate) fn load_buffer(
476 &mut self,
477 path: &Path,
478 cx: &mut ModelContext<Worktree>,
479 ) -> Task<Result<ModelHandle<Buffer>>> {
480 let path = Arc::from(path);
481 cx.spawn(move |this, mut cx| async move {
482 let (file, contents, head_text) = this
483 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
484 .await?;
485 Ok(cx.add_model(|cx| {
486 let mut buffer = Buffer::from_file(0, contents, head_text, Arc::new(file), cx);
487 buffer.update_git(cx);
488 buffer
489 }))
490 })
491 }
492
493 pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
494 self.diagnostics.get(path).cloned()
495 }
496
497 pub fn update_diagnostics(
498 &mut self,
499 language_server_id: usize,
500 worktree_path: Arc<Path>,
501 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
502 _: &mut ModelContext<Worktree>,
503 ) -> Result<bool> {
504 self.diagnostics.remove(&worktree_path);
505 let old_summary = self
506 .diagnostic_summaries
507 .remove(&PathKey(worktree_path.clone()))
508 .unwrap_or_default();
509 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
510 if !new_summary.is_empty() {
511 self.diagnostic_summaries
512 .insert(PathKey(worktree_path.clone()), new_summary);
513 self.diagnostics.insert(worktree_path.clone(), diagnostics);
514 }
515
516 let updated = !old_summary.is_empty() || !new_summary.is_empty();
517 if updated {
518 if let Some(share) = self.share.as_ref() {
519 self.client
520 .send(proto::UpdateDiagnosticSummary {
521 project_id: share.project_id,
522 worktree_id: self.id().to_proto(),
523 summary: Some(proto::DiagnosticSummary {
524 path: worktree_path.to_string_lossy().to_string(),
525 language_server_id: language_server_id as u64,
526 error_count: new_summary.error_count as u32,
527 warning_count: new_summary.warning_count as u32,
528 }),
529 })
530 .log_err();
531 }
532 }
533
534 Ok(updated)
535 }
536
537 fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
538 self.poll_task.take();
539
540 match self.scan_state() {
541 ScanState::Idle => {
542 self.snapshot = self.background_snapshot.lock().clone();
543 if let Some(share) = self.share.as_mut() {
544 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
545 }
546 cx.emit(Event::UpdatedEntries);
547 }
548
549 ScanState::Initializing => {
550 let is_fake_fs = self.fs.is_fake();
551 self.snapshot = self.background_snapshot.lock().clone();
552 self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
553 if is_fake_fs {
554 #[cfg(any(test, feature = "test-support"))]
555 cx.background().simulate_random_delay().await;
556 } else {
557 smol::Timer::after(Duration::from_millis(100)).await;
558 }
559 if let Some(this) = this.upgrade(&cx) {
560 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
561 }
562 }));
563 cx.emit(Event::UpdatedEntries);
564 }
565
566 _ => {
567 if force {
568 self.snapshot = self.background_snapshot.lock().clone();
569 }
570 }
571 }
572
573 cx.notify();
574 }
575
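    /// Returns a future that resolves once the in-progress scan (initial or incremental)
    /// has finished.
    ///
    /// A rough usage sketch; `worktree` is a hypothetical `&LocalWorktree`:
    ///
    /// ```ignore
    /// // Wait for the background scanner to catch up before inspecting entries.
    /// worktree.scan_complete().await;
    /// let snapshot = worktree.snapshot();
    /// println!("{} files scanned", snapshot.file_count());
    /// ```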
576 pub fn scan_complete(&self) -> impl Future<Output = ()> {
577 let mut scan_state_rx = self.last_scan_state_rx.clone();
578 async move {
579 let mut scan_state = Some(scan_state_rx.borrow().clone());
580 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
581 scan_state = scan_state_rx.recv().await;
582 }
583 }
584 }
585
586 fn scan_state(&self) -> ScanState {
587 self.last_scan_state_rx.borrow().clone()
588 }
589
590 pub fn snapshot(&self) -> LocalSnapshot {
591 self.snapshot.clone()
592 }
593
594 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
595 proto::WorktreeMetadata {
596 id: self.id().to_proto(),
597 root_name: self.root_name().to_string(),
598 visible: self.visible,
599 }
600 }
601
602 fn load(
603 &self,
604 path: &Path,
605 cx: &mut ModelContext<Worktree>,
606 ) -> Task<Result<(File, String, Option<String>)>> {
607 let handle = cx.handle();
608 let path = Arc::from(path);
609 let abs_path = self.absolutize(&path);
610 let fs = self.fs.clone();
611
612 let files_included = cx
613 .global::<Settings>()
614 .editor_overrides
615 .git_gutter
616 .unwrap_or_default()
617 .files_included;
618
619 cx.spawn(|this, mut cx| async move {
620 let text = fs.load(&abs_path).await?;
621
622 let head_text = if matches!(
623 files_included,
624 settings::GitFilesIncluded::All | settings::GitFilesIncluded::OnlyTracked
625 ) {
626 let fs = fs.clone();
627 let abs_path = abs_path.clone();
628 let task = async move { fs.load_head_text(&abs_path).await };
629 let results = cx.background().spawn(task).await;
630
631 if files_included == settings::GitFilesIncluded::All {
632 results.or_else(|| Some(text.clone()))
633 } else {
634 results
635 }
636 } else {
637 None
638 };
639
640 // Eagerly populate the snapshot with an updated entry for the loaded file
641 let entry = this
642 .update(&mut cx, |this, cx| {
643 this.as_local()
644 .unwrap()
645 .refresh_entry(path, abs_path, None, cx)
646 })
647 .await?;
648
649 Ok((
650 File {
651 entry_id: Some(entry.id),
652 worktree: handle,
653 path: entry.path,
654 mtime: entry.mtime,
655 is_local: true,
656 },
657 text,
658 head_text,
659 ))
660 })
661 }
662
663 pub fn save_buffer_as(
664 &self,
665 buffer_handle: ModelHandle<Buffer>,
666 path: impl Into<Arc<Path>>,
667 cx: &mut ModelContext<Worktree>,
668 ) -> Task<Result<()>> {
669 let buffer = buffer_handle.read(cx);
670 let text = buffer.as_rope().clone();
671 let fingerprint = text.fingerprint();
672 let version = buffer.version();
673 let save = self.write_file(path, text, buffer.line_ending(), cx);
674 let handle = cx.handle();
675 cx.as_mut().spawn(|mut cx| async move {
676 let entry = save.await?;
677 let file = File {
678 entry_id: Some(entry.id),
679 worktree: handle,
680 path: entry.path,
681 mtime: entry.mtime,
682 is_local: true,
683 };
684
685 buffer_handle.update(&mut cx, |buffer, cx| {
686 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
687 });
688
689 Ok(())
690 })
691 }
692
693 pub fn create_entry(
694 &self,
695 path: impl Into<Arc<Path>>,
696 is_dir: bool,
697 cx: &mut ModelContext<Worktree>,
698 ) -> Task<Result<Entry>> {
699 self.write_entry_internal(
700 path,
701 if is_dir {
702 None
703 } else {
704 Some(Default::default())
705 },
706 cx,
707 )
708 }
709
710 pub fn write_file(
711 &self,
712 path: impl Into<Arc<Path>>,
713 text: Rope,
714 line_ending: LineEnding,
715 cx: &mut ModelContext<Worktree>,
716 ) -> Task<Result<Entry>> {
717 self.write_entry_internal(path, Some((text, line_ending)), cx)
718 }
719
720 pub fn delete_entry(
721 &self,
722 entry_id: ProjectEntryId,
723 cx: &mut ModelContext<Worktree>,
724 ) -> Option<Task<Result<()>>> {
725 let entry = self.entry_for_id(entry_id)?.clone();
726 let abs_path = self.absolutize(&entry.path);
727 let delete = cx.background().spawn({
728 let fs = self.fs.clone();
729 let abs_path = abs_path;
730 async move {
731 if entry.is_file() {
732 fs.remove_file(&abs_path, Default::default()).await
733 } else {
734 fs.remove_dir(
735 &abs_path,
736 RemoveOptions {
737 recursive: true,
738 ignore_if_not_exists: false,
739 },
740 )
741 .await
742 }
743 }
744 });
745
746 Some(cx.spawn(|this, mut cx| async move {
747 delete.await?;
748 this.update(&mut cx, |this, cx| {
749 let this = this.as_local_mut().unwrap();
750 {
751 let mut snapshot = this.background_snapshot.lock();
752 snapshot.delete_entry(entry_id);
753 }
754 this.poll_snapshot(true, cx);
755 });
756 Ok(())
757 }))
758 }
759
760 pub fn rename_entry(
761 &self,
762 entry_id: ProjectEntryId,
763 new_path: impl Into<Arc<Path>>,
764 cx: &mut ModelContext<Worktree>,
765 ) -> Option<Task<Result<Entry>>> {
766 let old_path = self.entry_for_id(entry_id)?.path.clone();
767 let new_path = new_path.into();
768 let abs_old_path = self.absolutize(&old_path);
769 let abs_new_path = self.absolutize(&new_path);
770 let rename = cx.background().spawn({
771 let fs = self.fs.clone();
772 let abs_new_path = abs_new_path.clone();
773 async move {
774 fs.rename(&abs_old_path, &abs_new_path, Default::default())
775 .await
776 }
777 });
778
779 Some(cx.spawn(|this, mut cx| async move {
780 rename.await?;
781 let entry = this
782 .update(&mut cx, |this, cx| {
783 this.as_local_mut().unwrap().refresh_entry(
784 new_path.clone(),
785 abs_new_path,
786 Some(old_path),
787 cx,
788 )
789 })
790 .await?;
791 Ok(entry)
792 }))
793 }
794
795 pub fn copy_entry(
796 &self,
797 entry_id: ProjectEntryId,
798 new_path: impl Into<Arc<Path>>,
799 cx: &mut ModelContext<Worktree>,
800 ) -> Option<Task<Result<Entry>>> {
801 let old_path = self.entry_for_id(entry_id)?.path.clone();
802 let new_path = new_path.into();
803 let abs_old_path = self.absolutize(&old_path);
804 let abs_new_path = self.absolutize(&new_path);
805 let copy = cx.background().spawn({
806 let fs = self.fs.clone();
807 let abs_new_path = abs_new_path.clone();
808 async move {
809 copy_recursive(
810 fs.as_ref(),
811 &abs_old_path,
812 &abs_new_path,
813 Default::default(),
814 )
815 .await
816 }
817 });
818
819 Some(cx.spawn(|this, mut cx| async move {
820 copy.await?;
821 let entry = this
822 .update(&mut cx, |this, cx| {
823 this.as_local_mut().unwrap().refresh_entry(
824 new_path.clone(),
825 abs_new_path,
826 None,
827 cx,
828 )
829 })
830 .await?;
831 Ok(entry)
832 }))
833 }
834
835 fn write_entry_internal(
836 &self,
837 path: impl Into<Arc<Path>>,
838 text_if_file: Option<(Rope, LineEnding)>,
839 cx: &mut ModelContext<Worktree>,
840 ) -> Task<Result<Entry>> {
841 let path = path.into();
842 let abs_path = self.absolutize(&path);
843 let write = cx.background().spawn({
844 let fs = self.fs.clone();
845 let abs_path = abs_path.clone();
846 async move {
847 if let Some((text, line_ending)) = text_if_file {
848 fs.save(&abs_path, &text, line_ending).await
849 } else {
850 fs.create_dir(&abs_path).await
851 }
852 }
853 });
854
855 cx.spawn(|this, mut cx| async move {
856 write.await?;
857 let entry = this
858 .update(&mut cx, |this, cx| {
859 this.as_local_mut()
860 .unwrap()
861 .refresh_entry(path, abs_path, None, cx)
862 })
863 .await?;
864 Ok(entry)
865 })
866 }
867
868 fn refresh_entry(
869 &self,
870 path: Arc<Path>,
871 abs_path: PathBuf,
872 old_path: Option<Arc<Path>>,
873 cx: &mut ModelContext<Worktree>,
874 ) -> Task<Result<Entry>> {
875 let fs = self.fs.clone();
876 let root_char_bag;
877 let next_entry_id;
878 {
879 let snapshot = self.background_snapshot.lock();
880 root_char_bag = snapshot.root_char_bag;
881 next_entry_id = snapshot.next_entry_id.clone();
882 }
883 cx.spawn_weak(|this, mut cx| async move {
884 let metadata = fs
885 .metadata(&abs_path)
886 .await?
887 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
888 let this = this
889 .upgrade(&cx)
890 .ok_or_else(|| anyhow!("worktree was dropped"))?;
891 this.update(&mut cx, |this, cx| {
892 let this = this.as_local_mut().unwrap();
893 let inserted_entry;
894 {
895 let mut snapshot = this.background_snapshot.lock();
896 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
897 entry.is_ignored = snapshot
898 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
899 .is_abs_path_ignored(&abs_path, entry.is_dir());
900 if let Some(old_path) = old_path {
901 snapshot.remove_path(&old_path);
902 }
903 inserted_entry = snapshot.insert_entry(entry, fs.as_ref());
904 snapshot.scan_id += 1;
905 }
906 this.poll_snapshot(true, cx);
907 Ok(inserted_entry)
908 })
909 })
910 }
911
912 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
913 let (share_tx, share_rx) = oneshot::channel();
914
915 if self.share.is_some() {
916 let _ = share_tx.send(Ok(()));
917 } else {
918 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
919 let rpc = self.client.clone();
920 let worktree_id = cx.model_id() as u64;
921 let maintain_remote_snapshot = cx.background().spawn({
922 let rpc = rpc;
923 let diagnostic_summaries = self.diagnostic_summaries.clone();
924 async move {
925 let mut prev_snapshot = match snapshots_rx.recv().await {
926 Some(snapshot) => {
927 let update = proto::UpdateWorktree {
928 project_id,
929 worktree_id,
930 root_name: snapshot.root_name().to_string(),
931 updated_entries: snapshot
932 .entries_by_path
933 .iter()
934 .map(Into::into)
935 .collect(),
936 removed_entries: Default::default(),
937 scan_id: snapshot.scan_id as u64,
938 is_last_update: true,
939 };
940 if let Err(error) = send_worktree_update(&rpc, update).await {
941 let _ = share_tx.send(Err(error));
942 return Err(anyhow!("failed to send initial update worktree"));
943 } else {
944 let _ = share_tx.send(Ok(()));
945 snapshot
946 }
947 }
948 None => {
949 share_tx
950 .send(Err(anyhow!("worktree dropped before share completed")))
951 .ok();
952 return Err(anyhow!("failed to send initial update worktree"));
953 }
954 };
955
956 for (path, summary) in diagnostic_summaries.iter() {
957 rpc.send(proto::UpdateDiagnosticSummary {
958 project_id,
959 worktree_id,
960 summary: Some(summary.to_proto(&path.0)),
961 })?;
962 }
963
964 while let Some(snapshot) = snapshots_rx.recv().await {
965 send_worktree_update(
966 &rpc,
967 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true),
968 )
969 .await?;
970 prev_snapshot = snapshot;
971 }
972
973 Ok::<_, anyhow::Error>(())
974 }
975 .log_err()
976 });
977 self.share = Some(ShareState {
978 project_id,
979 snapshots_tx,
980 _maintain_remote_snapshot: Some(maintain_remote_snapshot),
981 });
982 }
983
984 cx.foreground().spawn(async move {
985 share_rx
986 .await
987 .unwrap_or_else(|_| Err(anyhow!("share ended")))
988 })
989 }
990
991 pub fn unshare(&mut self) {
992 self.share.take();
993 }
994
995 pub fn is_shared(&self) -> bool {
996 self.share.is_some()
997 }
998
999 pub fn send_extension_counts(&self, project_id: u64) {
1000 let mut extensions = Vec::new();
1001 let mut counts = Vec::new();
1002
1003 for (extension, count) in self.extension_counts() {
1004 extensions.push(extension.to_string_lossy().to_string());
1005 counts.push(*count as u32);
1006 }
1007
1008 self.client
1009 .send(proto::UpdateWorktreeExtensions {
1010 project_id,
1011 worktree_id: self.id().to_proto(),
1012 extensions,
1013 counts,
1014 })
1015 .log_err();
1016 }
1017}
1018
1019impl RemoteWorktree {
1020 fn snapshot(&self) -> Snapshot {
1021 self.snapshot.clone()
1022 }
1023
1024 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
1025 self.snapshot = self.background_snapshot.lock().clone();
1026 cx.emit(Event::UpdatedEntries);
1027 cx.notify();
1028 }
1029
1030 pub fn disconnected_from_host(&mut self) {
1031 self.updates_tx.take();
1032 self.snapshot_subscriptions.clear();
1033 }
1034
1035 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1036 if let Some(updates_tx) = &self.updates_tx {
1037 updates_tx
1038 .unbounded_send(update)
1039 .expect("consumer runs to completion");
1040 }
1041 }
1042
1043 fn observed_snapshot(&self, scan_id: usize) -> bool {
1044 self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
1045 }
1046
1047 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
1048 let (tx, rx) = oneshot::channel();
1049 if self.observed_snapshot(scan_id) {
1050 let _ = tx.send(());
1051 } else {
1052 match self
1053 .snapshot_subscriptions
1054 .binary_search_by_key(&scan_id, |probe| probe.0)
1055 {
1056 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1057 }
1058 }
1059
1060 async move {
1061 let _ = rx.await;
1062 }
1063 }
1064
1065 pub fn update_diagnostic_summary(
1066 &mut self,
1067 path: Arc<Path>,
1068 summary: &proto::DiagnosticSummary,
1069 ) {
1070 let summary = DiagnosticSummary {
1071 language_server_id: summary.language_server_id as usize,
1072 error_count: summary.error_count as usize,
1073 warning_count: summary.warning_count as usize,
1074 };
1075 if summary.is_empty() {
1076 self.diagnostic_summaries.remove(&PathKey(path));
1077 } else {
1078 self.diagnostic_summaries.insert(PathKey(path), summary);
1079 }
1080 }
1081
1082 pub fn insert_entry(
1083 &mut self,
1084 entry: proto::Entry,
1085 scan_id: usize,
1086 cx: &mut ModelContext<Worktree>,
1087 ) -> Task<Result<Entry>> {
1088 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1089 cx.spawn(|this, mut cx| async move {
1090 wait_for_snapshot.await;
1091 this.update(&mut cx, |worktree, _| {
1092 let worktree = worktree.as_remote_mut().unwrap();
1093 let mut snapshot = worktree.background_snapshot.lock();
1094 let entry = snapshot.insert_entry(entry);
1095 worktree.snapshot = snapshot.clone();
1096 entry
1097 })
1098 })
1099 }
1100
1101 pub(crate) fn delete_entry(
1102 &mut self,
1103 id: ProjectEntryId,
1104 scan_id: usize,
1105 cx: &mut ModelContext<Worktree>,
1106 ) -> Task<Result<()>> {
1107 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1108 cx.spawn(|this, mut cx| async move {
1109 wait_for_snapshot.await;
1110 this.update(&mut cx, |worktree, _| {
1111 let worktree = worktree.as_remote_mut().unwrap();
1112 let mut snapshot = worktree.background_snapshot.lock();
1113 snapshot.delete_entry(id);
1114 worktree.snapshot = snapshot.clone();
1115 });
1116 Ok(())
1117 })
1118 }
1119}
1120
1121impl Snapshot {
1122 pub fn id(&self) -> WorktreeId {
1123 self.id
1124 }
1125
1126 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1127 self.entries_by_id.get(&entry_id, &()).is_some()
1128 }
1129
1130 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1131 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1132 let old_entry = self.entries_by_id.insert_or_replace(
1133 PathEntry {
1134 id: entry.id,
1135 path: entry.path.clone(),
1136 is_ignored: entry.is_ignored,
1137 scan_id: 0,
1138 },
1139 &(),
1140 );
1141 if let Some(old_entry) = old_entry {
1142 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1143 }
1144 self.entries_by_path.insert_or_replace(entry.clone(), &());
1145 Ok(entry)
1146 }
1147
1148 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1149 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1150 self.entries_by_path = {
1151 let mut cursor = self.entries_by_path.cursor();
1152 let mut new_entries_by_path =
1153 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1154 while let Some(entry) = cursor.item() {
1155 if entry.path.starts_with(&removed_entry.path) {
1156 self.entries_by_id.remove(&entry.id, &());
1157 cursor.next(&());
1158 } else {
1159 break;
1160 }
1161 }
1162 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1163 new_entries_by_path
1164 };
1165
1166 true
1167 } else {
1168 false
1169 }
1170 }
1171
1172 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1173 let mut entries_by_path_edits = Vec::new();
1174 let mut entries_by_id_edits = Vec::new();
1175 for entry_id in update.removed_entries {
1176 let entry = self
1177 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1178 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1179 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1180 entries_by_id_edits.push(Edit::Remove(entry.id));
1181 }
1182
1183 for entry in update.updated_entries {
1184 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1185 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1186 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1187 }
1188 entries_by_id_edits.push(Edit::Insert(PathEntry {
1189 id: entry.id,
1190 path: entry.path.clone(),
1191 is_ignored: entry.is_ignored,
1192 scan_id: 0,
1193 }));
1194 entries_by_path_edits.push(Edit::Insert(entry));
1195 }
1196
1197 self.entries_by_path.edit(entries_by_path_edits, &());
1198 self.entries_by_id.edit(entries_by_id_edits, &());
1199 self.scan_id = update.scan_id as usize;
1200 self.is_complete = update.is_last_update;
1201
1202 Ok(())
1203 }
1204
1205 pub fn file_count(&self) -> usize {
1206 self.entries_by_path.summary().file_count
1207 }
1208
1209 pub fn visible_file_count(&self) -> usize {
1210 self.entries_by_path.summary().visible_file_count
1211 }
1212
1213 fn traverse_from_offset(
1214 &self,
1215 include_dirs: bool,
1216 include_ignored: bool,
1217 start_offset: usize,
1218 ) -> Traversal {
1219 let mut cursor = self.entries_by_path.cursor();
1220 cursor.seek(
1221 &TraversalTarget::Count {
1222 count: start_offset,
1223 include_dirs,
1224 include_ignored,
1225 },
1226 Bias::Right,
1227 &(),
1228 );
1229 Traversal {
1230 cursor,
1231 include_dirs,
1232 include_ignored,
1233 }
1234 }
1235
1236 fn traverse_from_path(
1237 &self,
1238 include_dirs: bool,
1239 include_ignored: bool,
1240 path: &Path,
1241 ) -> Traversal {
1242 let mut cursor = self.entries_by_path.cursor();
1243 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1244 Traversal {
1245 cursor,
1246 include_dirs,
1247 include_ignored,
1248 }
1249 }
1250
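    /// Iterates over file entries in path order, optionally including ignored files,
    /// starting at the given file offset.
    ///
    /// Illustrative sketch, assuming a populated `snapshot: Snapshot` (hypothetical) and
    /// that the returned `Traversal` is iterated as it is elsewhere in this crate:
    ///
    /// ```ignore
    /// // Print the paths of all non-ignored files.
    /// for entry in snapshot.files(false, 0) {
    ///     println!("{}", entry.path.display());
    /// }
    /// ```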
1251 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1252 self.traverse_from_offset(false, include_ignored, start)
1253 }
1254
1255 pub fn entries(&self, include_ignored: bool) -> Traversal {
1256 self.traverse_from_offset(true, include_ignored, 0)
1257 }
1258
1259 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1260 let empty_path = Path::new("");
1261 self.entries_by_path
1262 .cursor::<()>()
1263 .filter(move |entry| entry.path.as_ref() != empty_path)
1264 .map(|entry| &entry.path)
1265 }
1266
1267 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1268 let mut cursor = self.entries_by_path.cursor();
1269 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1270 let traversal = Traversal {
1271 cursor,
1272 include_dirs: true,
1273 include_ignored: true,
1274 };
1275 ChildEntriesIter {
1276 traversal,
1277 parent_path,
1278 }
1279 }
1280
1281 pub fn root_entry(&self) -> Option<&Entry> {
1282 self.entry_for_path("")
1283 }
1284
1285 pub fn root_name(&self) -> &str {
1286 &self.root_name
1287 }
1288
1289 pub fn scan_id(&self) -> usize {
1290 self.scan_id
1291 }
1292
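    /// Looks up the entry whose worktree-relative path exactly matches `path`.
    ///
    /// Sketch with a hypothetical `snapshot` and path:
    ///
    /// ```ignore
    /// if let Some(entry) = snapshot.entry_for_path("src/lib.rs") {
    ///     assert!(entry.is_file());
    /// }
    /// ```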
1293 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1294 let path = path.as_ref();
1295 self.traverse_from_path(true, true, path)
1296 .entry()
1297 .and_then(|entry| {
1298 if entry.path.as_ref() == path {
1299 Some(entry)
1300 } else {
1301 None
1302 }
1303 })
1304 }
1305
1306 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1307 let entry = self.entries_by_id.get(&id, &())?;
1308 self.entry_for_path(&entry.path)
1309 }
1310
1311 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1312 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1313 }
1314}
1315
1316impl LocalSnapshot {
1317 pub fn abs_path(&self) -> &Arc<Path> {
1318 &self.abs_path
1319 }
1320
1321 pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
1322 &self.extension_counts
1323 }
1324
1325 pub(crate) fn git_repository_for_file_path(&self, path: &Path) -> Option<GitRepositoryState> {
1326 for repository in self.git_repositories.iter().rev() {
1327 if path.starts_with(&repository.content_path) {
1328 return Some(repository.clone());
1329 }
1330 }
1331 None
1332 }
1333
1334 pub(crate) fn git_repository_for_git_data(&self, path: &Path) -> Option<GitRepositoryState> {
1335 for repository in self.git_repositories.iter() {
1336 if path.starts_with(&repository.git_dir_path) {
1337 return Some(repository.clone());
1338 }
1339 }
1340 None
1341 }
1342
1343 pub(crate) fn does_git_repository_track_file_path(
1344 &self,
1345 repo: &GitRepositoryState,
1346 file_path: &Path,
1347 ) -> bool {
1348 self.git_repository_for_file_path(file_path)
1349 .map_or(false, |r| r.content_path == repo.content_path)
1350 }
1351
1352 #[cfg(test)]
1353 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1354 let root_name = self.root_name.clone();
1355 proto::UpdateWorktree {
1356 project_id,
1357 worktree_id: self.id().to_proto(),
1358 root_name,
1359 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1360 removed_entries: Default::default(),
1361 scan_id: self.scan_id as u64,
1362 is_last_update: true,
1363 }
1364 }
1365
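    /// Builds the `UpdateWorktree` message that transforms `other` into `self`, by
    /// merging the two id-ordered entry lists and collecting updated and removed entries.
    ///
    /// Sketch of how the result relates to the inputs (both snapshots are hypothetical):
    ///
    /// ```ignore
    /// let update = new_snapshot.build_update(&old_snapshot, project_id, worktree_id, true);
    /// assert_eq!(update.scan_id, new_snapshot.scan_id() as u64);
    /// ```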
1366 pub(crate) fn build_update(
1367 &self,
1368 other: &Self,
1369 project_id: u64,
1370 worktree_id: u64,
1371 include_ignored: bool,
1372 ) -> proto::UpdateWorktree {
1373 let mut updated_entries = Vec::new();
1374 let mut removed_entries = Vec::new();
1375 let mut self_entries = self
1376 .entries_by_id
1377 .cursor::<()>()
1378 .filter(|e| include_ignored || !e.is_ignored)
1379 .peekable();
1380 let mut other_entries = other
1381 .entries_by_id
1382 .cursor::<()>()
1383 .filter(|e| include_ignored || !e.is_ignored)
1384 .peekable();
1385 loop {
1386 match (self_entries.peek(), other_entries.peek()) {
1387 (Some(self_entry), Some(other_entry)) => {
1388 match Ord::cmp(&self_entry.id, &other_entry.id) {
1389 Ordering::Less => {
1390 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1391 updated_entries.push(entry);
1392 self_entries.next();
1393 }
1394 Ordering::Equal => {
1395 if self_entry.scan_id != other_entry.scan_id {
1396 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1397 updated_entries.push(entry);
1398 }
1399
1400 self_entries.next();
1401 other_entries.next();
1402 }
1403 Ordering::Greater => {
1404 removed_entries.push(other_entry.id.to_proto());
1405 other_entries.next();
1406 }
1407 }
1408 }
1409 (Some(self_entry), None) => {
1410 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1411 updated_entries.push(entry);
1412 self_entries.next();
1413 }
1414 (None, Some(other_entry)) => {
1415 removed_entries.push(other_entry.id.to_proto());
1416 other_entries.next();
1417 }
1418 (None, None) => break,
1419 }
1420 }
1421
1422 proto::UpdateWorktree {
1423 project_id,
1424 worktree_id,
1425 root_name: self.root_name().to_string(),
1426 updated_entries,
1427 removed_entries,
1428 scan_id: self.scan_id as u64,
1429 is_last_update: true,
1430 }
1431 }
1432
1433 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1434 if !entry.is_dir() && entry.path.file_name() == Some(&GITIGNORE) {
1435 let abs_path = self.abs_path.join(&entry.path);
1436 match smol::block_on(build_gitignore(&abs_path, fs)) {
1437 Ok(ignore) => {
1438 self.ignores_by_parent_abs_path.insert(
1439 abs_path.parent().unwrap().into(),
1440 (Arc::new(ignore), self.scan_id),
1441 );
1442 }
1443 Err(error) => {
1444 log::error!(
1445 "error loading .gitignore file {:?} - {:?}",
1446 &entry.path,
1447 error
1448 );
1449 }
1450 }
1451 } else if entry.path.file_name() == Some(&DOT_GIT) {
1452 let abs_path = self.abs_path.join(&entry.path);
1453 let content_path: Arc<Path> = entry.path.parent().unwrap().into();
1454 if let Err(ix) = self
1455 .git_repositories
1456 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1457 {
1458 if let Some(repository) = Repository::open(&abs_path).log_err() {
1459 self.git_repositories.insert(
1460 ix,
1461 GitRepositoryState {
1462 content_path,
1463 git_dir_path: repository.path().into(),
1464 scan_id: self.scan_id,
1465 repository: Arc::new(Mutex::new(repository)),
1466 },
1467 );
1468 }
1469 }
1470 }
1471
1472 self.reuse_entry_id(&mut entry);
1473
1474 if entry.kind == EntryKind::PendingDir {
1475 if let Some(existing_entry) =
1476 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1477 {
1478 entry.kind = existing_entry.kind;
1479 }
1480 }
1481
1482 self.entries_by_path.insert_or_replace(entry.clone(), &());
1483 let scan_id = self.scan_id;
1484 let removed_entry = self.entries_by_id.insert_or_replace(
1485 PathEntry {
1486 id: entry.id,
1487 path: entry.path.clone(),
1488 is_ignored: entry.is_ignored,
1489 scan_id,
1490 },
1491 &(),
1492 );
1493
1494 if let Some(removed_entry) = removed_entry {
1495 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1496 }
1497 self.inc_extension_count(&entry.path, entry.is_ignored);
1498
1499 entry
1500 }
1501
1502 fn populate_dir(
1503 &mut self,
1504 parent_path: Arc<Path>,
1505 entries: impl IntoIterator<Item = Entry>,
1506 ignore: Option<Arc<Gitignore>>,
1507 ) {
1508 let mut parent_entry = if let Some(parent_entry) =
1509 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1510 {
1511 parent_entry.clone()
1512 } else {
1513 log::warn!(
1514 "populating a directory {:?} that has been removed",
1515 parent_path
1516 );
1517 return;
1518 };
1519
1520 if let Some(ignore) = ignore {
1521 self.ignores_by_parent_abs_path.insert(
1522 self.abs_path.join(&parent_path).into(),
1523 (ignore, self.scan_id),
1524 );
1525 }
1526 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1527 parent_entry.kind = EntryKind::Dir;
1528 } else {
1529 unreachable!();
1530 }
1531
1532 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1533 let mut entries_by_id_edits = Vec::new();
1534
1535 for mut entry in entries {
1536 self.reuse_entry_id(&mut entry);
1537 self.inc_extension_count(&entry.path, entry.is_ignored);
1538 entries_by_id_edits.push(Edit::Insert(PathEntry {
1539 id: entry.id,
1540 path: entry.path.clone(),
1541 is_ignored: entry.is_ignored,
1542 scan_id: self.scan_id,
1543 }));
1544 entries_by_path_edits.push(Edit::Insert(entry));
1545 }
1546
1547 self.entries_by_path.edit(entries_by_path_edits, &());
1548 let removed_entries = self.entries_by_id.edit(entries_by_id_edits, &());
1549
1550 for removed_entry in removed_entries {
1551 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1552 }
1553 }
1554
1555 fn inc_extension_count(&mut self, path: &Path, ignored: bool) {
1556 if !ignored {
1557 if let Some(extension) = path.extension() {
1558 if let Some(count) = self.extension_counts.get_mut(extension) {
1559 *count += 1;
1560 } else {
1561 self.extension_counts.insert(extension.into(), 1);
1562 }
1563 }
1564 }
1565 }
1566
1567 fn dec_extension_count(&mut self, path: &Path, ignored: bool) {
1568 if !ignored {
1569 if let Some(extension) = path.extension() {
1570 if let Some(count) = self.extension_counts.get_mut(extension) {
1571 *count -= 1;
1572 }
1573 }
1574 }
1575 }
1576
1577 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1578 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1579 entry.id = removed_entry_id;
1580 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1581 entry.id = existing_entry.id;
1582 }
1583 }
1584
1585 fn remove_path(&mut self, path: &Path) {
1586 let mut new_entries;
1587 let removed_entries;
1588 {
1589 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1590 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1591 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1592 new_entries.push_tree(cursor.suffix(&()), &());
1593 }
1594 self.entries_by_path = new_entries;
1595
1596 let mut entries_by_id_edits = Vec::new();
1597 for entry in removed_entries.cursor::<()>() {
1598 let removed_entry_id = self
1599 .removed_entry_ids
1600 .entry(entry.inode)
1601 .or_insert(entry.id);
1602 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1603 entries_by_id_edits.push(Edit::Remove(entry.id));
1604 self.dec_extension_count(&entry.path, entry.is_ignored);
1605 }
1606 self.entries_by_id.edit(entries_by_id_edits, &());
1607
1608 if path.file_name() == Some(&GITIGNORE) {
1609 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1610 if let Some((_, scan_id)) = self
1611 .ignores_by_parent_abs_path
1612 .get_mut(abs_parent_path.as_path())
1613 {
1614 *scan_id = self.snapshot.scan_id;
1615 }
1616 } else if path.file_name() == Some(&DOT_GIT) {
1617 let parent_path = path.parent().unwrap();
1618 if let Ok(ix) = self
1619 .git_repositories
1620 .binary_search_by_key(&parent_path, |repo| repo.content_path.as_ref())
1621 {
1622 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1623 }
1624 }
1625 }
1626
1627 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1628 let mut inodes = TreeSet::default();
1629 for ancestor in path.ancestors().skip(1) {
1630 if let Some(entry) = self.entry_for_path(ancestor) {
1631 inodes.insert(entry.inode);
1632 }
1633 }
1634 inodes
1635 }
1636
1637 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1638 let mut new_ignores = Vec::new();
1639 for ancestor in abs_path.ancestors().skip(1) {
1640 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1641 new_ignores.push((ancestor, Some(ignore.clone())));
1642 } else {
1643 new_ignores.push((ancestor, None));
1644 }
1645 }
1646
1647 let mut ignore_stack = IgnoreStack::none();
1648 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1649 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1650 ignore_stack = IgnoreStack::all();
1651 break;
1652 } else if let Some(ignore) = ignore {
1653 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1654 }
1655 }
1656
1657 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1658 ignore_stack = IgnoreStack::all();
1659 }
1660
1661 ignore_stack
1662 }
1663}
1664
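/// Loads the `.gitignore` file at `abs_path` and builds a matcher rooted at its parent
/// directory.
///
/// Rough usage sketch; `fs` is assumed to be an `Arc<dyn Fs>` handle, and the path and
/// ignore contents are hypothetical:
///
/// ```ignore
/// let ignore = build_gitignore(Path::new("/project/.gitignore"), fs.as_ref()).await?;
/// // With a rule such as `target/` in the file, build output is reported as ignored.
/// assert!(ignore.matched("/project/target", true).is_ignore());
/// ```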
1665async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1666 let contents = fs.load(abs_path).await?;
1667 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1668 let mut builder = GitignoreBuilder::new(parent);
1669 for line in contents.lines() {
1670 builder.add_line(Some(abs_path.into()), line)?;
1671 }
1672 Ok(builder.build()?)
1673}
1674
1675impl WorktreeId {
1676 pub fn from_usize(handle_id: usize) -> Self {
1677 Self(handle_id)
1678 }
1679
1680 pub(crate) fn from_proto(id: u64) -> Self {
1681 Self(id as usize)
1682 }
1683
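    /// Converts this id into the integer representation used on the wire. The conversion
    /// round-trips with `from_proto`, as in this sketch:
    ///
    /// ```ignore
    /// let id = WorktreeId::from_usize(42);
    /// assert_eq!(WorktreeId::from_proto(id.to_proto()), id);
    /// ```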
1684 pub fn to_proto(&self) -> u64 {
1685 self.0 as u64
1686 }
1687
1688 pub fn to_usize(&self) -> usize {
1689 self.0
1690 }
1691}
1692
1693impl fmt::Display for WorktreeId {
1694 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1695 self.0.fmt(f)
1696 }
1697}
1698
1699impl Deref for Worktree {
1700 type Target = Snapshot;
1701
1702 fn deref(&self) -> &Self::Target {
1703 match self {
1704 Worktree::Local(worktree) => &worktree.snapshot,
1705 Worktree::Remote(worktree) => &worktree.snapshot,
1706 }
1707 }
1708}
1709
1710impl Deref for LocalWorktree {
1711 type Target = LocalSnapshot;
1712
1713 fn deref(&self) -> &Self::Target {
1714 &self.snapshot
1715 }
1716}
1717
1718impl Deref for RemoteWorktree {
1719 type Target = Snapshot;
1720
1721 fn deref(&self) -> &Self::Target {
1722 &self.snapshot
1723 }
1724}
1725
1726impl fmt::Debug for LocalWorktree {
1727 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1728 self.snapshot.fmt(f)
1729 }
1730}
1731
1732impl fmt::Debug for Snapshot {
1733 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1734 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1735 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1736
1737 impl<'a> fmt::Debug for EntriesByPath<'a> {
1738 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1739 f.debug_map()
1740 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1741 .finish()
1742 }
1743 }
1744
1745 impl<'a> fmt::Debug for EntriesById<'a> {
1746 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1747 f.debug_list().entries(self.0.iter()).finish()
1748 }
1749 }
1750
1751 f.debug_struct("Snapshot")
1752 .field("id", &self.id)
1753 .field("root_name", &self.root_name)
1754 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1755 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1756 .finish()
1757 }
1758}
1759
1760#[derive(Clone, PartialEq)]
1761pub struct File {
1762 pub worktree: ModelHandle<Worktree>,
1763 pub path: Arc<Path>,
1764 pub mtime: SystemTime,
1765 pub(crate) entry_id: Option<ProjectEntryId>,
1766 pub(crate) is_local: bool,
1767}
1768
1769impl language::File for File {
1770 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1771 if self.is_local {
1772 Some(self)
1773 } else {
1774 None
1775 }
1776 }
1777
1778 fn mtime(&self) -> SystemTime {
1779 self.mtime
1780 }
1781
1782 fn path(&self) -> &Arc<Path> {
1783 &self.path
1784 }
1785
1786 fn full_path(&self, cx: &AppContext) -> PathBuf {
1787 let mut full_path = PathBuf::new();
1788 full_path.push(self.worktree.read(cx).root_name());
1789 if self.path.components().next().is_some() {
1790 full_path.push(&self.path);
1791 }
1792 full_path
1793 }
1794
1795 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1796 /// of its worktree, then this method will return the name of the worktree itself.
1797 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1798 self.path
1799 .file_name()
1800 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1801 }
1802
1803 fn is_deleted(&self) -> bool {
1804 self.entry_id.is_none()
1805 }
1806
1807 fn save(
1808 &self,
1809 buffer_id: u64,
1810 text: Rope,
1811 version: clock::Global,
1812 line_ending: LineEnding,
1813 cx: &mut MutableAppContext,
1814 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
1815 self.worktree.update(cx, |worktree, cx| match worktree {
1816 Worktree::Local(worktree) => {
1817 let rpc = worktree.client.clone();
1818 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1819 let fingerprint = text.fingerprint();
1820 let save = worktree.write_file(self.path.clone(), text, line_ending, cx);
1821 cx.background().spawn(async move {
1822 let entry = save.await?;
1823 if let Some(project_id) = project_id {
1824 rpc.send(proto::BufferSaved {
1825 project_id,
1826 buffer_id,
1827 version: serialize_version(&version),
1828 mtime: Some(entry.mtime.into()),
1829 fingerprint: fingerprint.clone(),
1830 })?;
1831 }
1832 Ok((version, fingerprint, entry.mtime))
1833 })
1834 }
1835 Worktree::Remote(worktree) => {
1836 let rpc = worktree.client.clone();
1837 let project_id = worktree.project_id;
1838 cx.foreground().spawn(async move {
1839 let response = rpc
1840 .request(proto::SaveBuffer {
1841 project_id,
1842 buffer_id,
1843 version: serialize_version(&version),
1844 })
1845 .await?;
1846 let version = deserialize_version(response.version);
1847 let mtime = response
1848 .mtime
1849 .ok_or_else(|| anyhow!("missing mtime"))?
1850 .into();
1851 Ok((version, response.fingerprint, mtime))
1852 })
1853 }
1854 })
1855 }
1856
1857 fn as_any(&self) -> &dyn Any {
1858 self
1859 }
1860
1861 fn to_proto(&self) -> rpc::proto::File {
1862 rpc::proto::File {
1863 worktree_id: self.worktree.id() as u64,
1864 entry_id: self.entry_id.map(|entry_id| entry_id.to_proto()),
1865 path: self.path.to_string_lossy().into(),
1866 mtime: Some(self.mtime.into()),
1867 }
1868 }
1869}
1870
1871impl language::LocalFile for File {
1872 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1873 self.worktree
1874 .read(cx)
1875 .as_local()
1876 .unwrap()
1877 .abs_path
1878 .join(&self.path)
1879 }
1880
1881 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1882 let worktree = self.worktree.read(cx).as_local().unwrap();
1883 let abs_path = worktree.absolutize(&self.path);
1884 let fs = worktree.fs.clone();
1885 cx.background()
1886 .spawn(async move { fs.load(&abs_path).await })
1887 }
1888
1889 fn buffer_reloaded(
1890 &self,
1891 buffer_id: u64,
1892 version: &clock::Global,
1893 fingerprint: String,
1894 line_ending: LineEnding,
1895 mtime: SystemTime,
1896 cx: &mut MutableAppContext,
1897 ) {
1898 let worktree = self.worktree.read(cx).as_local().unwrap();
1899 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1900 worktree
1901 .client
1902 .send(proto::BufferReloaded {
1903 project_id,
1904 buffer_id,
1905 version: serialize_version(version),
1906 mtime: Some(mtime.into()),
1907 fingerprint,
1908 line_ending: serialize_line_ending(line_ending) as i32,
1909 })
1910 .log_err();
1911 }
1912 }
1913}
1914
1915impl File {
1916 pub fn from_proto(
1917 proto: rpc::proto::File,
1918 worktree: ModelHandle<Worktree>,
1919 cx: &AppContext,
1920 ) -> Result<Self> {
1921 let worktree_id = worktree
1922 .read(cx)
1923 .as_remote()
1924 .ok_or_else(|| anyhow!("not remote"))?
1925 .id();
1926
1927 if worktree_id.to_proto() != proto.worktree_id {
1928 return Err(anyhow!("worktree id does not match file"));
1929 }
1930
1931 Ok(Self {
1932 worktree,
1933 path: Path::new(&proto.path).into(),
1934 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1935 entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
1936 is_local: false,
1937 })
1938 }
1939
1940 pub fn from_dyn(file: Option<&dyn language::File>) -> Option<&Self> {
1941 file.and_then(|f| f.as_any().downcast_ref())
1942 }
1943
1944 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
1945 self.worktree.read(cx).id()
1946 }
1947
1948 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
1949 self.entry_id
1950 }
1951}
1952
1953#[derive(Clone, Debug, PartialEq, Eq)]
1954pub struct Entry {
1955 pub id: ProjectEntryId,
1956 pub kind: EntryKind,
1957 pub path: Arc<Path>,
1958 pub inode: u64,
1959 pub mtime: SystemTime,
1960 pub is_symlink: bool,
1961 pub is_ignored: bool,
1962}
1963
1964#[derive(Clone, Copy, Debug, PartialEq, Eq)]
1965pub enum EntryKind {
1966 PendingDir,
1967 Dir,
1968 File(CharBag),
1969}
1970
1971impl Entry {
1972 fn new(
1973 path: Arc<Path>,
1974 metadata: &fs::Metadata,
1975 next_entry_id: &AtomicUsize,
1976 root_char_bag: CharBag,
1977 ) -> Self {
1978 Self {
1979 id: ProjectEntryId::new(next_entry_id),
1980 kind: if metadata.is_dir {
1981 EntryKind::PendingDir
1982 } else {
1983 EntryKind::File(char_bag_for_path(root_char_bag, &path))
1984 },
1985 path,
1986 inode: metadata.inode,
1987 mtime: metadata.mtime,
1988 is_symlink: metadata.is_symlink,
1989 is_ignored: false,
1990 }
1991 }
1992
1993 pub fn is_dir(&self) -> bool {
1994 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
1995 }
1996
1997 pub fn is_file(&self) -> bool {
1998 matches!(self.kind, EntryKind::File(_))
1999 }
2000}
2001
2002impl sum_tree::Item for Entry {
2003 type Summary = EntrySummary;
2004
2005 fn summary(&self) -> Self::Summary {
2006 let visible_count = if self.is_ignored { 0 } else { 1 };
2007 let file_count;
2008 let visible_file_count;
2009 if self.is_file() {
2010 file_count = 1;
2011 visible_file_count = visible_count;
2012 } else {
2013 file_count = 0;
2014 visible_file_count = 0;
2015 }
2016
2017 EntrySummary {
2018 max_path: self.path.clone(),
2019 count: 1,
2020 visible_count,
2021 file_count,
2022 visible_file_count,
2023 }
2024 }
2025}
2026
2027impl sum_tree::KeyedItem for Entry {
2028 type Key = PathKey;
2029
2030 fn key(&self) -> Self::Key {
2031 PathKey(self.path.clone())
2032 }
2033}
2034
2035#[derive(Clone, Debug)]
2036pub struct EntrySummary {
2037 max_path: Arc<Path>,
2038 count: usize,
2039 visible_count: usize,
2040 file_count: usize,
2041 visible_file_count: usize,
2042}
2043
2044impl Default for EntrySummary {
2045 fn default() -> Self {
2046 Self {
2047 max_path: Arc::from(Path::new("")),
2048 count: 0,
2049 visible_count: 0,
2050 file_count: 0,
2051 visible_file_count: 0,
2052 }
2053 }
2054}
2055
2056impl sum_tree::Summary for EntrySummary {
2057 type Context = ();
2058
2059 fn add_summary(&mut self, rhs: &Self, _: &()) {
2060 self.max_path = rhs.max_path.clone();
2061 self.count += rhs.count;
2062 self.visible_count += rhs.visible_count;
2063 self.file_count += rhs.file_count;
2064 self.visible_file_count += rhs.visible_file_count;
2065 }
2066}
2067
2068#[derive(Clone, Debug)]
2069struct PathEntry {
2070 id: ProjectEntryId,
2071 path: Arc<Path>,
2072 is_ignored: bool,
2073 scan_id: usize,
2074}
2075
2076impl sum_tree::Item for PathEntry {
2077 type Summary = PathEntrySummary;
2078
2079 fn summary(&self) -> Self::Summary {
2080 PathEntrySummary { max_id: self.id }
2081 }
2082}
2083
2084impl sum_tree::KeyedItem for PathEntry {
2085 type Key = ProjectEntryId;
2086
2087 fn key(&self) -> Self::Key {
2088 self.id
2089 }
2090}
2091
2092#[derive(Clone, Debug, Default)]
2093struct PathEntrySummary {
2094 max_id: ProjectEntryId,
2095}
2096
2097impl sum_tree::Summary for PathEntrySummary {
2098 type Context = ();
2099
2100 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2101 self.max_id = summary.max_id;
2102 }
2103}
2104
2105impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2106 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2107 *self = summary.max_id;
2108 }
2109}
2110
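/// Ordering key for `Entry` items in the `entries_by_path` tree: the entry's
/// worktree-relative path.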
2111#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2112pub struct PathKey(Arc<Path>);
2113
2114impl Default for PathKey {
2115 fn default() -> Self {
2116 Self(Path::new("").into())
2117 }
2118}
2119
2120impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2121 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2122 self.0 = summary.max_path.clone();
2123 }
2124}
2125
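/// Keeps the shared `LocalSnapshot` up to date from a background executor,
/// performing the initial recursive scan and then applying file-system events,
/// while reporting progress through the `notify` channel.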
2126struct BackgroundScanner {
2127 fs: Arc<dyn Fs>,
2128 snapshot: Arc<Mutex<LocalSnapshot>>,
2129 notify: UnboundedSender<ScanState>,
2130 executor: Arc<executor::Background>,
2131}
2132
2133impl BackgroundScanner {
2134 fn new(
2135 snapshot: Arc<Mutex<LocalSnapshot>>,
2136 notify: UnboundedSender<ScanState>,
2137 fs: Arc<dyn Fs>,
2138 executor: Arc<executor::Background>,
2139 ) -> Self {
2140 Self {
2141 fs,
2142 snapshot,
2143 notify,
2144 executor,
2145 }
2146 }
2147
2148 fn abs_path(&self) -> Arc<Path> {
2149 self.snapshot.lock().abs_path.clone()
2150 }
2151
2152 fn snapshot(&self) -> LocalSnapshot {
2153 self.snapshot.lock().clone()
2154 }
2155
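    /// Performs the initial scan and then processes incoming file-system events
    /// in batches, surrounding each phase with the appropriate `ScanState`
    /// notifications. Returns when the event stream or the notify channel closes.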
2156 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2157 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
2158 return;
2159 }
2160
2161 if let Err(err) = self.scan_dirs().await {
2162 if self
2163 .notify
2164 .unbounded_send(ScanState::Err(Arc::new(err)))
2165 .is_err()
2166 {
2167 return;
2168 }
2169 }
2170
2171 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2172 return;
2173 }
2174
2175 futures::pin_mut!(events_rx);
2176
2177 while let Some(mut events) = events_rx.next().await {
2178 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2179 events.extend(additional_events);
2180 }
2181
2182 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2183 break;
2184 }
2185
2186 if !self.process_events(events).await {
2187 break;
2188 }
2189
2190 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2191 break;
2192 }
2193 }
2194 }
2195
2196 async fn scan_dirs(&mut self) -> Result<()> {
2197 let root_char_bag;
2198 let root_abs_path;
2199 let root_inode;
2200 let is_dir;
2201 let next_entry_id;
2202 {
2203 let snapshot = self.snapshot.lock();
2204 root_char_bag = snapshot.root_char_bag;
2205 root_abs_path = snapshot.abs_path.clone();
2206 root_inode = snapshot.root_entry().map(|e| e.inode);
2207 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2208 next_entry_id = snapshot.next_entry_id.clone();
2209 };
2210
2211 // Populate ignores above the root.
2212 for ancestor in root_abs_path.ancestors().skip(1) {
2213 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2214 {
2215 self.snapshot
2216 .lock()
2217 .ignores_by_parent_abs_path
2218 .insert(ancestor.into(), (ignore.into(), 0));
2219 }
2220 }
2221
2222 let ignore_stack = {
2223 let mut snapshot = self.snapshot.lock();
2224 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2225 if ignore_stack.is_all() {
2226 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2227 root_entry.is_ignored = true;
2228 snapshot.insert_entry(root_entry, self.fs.as_ref());
2229 }
2230 }
2231 ignore_stack
2232 };
2233
2234 if is_dir {
2235 let path: Arc<Path> = Arc::from(Path::new(""));
2236 let mut ancestor_inodes = TreeSet::default();
2237 if let Some(root_inode) = root_inode {
2238 ancestor_inodes.insert(root_inode);
2239 }
2240
2241 let (tx, rx) = channel::unbounded();
2242 self.executor
2243 .block(tx.send(ScanJob {
2244 abs_path: root_abs_path.to_path_buf(),
2245 path,
2246 ignore_stack,
2247 ancestor_inodes,
2248 scan_queue: tx.clone(),
2249 }))
2250 .unwrap();
2251 drop(tx);
2252
2253 self.executor
2254 .scoped(|scope| {
2255 for _ in 0..self.executor.num_cpus() {
2256 scope.spawn(async {
2257 while let Ok(job) = rx.recv().await {
2258 if let Err(err) = self
2259 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2260 .await
2261 {
2262 log::error!("error scanning {:?}: {}", job.abs_path, err);
2263 }
2264 }
2265 });
2266 }
2267 })
2268 .await;
2269 }
2270
2271 Ok(())
2272 }
2273
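    /// Scans a single directory: builds entries for its children, queues jobs for
    /// unvisited subdirectories, and inserts the results into the shared snapshot.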
2274 async fn scan_dir(
2275 &self,
2276 root_char_bag: CharBag,
2277 next_entry_id: Arc<AtomicUsize>,
2278 job: &ScanJob,
2279 ) -> Result<()> {
2280 let mut new_entries: Vec<Entry> = Vec::new();
2281 let mut new_jobs: Vec<ScanJob> = Vec::new();
2282 let mut ignore_stack = job.ignore_stack.clone();
2283 let mut new_ignore = None;
2284
2285 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2286 while let Some(child_abs_path) = child_paths.next().await {
2287 let child_abs_path = match child_abs_path {
2288 Ok(child_abs_path) => child_abs_path,
2289 Err(error) => {
2290 log::error!("error processing entry {:?}", error);
2291 continue;
2292 }
2293 };
2294 let child_name = child_abs_path.file_name().unwrap();
2295 let child_path: Arc<Path> = job.path.join(child_name).into();
2296 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2297 Ok(Some(metadata)) => metadata,
2298 Ok(None) => continue,
2299 Err(err) => {
2300 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2301 continue;
2302 }
2303 };
2304
2305 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2306 if child_name == *GITIGNORE {
2307 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2308 Ok(ignore) => {
2309 let ignore = Arc::new(ignore);
2310 ignore_stack =
2311 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2312 new_ignore = Some(ignore);
2313 }
2314 Err(error) => {
2315 log::error!(
2316 "error loading .gitignore file {:?} - {:?}",
2317 child_name,
2318 error
2319 );
2320 }
2321 }
2322
                // Update the ignore status of any child entries we've already processed to
                // reflect the ignore file in the current directory. Because `.gitignore` starts
                // with a `.`, it sorts near the beginning of the directory listing, so there
                // should rarely be many entries to update. Also update the ignore stack
                // associated with any new jobs.
2327 let mut new_jobs = new_jobs.iter_mut();
2328 for entry in &mut new_entries {
2329 let entry_abs_path = self.abs_path().join(&entry.path);
2330 entry.is_ignored =
2331 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2332 if entry.is_dir() {
2333 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2334 IgnoreStack::all()
2335 } else {
2336 ignore_stack.clone()
2337 };
2338 }
2339 }
2340 }
2341
2342 let mut child_entry = Entry::new(
2343 child_path.clone(),
2344 &child_metadata,
2345 &next_entry_id,
2346 root_char_bag,
2347 );
2348
2349 if child_entry.is_dir() {
2350 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2351 child_entry.is_ignored = is_ignored;
2352
2353 if !job.ancestor_inodes.contains(&child_entry.inode) {
2354 let mut ancestor_inodes = job.ancestor_inodes.clone();
2355 ancestor_inodes.insert(child_entry.inode);
2356 new_jobs.push(ScanJob {
2357 abs_path: child_abs_path,
2358 path: child_path,
2359 ignore_stack: if is_ignored {
2360 IgnoreStack::all()
2361 } else {
2362 ignore_stack.clone()
2363 },
2364 ancestor_inodes,
2365 scan_queue: job.scan_queue.clone(),
2366 });
2367 }
2368 } else {
2369 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2370 }
2371
2372 new_entries.push(child_entry);
2373 }
2374
2375 self.snapshot
2376 .lock()
2377 .populate_dir(job.path.clone(), new_entries, new_ignore);
2378 for new_job in new_jobs {
2379 job.scan_queue.send(new_job).await.unwrap();
2380 }
2381
2382 Ok(())
2383 }
2384
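    /// Applies a batch of file-system events to the snapshot: removes stale
    /// entries, re-inserts entries for the changed paths, re-scans any newly
    /// created directories, and then refreshes ignore statuses and git
    /// repositories. Returns `false` if the root path can no longer be resolved.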
2385 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2386 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2387 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2388
2389 let root_char_bag;
2390 let root_abs_path;
2391 let next_entry_id;
2392 {
2393 let snapshot = self.snapshot.lock();
2394 root_char_bag = snapshot.root_char_bag;
2395 root_abs_path = snapshot.abs_path.clone();
2396 next_entry_id = snapshot.next_entry_id.clone();
2397 }
2398
2399 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2400 path
2401 } else {
2402 return false;
2403 };
2404 let metadata = futures::future::join_all(
2405 events
2406 .iter()
2407 .map(|event| self.fs.metadata(&event.path))
2408 .collect::<Vec<_>>(),
2409 )
2410 .await;
2411
2412 // Hold the snapshot lock while clearing and re-inserting the root entries
2413 // for each event. This way, the snapshot is not observable to the foreground
        // thread while this operation is in progress.
2415 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2416 {
2417 let mut snapshot = self.snapshot.lock();
2418 snapshot.scan_id += 1;
2419 for event in &events {
2420 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2421 snapshot.remove_path(path);
2422 }
2423 }
2424
2425 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2426 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2427 Ok(path) => Arc::from(path.to_path_buf()),
2428 Err(_) => {
2429 log::error!(
2430 "unexpected event {:?} for root path {:?}",
2431 event.path,
2432 root_canonical_path
2433 );
2434 continue;
2435 }
2436 };
2437 let abs_path = root_abs_path.join(&path);
2438
2439 match metadata {
2440 Ok(Some(metadata)) => {
2441 let ignore_stack =
2442 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2443 let mut fs_entry = Entry::new(
2444 path.clone(),
2445 &metadata,
2446 snapshot.next_entry_id.as_ref(),
2447 snapshot.root_char_bag,
2448 );
2449 fs_entry.is_ignored = ignore_stack.is_all();
2450 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2451
2452 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2453 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2454 ancestor_inodes.insert(metadata.inode);
2455 self.executor
2456 .block(scan_queue_tx.send(ScanJob {
2457 abs_path,
2458 path,
2459 ignore_stack,
2460 ancestor_inodes,
2461 scan_queue: scan_queue_tx.clone(),
2462 }))
2463 .unwrap();
2464 }
2465 }
2466 Ok(None) => {}
2467 Err(err) => {
2468 // TODO - create a special 'error' entry in the entries tree to mark this
2469 log::error!("error reading file on event {:?}", err);
2470 }
2471 }
2472 }
2473 drop(scan_queue_tx);
2474 }
2475
2476 // Scan any directories that were created as part of this event batch.
2477 self.executor
2478 .scoped(|scope| {
2479 for _ in 0..self.executor.num_cpus() {
2480 scope.spawn(async {
2481 while let Ok(job) = scan_queue_rx.recv().await {
2482 if let Err(err) = self
2483 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2484 .await
2485 {
2486 log::error!("error scanning {:?}: {}", job.abs_path, err);
2487 }
2488 }
2489 });
2490 }
2491 })
2492 .await;
2493
2494 // Attempt to detect renames only over a single batch of file-system events.
2495 self.snapshot.lock().removed_entry_ids.clear();
2496
2497 self.update_ignore_statuses().await;
2498 self.update_git_repositories().await;
2499 true
2500 }
2501
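    /// Re-applies ignore status beneath every directory whose `.gitignore`
    /// changed during the current scan, removing records for `.gitignore` files
    /// that no longer exist, and fans the work out across the executor.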
2502 async fn update_ignore_statuses(&self) {
2503 let mut snapshot = self.snapshot();
2504
2505 let mut ignores_to_update = Vec::new();
2506 let mut ignores_to_delete = Vec::new();
2507 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2508 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2509 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2510 ignores_to_update.push(parent_abs_path.clone());
2511 }
2512
2513 let ignore_path = parent_path.join(&*GITIGNORE);
2514 if snapshot.entry_for_path(ignore_path).is_none() {
2515 ignores_to_delete.push(parent_abs_path.clone());
2516 }
2517 }
2518 }
2519
2520 for parent_abs_path in ignores_to_delete {
2521 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2522 self.snapshot
2523 .lock()
2524 .ignores_by_parent_abs_path
2525 .remove(&parent_abs_path);
2526 }
2527
2528 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2529 ignores_to_update.sort_unstable();
2530 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2531 while let Some(parent_abs_path) = ignores_to_update.next() {
2532 while ignores_to_update
2533 .peek()
2534 .map_or(false, |p| p.starts_with(&parent_abs_path))
2535 {
2536 ignores_to_update.next().unwrap();
2537 }
2538
2539 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2540 ignore_queue_tx
2541 .send(UpdateIgnoreStatusJob {
2542 abs_path: parent_abs_path,
2543 ignore_stack,
2544 ignore_queue: ignore_queue_tx.clone(),
2545 })
2546 .await
2547 .unwrap();
2548 }
2549 drop(ignore_queue_tx);
2550
2551 self.executor
2552 .scoped(|scope| {
2553 for _ in 0..self.executor.num_cpus() {
2554 scope.spawn(async {
2555 while let Ok(job) = ignore_queue_rx.recv().await {
2556 self.update_ignore_status(job, &snapshot).await;
2557 }
2558 });
2559 }
2560 })
2561 .await;
2562 }
2563
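    /// Drops any tracked git repositories whose `.git` entry is no longer
    /// present in the snapshot.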
2564 async fn update_git_repositories(&self) {
2565 let mut snapshot = self.snapshot();
2566 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2567 git_repositories.retain(|git_repository| {
2568 let dot_git_path = git_repository.content_path.join(&*DOT_GIT);
2569 snapshot.entry_for_path(dot_git_path).is_some()
2570 });
2571 snapshot.git_repositories = git_repositories;
2572 }
2573
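    /// Recomputes ignore status for the entries directly beneath `job.abs_path`,
    /// queuing child jobs for subdirectories and writing any changed entries back
    /// into the snapshot.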
2574 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2575 let mut ignore_stack = job.ignore_stack;
2576 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2577 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2578 }
2579
2580 let mut entries_by_id_edits = Vec::new();
2581 let mut entries_by_path_edits = Vec::new();
2582 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2583 for mut entry in snapshot.child_entries(path).cloned() {
2584 let was_ignored = entry.is_ignored;
2585 let abs_path = self.abs_path().join(&entry.path);
2586 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2587 if entry.is_dir() {
2588 let child_ignore_stack = if entry.is_ignored {
2589 IgnoreStack::all()
2590 } else {
2591 ignore_stack.clone()
2592 };
2593 job.ignore_queue
2594 .send(UpdateIgnoreStatusJob {
2595 abs_path: abs_path.into(),
2596 ignore_stack: child_ignore_stack,
2597 ignore_queue: job.ignore_queue.clone(),
2598 })
2599 .await
2600 .unwrap();
2601 }
2602
2603 if entry.is_ignored != was_ignored {
2604 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2605 path_entry.scan_id = snapshot.scan_id;
2606 path_entry.is_ignored = entry.is_ignored;
2607 entries_by_id_edits.push(Edit::Insert(path_entry));
2608 entries_by_path_edits.push(Edit::Insert(entry));
2609 }
2610 }
2611
2612 let mut snapshot = self.snapshot.lock();
2613 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2614 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2615 }
2616}
2617
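/// Extends the worktree root's `CharBag` with the lowercased characters of
/// `path`, producing the bag used for fuzzy matching against that entry.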
2618fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2619 let mut result = root_char_bag;
2620 result.extend(
2621 path.to_string_lossy()
2622 .chars()
2623 .map(|c| c.to_ascii_lowercase()),
2624 );
2625 result
2626}
2627
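/// A unit of work for the background scanner: one directory to scan, along with
/// the ignore stack in effect, the inodes of its ancestors (used to break
/// symlink cycles), and a sender for queueing further jobs.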
2628struct ScanJob {
2629 abs_path: PathBuf,
2630 path: Arc<Path>,
2631 ignore_stack: Arc<IgnoreStack>,
2632 scan_queue: Sender<ScanJob>,
2633 ancestor_inodes: TreeSet<u64>,
2634}
2635
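/// A unit of work for re-applying ignore status beneath a directory whose
/// effective `.gitignore` rules have changed.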
2636struct UpdateIgnoreStatusJob {
2637 abs_path: Arc<Path>,
2638 ignore_stack: Arc<IgnoreStack>,
2639 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2640}
2641
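/// Extension methods on `ModelHandle<Worktree>`; currently only test-support
/// helpers.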
2642pub trait WorktreeHandle {
2643 #[cfg(any(test, feature = "test-support"))]
2644 fn flush_fs_events<'a>(
2645 &self,
2646 cx: &'a gpui::TestAppContext,
2647 ) -> futures::future::LocalBoxFuture<'a, ()>;
2648}
2649
2650impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans, and to emit extra scan-state notifications.
2654 //
2655 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2656 // to ensure that all redundant FS events have already been processed.
2657 #[cfg(any(test, feature = "test-support"))]
2658 fn flush_fs_events<'a>(
2659 &self,
2660 cx: &'a gpui::TestAppContext,
2661 ) -> futures::future::LocalBoxFuture<'a, ()> {
2662 use smol::future::FutureExt;
2663
2664 let filename = "fs-event-sentinel";
2665 let tree = self.clone();
2666 let (fs, root_path) = self.read_with(cx, |tree, _| {
2667 let tree = tree.as_local().unwrap();
2668 (tree.fs.clone(), tree.abs_path().clone())
2669 });
2670
2671 async move {
2672 fs.create_file(&root_path.join(filename), Default::default())
2673 .await
2674 .unwrap();
2675 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2676 .await;
2677
2678 fs.remove_file(&root_path.join(filename), Default::default())
2679 .await
2680 .unwrap();
2681 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2682 .await;
2683
2684 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2685 .await;
2686 }
2687 .boxed_local()
2688 }
2689}
2690
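/// Cursor dimension that records the rightmost path seen so far and how many
/// entries (all, visible, files, visible files) precede the cursor, allowing a
/// traversal to seek by count.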
2691#[derive(Clone, Debug)]
2692struct TraversalProgress<'a> {
2693 max_path: &'a Path,
2694 count: usize,
2695 visible_count: usize,
2696 file_count: usize,
2697 visible_file_count: usize,
2698}
2699
2700impl<'a> TraversalProgress<'a> {
2701 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2702 match (include_ignored, include_dirs) {
2703 (true, true) => self.count,
2704 (true, false) => self.file_count,
2705 (false, true) => self.visible_count,
2706 (false, false) => self.visible_file_count,
2707 }
2708 }
2709}
2710
2711impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2712 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2713 self.max_path = summary.max_path.as_ref();
2714 self.count += summary.count;
2715 self.visible_count += summary.visible_count;
2716 self.file_count += summary.file_count;
2717 self.visible_file_count += summary.visible_file_count;
2718 }
2719}
2720
2721impl<'a> Default for TraversalProgress<'a> {
2722 fn default() -> Self {
2723 Self {
2724 max_path: Path::new(""),
2725 count: 0,
2726 visible_count: 0,
2727 file_count: 0,
2728 visible_file_count: 0,
2729 }
2730 }
2731}
2732
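/// An iterator over worktree entries in path order. `include_dirs` and
/// `include_ignored` control whether directories and ignored entries are
/// yielded; see e.g. `tree.entries(false)` in the tests below for iterating
/// only non-ignored entries.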
2733pub struct Traversal<'a> {
2734 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2735 include_ignored: bool,
2736 include_dirs: bool,
2737}
2738
2739impl<'a> Traversal<'a> {
2740 pub fn advance(&mut self) -> bool {
2741 self.advance_to_offset(self.offset() + 1)
2742 }
2743
2744 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2745 self.cursor.seek_forward(
2746 &TraversalTarget::Count {
2747 count: offset,
2748 include_dirs: self.include_dirs,
2749 include_ignored: self.include_ignored,
2750 },
2751 Bias::Right,
2752 &(),
2753 )
2754 }
2755
2756 pub fn advance_to_sibling(&mut self) -> bool {
2757 while let Some(entry) = self.cursor.item() {
2758 self.cursor.seek_forward(
2759 &TraversalTarget::PathSuccessor(&entry.path),
2760 Bias::Left,
2761 &(),
2762 );
2763 if let Some(entry) = self.cursor.item() {
2764 if (self.include_dirs || !entry.is_dir())
2765 && (self.include_ignored || !entry.is_ignored)
2766 {
2767 return true;
2768 }
2769 }
2770 }
2771 false
2772 }
2773
2774 pub fn entry(&self) -> Option<&'a Entry> {
2775 self.cursor.item()
2776 }
2777
2778 pub fn offset(&self) -> usize {
2779 self.cursor
2780 .start()
2781 .count(self.include_dirs, self.include_ignored)
2782 }
2783}
2784
2785impl<'a> Iterator for Traversal<'a> {
2786 type Item = &'a Entry;
2787
2788 fn next(&mut self) -> Option<Self::Item> {
2789 if let Some(item) = self.entry() {
2790 self.advance();
2791 Some(item)
2792 } else {
2793 None
2794 }
2795 }
2796}
2797
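/// A seek target for a `Traversal` cursor: an exact path, the first path that
/// is not a descendant of the given path (`PathSuccessor`), or a position
/// expressed as an entry count.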
2798#[derive(Debug)]
2799enum TraversalTarget<'a> {
2800 Path(&'a Path),
2801 PathSuccessor(&'a Path),
2802 Count {
2803 count: usize,
2804 include_ignored: bool,
2805 include_dirs: bool,
2806 },
2807}
2808
2809impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2810 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2811 match self {
2812 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2813 TraversalTarget::PathSuccessor(path) => {
2814 if !cursor_location.max_path.starts_with(path) {
2815 Ordering::Equal
2816 } else {
2817 Ordering::Greater
2818 }
2819 }
2820 TraversalTarget::Count {
2821 count,
2822 include_dirs,
2823 include_ignored,
2824 } => Ord::cmp(
2825 count,
2826 &cursor_location.count(*include_dirs, *include_ignored),
2827 ),
2828 }
2829 }
2830}
2831
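/// Yields the immediate children of `parent_path` by advancing the underlying
/// traversal from one sibling to the next.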
2832struct ChildEntriesIter<'a> {
2833 parent_path: &'a Path,
2834 traversal: Traversal<'a>,
2835}
2836
2837impl<'a> Iterator for ChildEntriesIter<'a> {
2838 type Item = &'a Entry;
2839
2840 fn next(&mut self) -> Option<Self::Item> {
2841 if let Some(item) = self.traversal.entry() {
2842 if item.path.starts_with(&self.parent_path) {
2843 self.traversal.advance_to_sibling();
2844 return Some(item);
2845 }
2846 }
2847 None
2848 }
2849}
2850
2851impl<'a> From<&'a Entry> for proto::Entry {
2852 fn from(entry: &'a Entry) -> Self {
2853 Self {
2854 id: entry.id.to_proto(),
2855 is_dir: entry.is_dir(),
2856 path: entry.path.as_os_str().as_bytes().to_vec(),
2857 inode: entry.inode,
2858 mtime: Some(entry.mtime.into()),
2859 is_symlink: entry.is_symlink,
2860 is_ignored: entry.is_ignored,
2861 }
2862 }
2863}
2864
2865impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2866 type Error = anyhow::Error;
2867
2868 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2869 if let Some(mtime) = entry.mtime {
2870 let kind = if entry.is_dir {
2871 EntryKind::Dir
2872 } else {
2873 let mut char_bag = *root_char_bag;
2874 char_bag.extend(
2875 String::from_utf8_lossy(&entry.path)
2876 .chars()
2877 .map(|c| c.to_ascii_lowercase()),
2878 );
2879 EntryKind::File(char_bag)
2880 };
2881 let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
2882 Ok(Entry {
2883 id: ProjectEntryId::from_proto(entry.id),
2884 kind,
2885 path,
2886 inode: entry.inode,
2887 mtime: mtime.into(),
2888 is_symlink: entry.is_symlink,
2889 is_ignored: entry.is_ignored,
2890 })
2891 } else {
2892 Err(anyhow!(
2893 "missing mtime in remote worktree entry {:?}",
2894 entry.path
2895 ))
2896 }
2897 }
2898}
2899
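/// Sends a worktree update to the server, splitting it into chunks of at most
/// `MAX_CHUNK_SIZE` entries and issuing one request per chunk.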
2900async fn send_worktree_update(client: &Arc<Client>, update: proto::UpdateWorktree) -> Result<()> {
2901 #[cfg(any(test, feature = "test-support"))]
2902 const MAX_CHUNK_SIZE: usize = 2;
2903 #[cfg(not(any(test, feature = "test-support")))]
2904 const MAX_CHUNK_SIZE: usize = 256;
2905
2906 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
2907 client.request(update).await?;
2908 }
2909
2910 Ok(())
2911}
2912
2913#[cfg(test)]
2914mod tests {
2915 use super::*;
2916 use crate::fs::FakeFs;
2917 use anyhow::Result;
2918 use client::test::FakeHttpClient;
2919 use fs::RealFs;
2920 use gpui::{executor::Deterministic, TestAppContext};
2921 use rand::prelude::*;
2922 use serde_json::json;
2923 use std::{
2924 env,
2925 fmt::Write,
2926 time::{SystemTime, UNIX_EPOCH},
2927 };
2928 use util::test::temp_tree;
2929
2930 #[gpui::test]
2931 async fn test_traversal(cx: &mut TestAppContext) {
2932 let fs = FakeFs::new(cx.background());
2933 fs.insert_tree(
2934 "/root",
2935 json!({
2936 ".gitignore": "a/b\n",
2937 "a": {
2938 "b": "",
2939 "c": "",
2940 }
2941 }),
2942 )
2943 .await;
2944
2945 let http_client = FakeHttpClient::with_404_response();
2946 let client = cx.read(|cx| Client::new(http_client, cx));
2947
2948 let tree = Worktree::local(
2949 client,
2950 Arc::from(Path::new("/root")),
2951 true,
2952 fs,
2953 Default::default(),
2954 &mut cx.to_async(),
2955 )
2956 .await
2957 .unwrap();
2958 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2959 .await;
2960
2961 tree.read_with(cx, |tree, _| {
2962 assert_eq!(
2963 tree.entries(false)
2964 .map(|entry| entry.path.as_ref())
2965 .collect::<Vec<_>>(),
2966 vec![
2967 Path::new(""),
2968 Path::new(".gitignore"),
2969 Path::new("a"),
2970 Path::new("a/c"),
2971 ]
2972 );
2973 assert_eq!(
2974 tree.entries(true)
2975 .map(|entry| entry.path.as_ref())
2976 .collect::<Vec<_>>(),
2977 vec![
2978 Path::new(""),
2979 Path::new(".gitignore"),
2980 Path::new("a"),
2981 Path::new("a/b"),
2982 Path::new("a/c"),
2983 ]
2984 );
2985 })
2986 }
2987
2988 #[gpui::test(iterations = 10)]
2989 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
2990 let fs = FakeFs::new(cx.background());
2991 fs.insert_tree(
2992 "/root",
2993 json!({
2994 "lib": {
2995 "a": {
2996 "a.txt": ""
2997 },
2998 "b": {
2999 "b.txt": ""
3000 }
3001 }
3002 }),
3003 )
3004 .await;
3005 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3006 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3007
3008 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3009 let tree = Worktree::local(
3010 client,
3011 Arc::from(Path::new("/root")),
3012 true,
3013 fs.clone(),
3014 Default::default(),
3015 &mut cx.to_async(),
3016 )
3017 .await
3018 .unwrap();
3019
3020 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3021 .await;
3022
3023 tree.read_with(cx, |tree, _| {
3024 assert_eq!(
3025 tree.entries(false)
3026 .map(|entry| entry.path.as_ref())
3027 .collect::<Vec<_>>(),
3028 vec![
3029 Path::new(""),
3030 Path::new("lib"),
3031 Path::new("lib/a"),
3032 Path::new("lib/a/a.txt"),
3033 Path::new("lib/a/lib"),
3034 Path::new("lib/b"),
3035 Path::new("lib/b/b.txt"),
3036 Path::new("lib/b/lib"),
3037 ]
3038 );
3039 });
3040
3041 fs.rename(
3042 Path::new("/root/lib/a/lib"),
3043 Path::new("/root/lib/a/lib-2"),
3044 Default::default(),
3045 )
3046 .await
3047 .unwrap();
3048 executor.run_until_parked();
3049 tree.read_with(cx, |tree, _| {
3050 assert_eq!(
3051 tree.entries(false)
3052 .map(|entry| entry.path.as_ref())
3053 .collect::<Vec<_>>(),
3054 vec![
3055 Path::new(""),
3056 Path::new("lib"),
3057 Path::new("lib/a"),
3058 Path::new("lib/a/a.txt"),
3059 Path::new("lib/a/lib-2"),
3060 Path::new("lib/b"),
3061 Path::new("lib/b/b.txt"),
3062 Path::new("lib/b/lib"),
3063 ]
3064 );
3065 });
3066 }
3067
3068 #[gpui::test]
3069 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3070 let parent_dir = temp_tree(json!({
3071 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3072 "tree": {
3073 ".git": {},
3074 ".gitignore": "ignored-dir\n",
3075 "tracked-dir": {
3076 "tracked-file1": "",
3077 "ancestor-ignored-file1": "",
3078 },
3079 "ignored-dir": {
3080 "ignored-file1": ""
3081 }
3082 }
3083 }));
3084 let dir = parent_dir.path().join("tree");
3085
3086 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3087
3088 let tree = Worktree::local(
3089 client,
3090 dir.as_path(),
3091 true,
3092 Arc::new(RealFs),
3093 Default::default(),
3094 &mut cx.to_async(),
3095 )
3096 .await
3097 .unwrap();
3098 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3099 .await;
3100 tree.flush_fs_events(cx).await;
3101 cx.read(|cx| {
3102 let tree = tree.read(cx);
3103 assert!(
3104 !tree
3105 .entry_for_path("tracked-dir/tracked-file1")
3106 .unwrap()
3107 .is_ignored
3108 );
3109 assert!(
3110 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3111 .unwrap()
3112 .is_ignored
3113 );
3114 assert!(
3115 tree.entry_for_path("ignored-dir/ignored-file1")
3116 .unwrap()
3117 .is_ignored
3118 );
3119 });
3120
3121 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3122 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3123 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3124 tree.flush_fs_events(cx).await;
3125 cx.read(|cx| {
3126 let tree = tree.read(cx);
3127 assert!(
3128 !tree
3129 .entry_for_path("tracked-dir/tracked-file2")
3130 .unwrap()
3131 .is_ignored
3132 );
3133 assert!(
3134 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3135 .unwrap()
3136 .is_ignored
3137 );
3138 assert!(
3139 tree.entry_for_path("ignored-dir/ignored-file2")
3140 .unwrap()
3141 .is_ignored
3142 );
3143 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3144 });
3145 }
3146
3147 #[gpui::test]
3148 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3149 let fs = FakeFs::new(cx.background());
3150 fs.insert_tree(
3151 "/root",
3152 json!({
3153 "dir1": {
3154 ".git": {
3155 "HEAD": "abc"
3156 },
3157 "deps": {
3158 "dep1": {
3159 ".git": {},
3160 "src": {
3161 "a.txt": ""
3162 }
3163 }
3164 },
3165 "src": {
3166 "b.txt": ""
3167 }
3168 },
3169 "c.txt": ""
3170 }),
3171 )
3172 .await;
3173
3174 let http_client = FakeHttpClient::with_404_response();
        let client = cx.read(|cx| Client::new(http_client, cx));
3176 let tree = Worktree::local(
3177 client,
3178 Arc::from(Path::new("/root")),
3179 true,
3180 fs.clone(),
3181 Default::default(),
3182 &mut cx.to_async(),
3183 )
3184 .await
3185 .unwrap();
3186
3187 cx.foreground().run_until_parked();
3188
3189 tree.read_with(cx, |tree, cx| {
3190 let tree = tree.as_local().unwrap();
3191
3192 assert!(tree
3193 .git_repository_for_file_path("c.txt".as_ref())
3194 .is_none());
3195
3196 let repo = tree
3197 .git_repository_for_file_path("dir1/src/b.txt".as_ref())
3198 .unwrap();
3199
3200 // Need to update the file system for anything involving git
3201 // Goal: Make this test pass
3202 // Up Next: Invalidating git repos!
3203 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3204 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3205
3206 let repo = tree
3207 .git_repository_for_file_path("dir1/deps/dep1/src/a.txt".as_ref())
3208 .unwrap();
3209
3210 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
            let repo = tree
                .git_repository_for_git_data("dir/.git/HEAD".as_ref())
                .unwrap();
            assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3214
3215 assert!(tree.does_git_repository_track_file_path(&repo, "dir1/src/b.txt".as_ref()));
3216 assert!(!tree
3217 .does_git_repository_track_file_path(&repo, "dir1/deps/dep1/src/a.txt".as_ref()));
3218 });
3219 }
3220
3221 #[gpui::test]
3222 async fn test_write_file(cx: &mut TestAppContext) {
3223 let dir = temp_tree(json!({
3224 ".git": {},
3225 ".gitignore": "ignored-dir\n",
3226 "tracked-dir": {},
3227 "ignored-dir": {}
3228 }));
3229
3230 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3231
3232 let tree = Worktree::local(
3233 client,
3234 dir.path(),
3235 true,
3236 Arc::new(RealFs),
3237 Default::default(),
3238 &mut cx.to_async(),
3239 )
3240 .await
3241 .unwrap();
3242 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3243 .await;
3244 tree.flush_fs_events(cx).await;
3245
3246 tree.update(cx, |tree, cx| {
3247 tree.as_local().unwrap().write_file(
3248 Path::new("tracked-dir/file.txt"),
3249 "hello".into(),
3250 Default::default(),
3251 cx,
3252 )
3253 })
3254 .await
3255 .unwrap();
3256 tree.update(cx, |tree, cx| {
3257 tree.as_local().unwrap().write_file(
3258 Path::new("ignored-dir/file.txt"),
3259 "world".into(),
3260 Default::default(),
3261 cx,
3262 )
3263 })
3264 .await
3265 .unwrap();
3266
3267 tree.read_with(cx, |tree, _| {
3268 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3269 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3270 assert!(!tracked.is_ignored);
3271 assert!(ignored.is_ignored);
3272 });
3273 }
3274
3275 #[gpui::test(iterations = 30)]
3276 async fn test_create_directory(cx: &mut TestAppContext) {
3277 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3278
3279 let fs = FakeFs::new(cx.background());
3280 fs.insert_tree(
3281 "/a",
3282 json!({
3283 "b": {},
3284 "c": {},
3285 "d": {},
3286 }),
3287 )
3288 .await;
3289
3290 let tree = Worktree::local(
3291 client,
3292 "/a".as_ref(),
3293 true,
3294 fs,
3295 Default::default(),
3296 &mut cx.to_async(),
3297 )
3298 .await
3299 .unwrap();
3300
3301 let entry = tree
3302 .update(cx, |tree, cx| {
3303 tree.as_local_mut()
3304 .unwrap()
3305 .create_entry("a/e".as_ref(), true, cx)
3306 })
3307 .await
3308 .unwrap();
3309 assert!(entry.is_dir());
3310
3311 cx.foreground().run_until_parked();
3312 tree.read_with(cx, |tree, _| {
3313 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3314 });
3315 }
3316
3317 #[gpui::test(iterations = 100)]
3318 fn test_random(mut rng: StdRng) {
3319 let operations = env::var("OPERATIONS")
3320 .map(|o| o.parse().unwrap())
3321 .unwrap_or(40);
3322 let initial_entries = env::var("INITIAL_ENTRIES")
3323 .map(|o| o.parse().unwrap())
3324 .unwrap_or(20);
3325
3326 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
3327 for _ in 0..initial_entries {
3328 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
3329 }
3330 log::info!("Generated initial tree");
3331
3332 let (notify_tx, _notify_rx) = mpsc::unbounded();
3333 let fs = Arc::new(RealFs);
3334 let next_entry_id = Arc::new(AtomicUsize::new(0));
3335 let mut initial_snapshot = LocalSnapshot {
3336 abs_path: root_dir.path().into(),
3337 removed_entry_ids: Default::default(),
3338 ignores_by_parent_abs_path: Default::default(),
3339 git_repositories: Default::default(),
3340 next_entry_id: next_entry_id.clone(),
3341 snapshot: Snapshot {
3342 id: WorktreeId::from_usize(0),
3343 entries_by_path: Default::default(),
3344 entries_by_id: Default::default(),
3345 root_name: Default::default(),
3346 root_char_bag: Default::default(),
3347 scan_id: 0,
3348 is_complete: true,
3349 },
3350 extension_counts: Default::default(),
3351 };
3352 initial_snapshot.insert_entry(
3353 Entry::new(
3354 Path::new("").into(),
3355 &smol::block_on(fs.metadata(root_dir.path()))
3356 .unwrap()
3357 .unwrap(),
3358 &next_entry_id,
3359 Default::default(),
3360 ),
3361 fs.as_ref(),
3362 );
3363 let mut scanner = BackgroundScanner::new(
3364 Arc::new(Mutex::new(initial_snapshot.clone())),
3365 notify_tx,
3366 fs.clone(),
3367 Arc::new(gpui::executor::Background::new()),
3368 );
3369 smol::block_on(scanner.scan_dirs()).unwrap();
3370 scanner.snapshot().check_invariants();
3371
3372 let mut events = Vec::new();
3373 let mut snapshots = Vec::new();
3374 let mut mutations_len = operations;
3375 while mutations_len > 1 {
3376 if !events.is_empty() && rng.gen_bool(0.4) {
3377 let len = rng.gen_range(0..=events.len());
3378 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
3379 log::info!("Delivering events: {:#?}", to_deliver);
3380 smol::block_on(scanner.process_events(to_deliver));
3381 scanner.snapshot().check_invariants();
3382 } else {
3383 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
3384 mutations_len -= 1;
3385 }
3386
3387 if rng.gen_bool(0.2) {
3388 snapshots.push(scanner.snapshot());
3389 }
3390 }
3391 log::info!("Quiescing: {:#?}", events);
3392 smol::block_on(scanner.process_events(events));
3393 scanner.snapshot().check_invariants();
3394
3395 let (notify_tx, _notify_rx) = mpsc::unbounded();
3396 let mut new_scanner = BackgroundScanner::new(
3397 Arc::new(Mutex::new(initial_snapshot)),
3398 notify_tx,
3399 scanner.fs.clone(),
3400 scanner.executor.clone(),
3401 );
3402 smol::block_on(new_scanner.scan_dirs()).unwrap();
3403 assert_eq!(
3404 scanner.snapshot().to_vec(true),
3405 new_scanner.snapshot().to_vec(true)
3406 );
3407
3408 for mut prev_snapshot in snapshots {
3409 let include_ignored = rng.gen::<bool>();
3410 if !include_ignored {
3411 let mut entries_by_path_edits = Vec::new();
3412 let mut entries_by_id_edits = Vec::new();
3413 for entry in prev_snapshot
3414 .entries_by_id
3415 .cursor::<()>()
3416 .filter(|e| e.is_ignored)
3417 {
3418 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3419 entries_by_id_edits.push(Edit::Remove(entry.id));
3420 }
3421
3422 prev_snapshot
3423 .entries_by_path
3424 .edit(entries_by_path_edits, &());
3425 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3426 }
3427
3428 let update = scanner
3429 .snapshot()
3430 .build_update(&prev_snapshot, 0, 0, include_ignored);
3431 prev_snapshot.apply_remote_update(update).unwrap();
3432 assert_eq!(
3433 prev_snapshot.to_vec(true),
3434 scanner.snapshot().to_vec(include_ignored)
3435 );
3436 }
3437 }
3438
3439 fn randomly_mutate_tree(
3440 root_path: &Path,
3441 insertion_probability: f64,
3442 rng: &mut impl Rng,
3443 ) -> Result<Vec<fsevent::Event>> {
3444 let root_path = root_path.canonicalize().unwrap();
3445 let (dirs, files) = read_dir_recursive(root_path.clone());
3446
3447 let mut events = Vec::new();
3448 let mut record_event = |path: PathBuf| {
3449 events.push(fsevent::Event {
3450 event_id: SystemTime::now()
3451 .duration_since(UNIX_EPOCH)
3452 .unwrap()
3453 .as_secs(),
3454 flags: fsevent::StreamFlags::empty(),
3455 path,
3456 });
3457 };
3458
3459 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3460 let path = dirs.choose(rng).unwrap();
3461 let new_path = path.join(gen_name(rng));
3462
3463 if rng.gen() {
3464 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
3465 std::fs::create_dir(&new_path)?;
3466 } else {
3467 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
3468 std::fs::write(&new_path, "")?;
3469 }
3470 record_event(new_path);
3471 } else if rng.gen_bool(0.05) {
3472 let ignore_dir_path = dirs.choose(rng).unwrap();
3473 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3474
3475 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3476 let files_to_ignore = {
3477 let len = rng.gen_range(0..=subfiles.len());
3478 subfiles.choose_multiple(rng, len)
3479 };
3480 let dirs_to_ignore = {
3481 let len = rng.gen_range(0..subdirs.len());
3482 subdirs.choose_multiple(rng, len)
3483 };
3484
3485 let mut ignore_contents = String::new();
3486 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3487 writeln!(
3488 ignore_contents,
3489 "{}",
3490 path_to_ignore
3491 .strip_prefix(&ignore_dir_path)?
3492 .to_str()
3493 .unwrap()
3494 )
3495 .unwrap();
3496 }
3497 log::info!(
3498 "Creating {:?} with contents:\n{}",
3499 ignore_path.strip_prefix(&root_path)?,
3500 ignore_contents
3501 );
3502 std::fs::write(&ignore_path, ignore_contents).unwrap();
3503 record_event(ignore_path);
3504 } else {
3505 let old_path = {
3506 let file_path = files.choose(rng);
3507 let dir_path = dirs[1..].choose(rng);
3508 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3509 };
3510
3511 let is_rename = rng.gen();
3512 if is_rename {
3513 let new_path_parent = dirs
3514 .iter()
3515 .filter(|d| !d.starts_with(old_path))
3516 .choose(rng)
3517 .unwrap();
3518
3519 let overwrite_existing_dir =
3520 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3521 let new_path = if overwrite_existing_dir {
3522 std::fs::remove_dir_all(&new_path_parent).ok();
3523 new_path_parent.to_path_buf()
3524 } else {
3525 new_path_parent.join(gen_name(rng))
3526 };
3527
3528 log::info!(
3529 "Renaming {:?} to {}{:?}",
3530 old_path.strip_prefix(&root_path)?,
3531 if overwrite_existing_dir {
3532 "overwrite "
3533 } else {
3534 ""
3535 },
3536 new_path.strip_prefix(&root_path)?
3537 );
3538 std::fs::rename(&old_path, &new_path)?;
3539 record_event(old_path.clone());
3540 record_event(new_path);
3541 } else if old_path.is_dir() {
3542 let (dirs, files) = read_dir_recursive(old_path.clone());
3543
3544 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3545 std::fs::remove_dir_all(&old_path).unwrap();
3546 for file in files {
3547 record_event(file);
3548 }
3549 for dir in dirs {
3550 record_event(dir);
3551 }
3552 } else {
3553 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3554 std::fs::remove_file(old_path).unwrap();
3555 record_event(old_path.clone());
3556 }
3557 }
3558
3559 Ok(events)
3560 }
3561
3562 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3563 let child_entries = std::fs::read_dir(&path).unwrap();
3564 let mut dirs = vec![path];
3565 let mut files = Vec::new();
3566 for child_entry in child_entries {
3567 let child_path = child_entry.unwrap().path();
3568 if child_path.is_dir() {
3569 let (child_dirs, child_files) = read_dir_recursive(child_path);
3570 dirs.extend(child_dirs);
3571 files.extend(child_files);
3572 } else {
3573 files.push(child_path);
3574 }
3575 }
3576 (dirs, files)
3577 }
3578
3579 fn gen_name(rng: &mut impl Rng) -> String {
3580 (0..6)
3581 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3582 .map(char::from)
3583 .collect()
3584 }
3585
3586 impl LocalSnapshot {
3587 fn check_invariants(&self) {
3588 let mut files = self.files(true, 0);
3589 let mut visible_files = self.files(false, 0);
3590 for entry in self.entries_by_path.cursor::<()>() {
3591 if entry.is_file() {
3592 assert_eq!(files.next().unwrap().inode, entry.inode);
3593 if !entry.is_ignored {
3594 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3595 }
3596 }
3597 }
3598 assert!(files.next().is_none());
3599 assert!(visible_files.next().is_none());
3600
3601 let mut bfs_paths = Vec::new();
3602 let mut stack = vec![Path::new("")];
3603 while let Some(path) = stack.pop() {
3604 bfs_paths.push(path);
3605 let ix = stack.len();
3606 for child_entry in self.child_entries(path) {
3607 stack.insert(ix, &child_entry.path);
3608 }
3609 }
3610
3611 let dfs_paths_via_iter = self
3612 .entries_by_path
3613 .cursor::<()>()
3614 .map(|e| e.path.as_ref())
3615 .collect::<Vec<_>>();
3616 assert_eq!(bfs_paths, dfs_paths_via_iter);
3617
3618 let dfs_paths_via_traversal = self
3619 .entries(true)
3620 .map(|e| e.path.as_ref())
3621 .collect::<Vec<_>>();
3622 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3623
3624 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3625 let ignore_parent_path =
3626 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3627 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3628 assert!(self
3629 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3630 .is_some());
3631 }
3632
3633 // Ensure extension counts are correct.
3634 let mut expected_extension_counts = HashMap::default();
3635 for extension in self.entries(false).filter_map(|e| e.path.extension()) {
3636 *expected_extension_counts
3637 .entry(extension.into())
3638 .or_insert(0) += 1;
3639 }
3640 assert_eq!(self.extension_counts, expected_extension_counts);
3641 }
3642
3643 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3644 let mut paths = Vec::new();
3645 for entry in self.entries_by_path.cursor::<()>() {
3646 if include_ignored || !entry.is_ignored {
3647 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3648 }
3649 }
3650 paths.sort_by(|a, b| a.0.cmp(b.0));
3651 paths
3652 }
3653 }
3654}