1use super::{ignore::IgnoreStack, DiagnosticSummary};
2use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
3use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
4use anyhow::{anyhow, Context, Result};
5use client::{proto, Client};
6use clock::ReplicaId;
7use collections::{HashMap, VecDeque};
8use fs::LineEnding;
9use fs::{repository::GitRepository, Fs};
10use futures::{
11 channel::{
12 mpsc::{self, UnboundedSender},
13 oneshot,
14 },
15 Stream, StreamExt,
16};
17use fuzzy::CharBag;
18use git::{DOT_GIT, GITIGNORE};
19use gpui::{
20 executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
21 Task,
22};
23use language::{
24 proto::{deserialize_version, serialize_line_ending, serialize_version},
25 Buffer, DiagnosticEntry, Rope,
26};
27use parking_lot::Mutex;
28use postage::{
29 prelude::{Sink as _, Stream as _},
30 watch,
31};
32use rope::point_utf16::PointUtf16;
34use smol::channel::{self, Sender};
35use std::{
36 any::Any,
37 cmp::{self, Ordering},
38 convert::TryFrom,
39 ffi::{OsStr, OsString},
40 fmt,
41 future::Future,
42 mem,
43 ops::{Deref, DerefMut},
44 os::unix::prelude::{OsStrExt, OsStringExt},
45 path::{Path, PathBuf},
46 sync::{atomic::AtomicUsize, Arc},
47 task::Poll,
48 time::{Duration, SystemTime},
49};
50use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
51use util::{ResultExt, TryFutureExt};
52
53#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
54pub struct WorktreeId(usize);
55
56#[allow(clippy::large_enum_variant)]
57pub enum Worktree {
58 Local(LocalWorktree),
59 Remote(RemoteWorktree),
60}
61
62pub struct LocalWorktree {
63 snapshot: LocalSnapshot,
64 background_snapshot: Arc<Mutex<LocalSnapshot>>,
65 last_scan_state_rx: watch::Receiver<ScanState>,
66 _background_scanner_task: Option<Task<()>>,
67 poll_task: Option<Task<()>>,
68 share: Option<ShareState>,
69 diagnostics: HashMap<Arc<Path>, Vec<DiagnosticEntry<PointUtf16>>>,
70 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
71 client: Arc<Client>,
72 fs: Arc<dyn Fs>,
73 visible: bool,
74}
75
76pub struct RemoteWorktree {
77 pub snapshot: Snapshot,
78 pub(crate) background_snapshot: Arc<Mutex<Snapshot>>,
79 project_id: u64,
80 client: Arc<Client>,
81 updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
82 snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
83 replica_id: ReplicaId,
84 diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
85 visible: bool,
86}
87
88#[derive(Clone)]
89pub struct Snapshot {
90 id: WorktreeId,
91 root_name: String,
92 root_char_bag: CharBag,
93 entries_by_path: SumTree<Entry>,
94 entries_by_id: SumTree<PathEntry>,
95 scan_id: usize,
96 is_complete: bool,
97}
98
99#[derive(Clone)]
100pub struct GitRepositoryEntry {
101 pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
103 pub(crate) scan_id: usize,
104 // Path to folder containing the .git file or directory
105 pub(crate) content_path: Arc<Path>,
106 // Path to the actual .git folder.
107 // Note: if .git is a file, this points to the folder indicated by the .git file
108 pub(crate) git_dir_path: Arc<Path>,
109}
110
111impl std::fmt::Debug for GitRepositoryEntry {
112 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
113 f.debug_struct("GitRepositoryEntry")
114 .field("content_path", &self.content_path)
115 .field("git_dir_path", &self.git_dir_path)
116 .field("libgit_repository", &"LibGitRepository")
117 .finish()
118 }
119}
120
121pub struct LocalSnapshot {
122 abs_path: Arc<Path>,
123 ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
124 git_repositories: Vec<GitRepositoryEntry>,
125 removed_entry_ids: HashMap<u64, ProjectEntryId>,
126 next_entry_id: Arc<AtomicUsize>,
127 snapshot: Snapshot,
128 extension_counts: HashMap<OsString, usize>,
129}
130
131impl Clone for LocalSnapshot {
132 fn clone(&self) -> Self {
133 Self {
134 abs_path: self.abs_path.clone(),
135 ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
136 git_repositories: self.git_repositories.iter().cloned().collect(),
137 removed_entry_ids: self.removed_entry_ids.clone(),
138 next_entry_id: self.next_entry_id.clone(),
139 snapshot: self.snapshot.clone(),
140 extension_counts: self.extension_counts.clone(),
141 }
142 }
143}
144
145impl Deref for LocalSnapshot {
146 type Target = Snapshot;
147
148 fn deref(&self) -> &Self::Target {
149 &self.snapshot
150 }
151}
152
153impl DerefMut for LocalSnapshot {
154 fn deref_mut(&mut self) -> &mut Self::Target {
155 &mut self.snapshot
156 }
157}
158
159#[derive(Clone, Debug)]
enum ScanState {
    /// The worktree is not currently scanning.
    Idle,
    /// The worktree is performing its initial scan of the filesystem.
    Initializing,
    /// The worktree is updating in response to filesystem events.
    Updating,
    /// The worktree scan encountered an error.
    Err(Arc<anyhow::Error>),
}
168
169struct ShareState {
170 project_id: u64,
171 snapshots_tx: watch::Sender<LocalSnapshot>,
172 _maintain_remote_snapshot: Option<Task<Option<()>>>,
173}
174
175pub enum Event {
176 UpdatedEntries,
177 UpdatedGitRepositories(Vec<GitRepositoryEntry>),
178}
179
180impl Entity for Worktree {
181 type Event = Event;
182}
183
184impl Worktree {
185 pub async fn local(
186 client: Arc<Client>,
187 path: impl Into<Arc<Path>>,
188 visible: bool,
189 fs: Arc<dyn Fs>,
190 next_entry_id: Arc<AtomicUsize>,
191 cx: &mut AsyncAppContext,
192 ) -> Result<ModelHandle<Self>> {
193 let (tree, scan_states_tx) =
194 LocalWorktree::create(client, path, visible, fs.clone(), next_entry_id, cx).await?;
195 tree.update(cx, |tree, cx| {
196 let tree = tree.as_local_mut().unwrap();
197 let abs_path = tree.abs_path().clone();
198 let background_snapshot = tree.background_snapshot.clone();
199 let background = cx.background().clone();
200 tree._background_scanner_task = Some(cx.background().spawn(async move {
201 let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
202 let scanner =
203 BackgroundScanner::new(background_snapshot, scan_states_tx, fs, background);
204 scanner.run(events).await;
205 }));
206 });
207 Ok(tree)
208 }
209
210 pub fn remote(
211 project_remote_id: u64,
212 replica_id: ReplicaId,
213 worktree: proto::WorktreeMetadata,
214 client: Arc<Client>,
215 cx: &mut MutableAppContext,
216 ) -> ModelHandle<Self> {
217 let remote_id = worktree.id;
218 let root_char_bag: CharBag = worktree
219 .root_name
220 .chars()
221 .map(|c| c.to_ascii_lowercase())
222 .collect();
223 let root_name = worktree.root_name.clone();
224 let visible = worktree.visible;
225 let snapshot = Snapshot {
226 id: WorktreeId(remote_id as usize),
227 root_name,
228 root_char_bag,
229 entries_by_path: Default::default(),
230 entries_by_id: Default::default(),
231 scan_id: 0,
232 is_complete: false,
233 };
234
235 let (updates_tx, mut updates_rx) = mpsc::unbounded();
236 let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
237 let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
238 let worktree_handle = cx.add_model(|_: &mut ModelContext<Worktree>| {
239 Worktree::Remote(RemoteWorktree {
240 project_id: project_remote_id,
241 replica_id,
242 snapshot: snapshot.clone(),
243 background_snapshot: background_snapshot.clone(),
244 updates_tx: Some(updates_tx),
245 snapshot_subscriptions: Default::default(),
246 client: client.clone(),
247 diagnostic_summaries: Default::default(),
248 visible,
249 })
250 });
251
252 cx.background()
253 .spawn(async move {
254 while let Some(update) = updates_rx.next().await {
255 if let Err(error) = background_snapshot.lock().apply_remote_update(update) {
256 log::error!("error applying worktree update: {}", error);
257 }
258 snapshot_updated_tx.send(()).await.ok();
259 }
260 })
261 .detach();
262
263 cx.spawn(|mut cx| {
264 let this = worktree_handle.downgrade();
265 async move {
266 while (snapshot_updated_rx.recv().await).is_some() {
267 if let Some(this) = this.upgrade(&cx) {
268 this.update(&mut cx, |this, cx| {
269 this.poll_snapshot(cx);
270 let this = this.as_remote_mut().unwrap();
271 while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
272 if this.observed_snapshot(*scan_id) {
273 let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
274 let _ = tx.send(());
275 } else {
276 break;
277 }
278 }
279 });
280 } else {
281 break;
282 }
283 }
284 }
285 })
286 .detach();
287
288 worktree_handle
289 }
290
291 pub fn as_local(&self) -> Option<&LocalWorktree> {
292 if let Worktree::Local(worktree) = self {
293 Some(worktree)
294 } else {
295 None
296 }
297 }
298
299 pub fn as_remote(&self) -> Option<&RemoteWorktree> {
300 if let Worktree::Remote(worktree) = self {
301 Some(worktree)
302 } else {
303 None
304 }
305 }
306
307 pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
308 if let Worktree::Local(worktree) = self {
309 Some(worktree)
310 } else {
311 None
312 }
313 }
314
315 pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
316 if let Worktree::Remote(worktree) = self {
317 Some(worktree)
318 } else {
319 None
320 }
321 }
322
323 pub fn is_local(&self) -> bool {
324 matches!(self, Worktree::Local(_))
325 }
326
327 pub fn is_remote(&self) -> bool {
328 !self.is_local()
329 }
330
331 pub fn snapshot(&self) -> Snapshot {
332 match self {
333 Worktree::Local(worktree) => worktree.snapshot().snapshot,
334 Worktree::Remote(worktree) => worktree.snapshot(),
335 }
336 }
337
338 pub fn scan_id(&self) -> usize {
339 match self {
340 Worktree::Local(worktree) => worktree.snapshot.scan_id,
341 Worktree::Remote(worktree) => worktree.snapshot.scan_id,
342 }
343 }
344
345 pub fn is_visible(&self) -> bool {
346 match self {
347 Worktree::Local(worktree) => worktree.visible,
348 Worktree::Remote(worktree) => worktree.visible,
349 }
350 }
351
352 pub fn replica_id(&self) -> ReplicaId {
353 match self {
354 Worktree::Local(_) => 0,
355 Worktree::Remote(worktree) => worktree.replica_id,
356 }
357 }
358
359 pub fn diagnostic_summaries(
360 &self,
361 ) -> impl Iterator<Item = (Arc<Path>, DiagnosticSummary)> + '_ {
362 match self {
363 Worktree::Local(worktree) => &worktree.diagnostic_summaries,
364 Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
365 }
366 .iter()
367 .map(|(path, summary)| (path.0.clone(), *summary))
368 }
369
370 fn poll_snapshot(&mut self, cx: &mut ModelContext<Self>) {
371 match self {
372 Self::Local(worktree) => worktree.poll_snapshot(false, cx),
373 Self::Remote(worktree) => worktree.poll_snapshot(cx),
374 };
375 }
376}
377
378impl LocalWorktree {
379 async fn create(
380 client: Arc<Client>,
381 path: impl Into<Arc<Path>>,
382 visible: bool,
383 fs: Arc<dyn Fs>,
384 next_entry_id: Arc<AtomicUsize>,
385 cx: &mut AsyncAppContext,
386 ) -> Result<(ModelHandle<Worktree>, UnboundedSender<ScanState>)> {
387 let abs_path = path.into();
388 let path: Arc<Path> = Arc::from(Path::new(""));
389
        // After determining whether the root entry is a file or a directory, populate the
        // snapshot's "root name", which will be used for fuzzy matching.
392 let root_name = abs_path
393 .file_name()
394 .map_or(String::new(), |f| f.to_string_lossy().to_string());
395 let root_char_bag = root_name.chars().map(|c| c.to_ascii_lowercase()).collect();
396 let metadata = fs
397 .metadata(&abs_path)
398 .await
399 .context("failed to stat worktree path")?;
400
401 let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
402 let (mut last_scan_state_tx, last_scan_state_rx) =
403 watch::channel_with(ScanState::Initializing);
404 let tree = cx.add_model(move |cx: &mut ModelContext<Worktree>| {
405 let mut snapshot = LocalSnapshot {
406 abs_path,
407 ignores_by_parent_abs_path: Default::default(),
408 git_repositories: Default::default(),
409 removed_entry_ids: Default::default(),
410 next_entry_id,
411 snapshot: Snapshot {
412 id: WorktreeId::from_usize(cx.model_id()),
413 root_name: root_name.clone(),
414 root_char_bag,
415 entries_by_path: Default::default(),
416 entries_by_id: Default::default(),
417 scan_id: 0,
418 is_complete: true,
419 },
420 extension_counts: Default::default(),
421 };
422 if let Some(metadata) = metadata {
423 let entry = Entry::new(
424 path,
425 &metadata,
426 &snapshot.next_entry_id,
427 snapshot.root_char_bag,
428 );
429 snapshot.insert_entry(entry, fs.as_ref());
430 }
431
432 let tree = Self {
433 snapshot: snapshot.clone(),
434 background_snapshot: Arc::new(Mutex::new(snapshot)),
435 last_scan_state_rx,
436 _background_scanner_task: None,
437 share: None,
438 poll_task: None,
439 diagnostics: Default::default(),
440 diagnostic_summaries: Default::default(),
441 client,
442 fs,
443 visible,
444 };
445
446 cx.spawn_weak(|this, mut cx| async move {
447 while let Some(scan_state) = scan_states_rx.next().await {
448 if let Some(this) = this.upgrade(&cx) {
449 last_scan_state_tx.blocking_send(scan_state).ok();
450 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
451 } else {
452 break;
453 }
454 }
455 })
456 .detach();
457
458 Worktree::Local(tree)
459 });
460
461 Ok((tree, scan_states_tx))
462 }
463
464 pub fn contains_abs_path(&self, path: &Path) -> bool {
465 path.starts_with(&self.abs_path)
466 }
467
468 fn absolutize(&self, path: &Path) -> PathBuf {
469 if path.file_name().is_some() {
470 self.abs_path.join(path)
471 } else {
472 self.abs_path.to_path_buf()
473 }
474 }
475
476 pub(crate) fn load_buffer(
477 &mut self,
478 path: &Path,
479 cx: &mut ModelContext<Worktree>,
480 ) -> Task<Result<ModelHandle<Buffer>>> {
481 let path = Arc::from(path);
482 cx.spawn(move |this, mut cx| async move {
483 let (file, contents, diff_base) = this
484 .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
485 .await?;
486 Ok(cx.add_model(|cx| {
487 let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
488 buffer.git_diff_recalc(cx);
489 buffer
490 }))
491 })
492 }
493
494 pub fn diagnostics_for_path(&self, path: &Path) -> Option<Vec<DiagnosticEntry<PointUtf16>>> {
495 self.diagnostics.get(path).cloned()
496 }
497
498 pub fn update_diagnostics(
499 &mut self,
500 language_server_id: usize,
501 worktree_path: Arc<Path>,
502 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
503 _: &mut ModelContext<Worktree>,
504 ) -> Result<bool> {
505 self.diagnostics.remove(&worktree_path);
506 let old_summary = self
507 .diagnostic_summaries
508 .remove(&PathKey(worktree_path.clone()))
509 .unwrap_or_default();
510 let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics);
511 if !new_summary.is_empty() {
512 self.diagnostic_summaries
513 .insert(PathKey(worktree_path.clone()), new_summary);
514 self.diagnostics.insert(worktree_path.clone(), diagnostics);
515 }
516
517 let updated = !old_summary.is_empty() || !new_summary.is_empty();
518 if updated {
519 if let Some(share) = self.share.as_ref() {
520 self.client
521 .send(proto::UpdateDiagnosticSummary {
522 project_id: share.project_id,
523 worktree_id: self.id().to_proto(),
524 summary: Some(proto::DiagnosticSummary {
525 path: worktree_path.to_string_lossy().to_string(),
526 language_server_id: language_server_id as u64,
527 error_count: new_summary.error_count as u32,
528 warning_count: new_summary.warning_count as u32,
529 }),
530 })
531 .log_err();
532 }
533 }
534
535 Ok(updated)
536 }
537
538 fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
539 self.poll_task.take();
540
541 match self.scan_state() {
542 ScanState::Idle => {
543 let new_snapshot = self.background_snapshot.lock().clone();
544 let updated_repos = Self::changed_repos(
545 &self.snapshot.git_repositories,
546 &new_snapshot.git_repositories,
547 );
548 self.snapshot = new_snapshot;
549
550 if let Some(share) = self.share.as_mut() {
551 *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
552 }
553
554 cx.emit(Event::UpdatedEntries);
555
556 if !updated_repos.is_empty() {
557 cx.emit(Event::UpdatedGitRepositories(updated_repos));
558 }
559 }
560
561 ScanState::Initializing => {
562 let is_fake_fs = self.fs.is_fake();
563
564 let new_snapshot = self.background_snapshot.lock().clone();
565 let updated_repos = Self::changed_repos(
566 &self.snapshot.git_repositories,
567 &new_snapshot.git_repositories,
568 );
569 self.snapshot = new_snapshot;
570
571 self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
572 if is_fake_fs {
573 #[cfg(any(test, feature = "test-support"))]
574 cx.background().simulate_random_delay().await;
575 } else {
576 smol::Timer::after(Duration::from_millis(100)).await;
577 }
578 if let Some(this) = this.upgrade(&cx) {
579 this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
580 }
581 }));
582
583 cx.emit(Event::UpdatedEntries);
584
585 if !updated_repos.is_empty() {
586 cx.emit(Event::UpdatedGitRepositories(updated_repos));
587 }
588 }
589
590 _ => {
591 if force {
592 self.snapshot = self.background_snapshot.lock().clone();
593 }
594 }
595 }
596
597 cx.notify();
598 }
599
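    // Computes which git repositories differ between two snapshots: a repository is
    // considered changed if the other list contains no entry with the same
    // `git_dir_path` and `scan_id`. The diff is taken in both directions, so
    // additions, removals, and rescanned repositories are all reported.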
600 fn changed_repos(
601 old_repos: &[GitRepositoryEntry],
602 new_repos: &[GitRepositoryEntry],
603 ) -> Vec<GitRepositoryEntry> {
604 fn diff<'a>(
605 a: &'a [GitRepositoryEntry],
606 b: &'a [GitRepositoryEntry],
607 updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
608 ) {
609 for a_repo in a {
610 let matched = b.iter().find(|b_repo| {
611 a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
612 });
613
614 if matched.is_none() {
615 updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
616 }
617 }
618 }
619
620 let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
621
622 diff(old_repos, new_repos, &mut updated);
623 diff(new_repos, old_repos, &mut updated);
624
625 updated.into_values().collect()
626 }
627
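    // Returns a future that resolves once the background scanner is no longer
    // initializing or updating.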
628 pub fn scan_complete(&self) -> impl Future<Output = ()> {
629 let mut scan_state_rx = self.last_scan_state_rx.clone();
630 async move {
631 let mut scan_state = Some(scan_state_rx.borrow().clone());
632 while let Some(ScanState::Initializing | ScanState::Updating) = scan_state {
633 scan_state = scan_state_rx.recv().await;
634 }
635 }
636 }
637
638 fn scan_state(&self) -> ScanState {
639 self.last_scan_state_rx.borrow().clone()
640 }
641
642 pub fn snapshot(&self) -> LocalSnapshot {
643 self.snapshot.clone()
644 }
645
646 pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
647 proto::WorktreeMetadata {
648 id: self.id().to_proto(),
649 root_name: self.root_name().to_string(),
650 visible: self.visible,
651 }
652 }
653
654 fn load(
655 &self,
656 path: &Path,
657 cx: &mut ModelContext<Worktree>,
658 ) -> Task<Result<(File, String, Option<String>)>> {
659 let handle = cx.handle();
660 let path = Arc::from(path);
661 let abs_path = self.absolutize(&path);
662 let fs = self.fs.clone();
663 let snapshot = self.snapshot();
664
665 cx.spawn(|this, mut cx| async move {
666 let text = fs.load(&abs_path).await?;
667
668 let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
669 if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
670 let repo_relative = repo_relative.to_owned();
671 cx.background()
672 .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
673 .await
674 } else {
675 None
676 }
677 } else {
678 None
679 };
680
681 // Eagerly populate the snapshot with an updated entry for the loaded file
682 let entry = this
683 .update(&mut cx, |this, cx| {
684 this.as_local()
685 .unwrap()
686 .refresh_entry(path, abs_path, None, cx)
687 })
688 .await?;
689
690 Ok((
691 File {
692 entry_id: Some(entry.id),
693 worktree: handle,
694 path: entry.path,
695 mtime: entry.mtime,
696 is_local: true,
697 },
698 text,
699 diff_base,
700 ))
701 })
702 }
703
704 pub fn save_buffer_as(
705 &self,
706 buffer_handle: ModelHandle<Buffer>,
707 path: impl Into<Arc<Path>>,
708 cx: &mut ModelContext<Worktree>,
709 ) -> Task<Result<()>> {
710 let buffer = buffer_handle.read(cx);
711 let text = buffer.as_rope().clone();
712 let fingerprint = text.fingerprint();
713 let version = buffer.version();
714 let save = self.write_file(path, text, buffer.line_ending(), cx);
715 let handle = cx.handle();
716 cx.as_mut().spawn(|mut cx| async move {
717 let entry = save.await?;
718 let file = File {
719 entry_id: Some(entry.id),
720 worktree: handle,
721 path: entry.path,
722 mtime: entry.mtime,
723 is_local: true,
724 };
725
726 buffer_handle.update(&mut cx, |buffer, cx| {
727 buffer.did_save(version, fingerprint, file.mtime, Some(Arc::new(file)), cx);
728 });
729
730 Ok(())
731 })
732 }
733
734 pub fn create_entry(
735 &self,
736 path: impl Into<Arc<Path>>,
737 is_dir: bool,
738 cx: &mut ModelContext<Worktree>,
739 ) -> Task<Result<Entry>> {
740 self.write_entry_internal(
741 path,
742 if is_dir {
743 None
744 } else {
745 Some(Default::default())
746 },
747 cx,
748 )
749 }
750
751 pub fn write_file(
752 &self,
753 path: impl Into<Arc<Path>>,
754 text: Rope,
755 line_ending: LineEnding,
756 cx: &mut ModelContext<Worktree>,
757 ) -> Task<Result<Entry>> {
758 self.write_entry_internal(path, Some((text, line_ending)), cx)
759 }
760
761 pub fn delete_entry(
762 &self,
763 entry_id: ProjectEntryId,
764 cx: &mut ModelContext<Worktree>,
765 ) -> Option<Task<Result<()>>> {
766 let entry = self.entry_for_id(entry_id)?.clone();
767 let abs_path = self.absolutize(&entry.path);
768 let delete = cx.background().spawn({
769 let fs = self.fs.clone();
770 let abs_path = abs_path;
771 async move {
772 if entry.is_file() {
773 fs.remove_file(&abs_path, Default::default()).await
774 } else {
775 fs.remove_dir(
776 &abs_path,
777 RemoveOptions {
778 recursive: true,
779 ignore_if_not_exists: false,
780 },
781 )
782 .await
783 }
784 }
785 });
786
787 Some(cx.spawn(|this, mut cx| async move {
788 delete.await?;
789 this.update(&mut cx, |this, cx| {
790 let this = this.as_local_mut().unwrap();
791 {
792 let mut snapshot = this.background_snapshot.lock();
793 snapshot.delete_entry(entry_id);
794 }
795 this.poll_snapshot(true, cx);
796 });
797 Ok(())
798 }))
799 }
800
801 pub fn rename_entry(
802 &self,
803 entry_id: ProjectEntryId,
804 new_path: impl Into<Arc<Path>>,
805 cx: &mut ModelContext<Worktree>,
806 ) -> Option<Task<Result<Entry>>> {
807 let old_path = self.entry_for_id(entry_id)?.path.clone();
808 let new_path = new_path.into();
809 let abs_old_path = self.absolutize(&old_path);
810 let abs_new_path = self.absolutize(&new_path);
811 let rename = cx.background().spawn({
812 let fs = self.fs.clone();
813 let abs_new_path = abs_new_path.clone();
814 async move {
815 fs.rename(&abs_old_path, &abs_new_path, Default::default())
816 .await
817 }
818 });
819
820 Some(cx.spawn(|this, mut cx| async move {
821 rename.await?;
822 let entry = this
823 .update(&mut cx, |this, cx| {
824 this.as_local_mut().unwrap().refresh_entry(
825 new_path.clone(),
826 abs_new_path,
827 Some(old_path),
828 cx,
829 )
830 })
831 .await?;
832 Ok(entry)
833 }))
834 }
835
836 pub fn copy_entry(
837 &self,
838 entry_id: ProjectEntryId,
839 new_path: impl Into<Arc<Path>>,
840 cx: &mut ModelContext<Worktree>,
841 ) -> Option<Task<Result<Entry>>> {
842 let old_path = self.entry_for_id(entry_id)?.path.clone();
843 let new_path = new_path.into();
844 let abs_old_path = self.absolutize(&old_path);
845 let abs_new_path = self.absolutize(&new_path);
846 let copy = cx.background().spawn({
847 let fs = self.fs.clone();
848 let abs_new_path = abs_new_path.clone();
849 async move {
850 copy_recursive(
851 fs.as_ref(),
852 &abs_old_path,
853 &abs_new_path,
854 Default::default(),
855 )
856 .await
857 }
858 });
859
860 Some(cx.spawn(|this, mut cx| async move {
861 copy.await?;
862 let entry = this
863 .update(&mut cx, |this, cx| {
864 this.as_local_mut().unwrap().refresh_entry(
865 new_path.clone(),
866 abs_new_path,
867 None,
868 cx,
869 )
870 })
871 .await?;
872 Ok(entry)
873 }))
874 }
875
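    // Writes a file (when `text_if_file` is provided) or creates a directory at
    // `path`, then refreshes the corresponding entry in the snapshot.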
876 fn write_entry_internal(
877 &self,
878 path: impl Into<Arc<Path>>,
879 text_if_file: Option<(Rope, LineEnding)>,
880 cx: &mut ModelContext<Worktree>,
881 ) -> Task<Result<Entry>> {
882 let path = path.into();
883 let abs_path = self.absolutize(&path);
884 let write = cx.background().spawn({
885 let fs = self.fs.clone();
886 let abs_path = abs_path.clone();
887 async move {
888 if let Some((text, line_ending)) = text_if_file {
889 fs.save(&abs_path, &text, line_ending).await
890 } else {
891 fs.create_dir(&abs_path).await
892 }
893 }
894 });
895
896 cx.spawn(|this, mut cx| async move {
897 write.await?;
898 let entry = this
899 .update(&mut cx, |this, cx| {
900 this.as_local_mut()
901 .unwrap()
902 .refresh_entry(path, abs_path, None, cx)
903 })
904 .await?;
905 Ok(entry)
906 })
907 }
908
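    // Re-stats `abs_path` and inserts an up-to-date entry for it into the background
    // snapshot, removing the entry at `old_path` first if one is given.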
909 fn refresh_entry(
910 &self,
911 path: Arc<Path>,
912 abs_path: PathBuf,
913 old_path: Option<Arc<Path>>,
914 cx: &mut ModelContext<Worktree>,
915 ) -> Task<Result<Entry>> {
916 let fs = self.fs.clone();
917 let root_char_bag;
918 let next_entry_id;
919 {
920 let snapshot = self.background_snapshot.lock();
921 root_char_bag = snapshot.root_char_bag;
922 next_entry_id = snapshot.next_entry_id.clone();
923 }
924 cx.spawn_weak(|this, mut cx| async move {
925 let metadata = fs
926 .metadata(&abs_path)
927 .await?
928 .ok_or_else(|| anyhow!("could not read saved file metadata"))?;
929 let this = this
930 .upgrade(&cx)
931 .ok_or_else(|| anyhow!("worktree was dropped"))?;
932 this.update(&mut cx, |this, cx| {
933 let this = this.as_local_mut().unwrap();
934 let inserted_entry;
935 {
936 let mut snapshot = this.background_snapshot.lock();
937 let mut entry = Entry::new(path, &metadata, &next_entry_id, root_char_bag);
938 entry.is_ignored = snapshot
939 .ignore_stack_for_abs_path(&abs_path, entry.is_dir())
940 .is_abs_path_ignored(&abs_path, entry.is_dir());
941 if let Some(old_path) = old_path {
942 snapshot.remove_path(&old_path);
943 }
944 inserted_entry = snapshot.insert_entry(entry, fs.as_ref());
945 snapshot.scan_id += 1;
946 }
947 this.poll_snapshot(true, cx);
948 Ok(inserted_entry)
949 })
950 })
951 }
952
953 pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
954 let (share_tx, share_rx) = oneshot::channel();
955
956 if self.share.is_some() {
957 let _ = share_tx.send(Ok(()));
958 } else {
959 let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
960 let rpc = self.client.clone();
961 let worktree_id = cx.model_id() as u64;
962
963 for (path, summary) in self.diagnostic_summaries.iter() {
964 if let Err(e) = rpc.send(proto::UpdateDiagnosticSummary {
965 project_id,
966 worktree_id,
967 summary: Some(summary.to_proto(&path.0)),
968 }) {
969 return Task::ready(Err(e));
970 }
971 }
972
973 let maintain_remote_snapshot = cx.background().spawn({
974 let rpc = rpc;
975
976 async move {
977 let mut prev_snapshot = match snapshots_rx.recv().await {
978 Some(snapshot) => {
979 let update = proto::UpdateWorktree {
980 project_id,
981 worktree_id,
982 root_name: snapshot.root_name().to_string(),
983 updated_entries: snapshot
984 .entries_by_path
985 .iter()
986 .map(Into::into)
987 .collect(),
988 removed_entries: Default::default(),
989 scan_id: snapshot.scan_id as u64,
990 is_last_update: true,
991 };
992 if let Err(error) = send_worktree_update(&rpc, update).await {
993 let _ = share_tx.send(Err(error));
                                return Err(anyhow!("failed to send initial worktree update"));
995 } else {
996 let _ = share_tx.send(Ok(()));
997 snapshot
998 }
999 }
1000 None => {
1001 share_tx
1002 .send(Err(anyhow!("worktree dropped before share completed")))
1003 .ok();
                            return Err(anyhow!("failed to send initial worktree update"));
1005 }
1006 };
1007
1008 while let Some(snapshot) = snapshots_rx.recv().await {
1009 send_worktree_update(
1010 &rpc,
1011 snapshot.build_update(&prev_snapshot, project_id, worktree_id, true),
1012 )
1013 .await?;
1014 prev_snapshot = snapshot;
1015 }
1016
1017 Ok::<_, anyhow::Error>(())
1018 }
1019 .log_err()
1020 });
1021 self.share = Some(ShareState {
1022 project_id,
1023 snapshots_tx,
1024 _maintain_remote_snapshot: Some(maintain_remote_snapshot),
1025 });
1026 }
1027
1028 cx.foreground().spawn(async move {
1029 share_rx
1030 .await
1031 .unwrap_or_else(|_| Err(anyhow!("share ended")))
1032 })
1033 }
1034
1035 pub fn unshare(&mut self) {
1036 self.share.take();
1037 }
1038
1039 pub fn is_shared(&self) -> bool {
1040 self.share.is_some()
1041 }
1042
1043 pub fn send_extension_counts(&self, project_id: u64) {
1044 let mut extensions = Vec::new();
1045 let mut counts = Vec::new();
1046
1047 for (extension, count) in self.extension_counts() {
1048 extensions.push(extension.to_string_lossy().to_string());
1049 counts.push(*count as u32);
1050 }
1051
1052 self.client
1053 .send(proto::UpdateWorktreeExtensions {
1054 project_id,
1055 worktree_id: self.id().to_proto(),
1056 extensions,
1057 counts,
1058 })
1059 .log_err();
1060 }
1061}
1062
1063impl RemoteWorktree {
1064 fn snapshot(&self) -> Snapshot {
1065 self.snapshot.clone()
1066 }
1067
1068 fn poll_snapshot(&mut self, cx: &mut ModelContext<Worktree>) {
1069 self.snapshot = self.background_snapshot.lock().clone();
1070 cx.emit(Event::UpdatedEntries);
1071 cx.notify();
1072 }
1073
1074 pub fn disconnected_from_host(&mut self) {
1075 self.updates_tx.take();
1076 self.snapshot_subscriptions.clear();
1077 }
1078
1079 pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
1080 if let Some(updates_tx) = &self.updates_tx {
1081 updates_tx
1082 .unbounded_send(update)
1083 .expect("consumer runs to completion");
1084 }
1085 }
1086
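    // Whether this worktree has already observed a snapshot at least as recent as the
    // given `scan_id`.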
1087 fn observed_snapshot(&self, scan_id: usize) -> bool {
1088 self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
1089 }
1090
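    // Returns a future that resolves once a snapshot with the given `scan_id` has been
    // observed, registering a subscription (kept sorted by scan id) if necessary.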
1091 fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
1092 let (tx, rx) = oneshot::channel();
1093 if self.observed_snapshot(scan_id) {
1094 let _ = tx.send(());
1095 } else {
1096 match self
1097 .snapshot_subscriptions
1098 .binary_search_by_key(&scan_id, |probe| probe.0)
1099 {
1100 Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
1101 }
1102 }
1103
1104 async move {
1105 let _ = rx.await;
1106 }
1107 }
1108
1109 pub fn update_diagnostic_summary(
1110 &mut self,
1111 path: Arc<Path>,
1112 summary: &proto::DiagnosticSummary,
1113 ) {
1114 let summary = DiagnosticSummary {
1115 language_server_id: summary.language_server_id as usize,
1116 error_count: summary.error_count as usize,
1117 warning_count: summary.warning_count as usize,
1118 };
1119 if summary.is_empty() {
1120 self.diagnostic_summaries.remove(&PathKey(path));
1121 } else {
1122 self.diagnostic_summaries.insert(PathKey(path), summary);
1123 }
1124 }
1125
1126 pub fn insert_entry(
1127 &mut self,
1128 entry: proto::Entry,
1129 scan_id: usize,
1130 cx: &mut ModelContext<Worktree>,
1131 ) -> Task<Result<Entry>> {
1132 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1133 cx.spawn(|this, mut cx| async move {
1134 wait_for_snapshot.await;
1135 this.update(&mut cx, |worktree, _| {
1136 let worktree = worktree.as_remote_mut().unwrap();
1137 let mut snapshot = worktree.background_snapshot.lock();
1138 let entry = snapshot.insert_entry(entry);
1139 worktree.snapshot = snapshot.clone();
1140 entry
1141 })
1142 })
1143 }
1144
1145 pub(crate) fn delete_entry(
1146 &mut self,
1147 id: ProjectEntryId,
1148 scan_id: usize,
1149 cx: &mut ModelContext<Worktree>,
1150 ) -> Task<Result<()>> {
1151 let wait_for_snapshot = self.wait_for_snapshot(scan_id);
1152 cx.spawn(|this, mut cx| async move {
1153 wait_for_snapshot.await;
1154 this.update(&mut cx, |worktree, _| {
1155 let worktree = worktree.as_remote_mut().unwrap();
1156 let mut snapshot = worktree.background_snapshot.lock();
1157 snapshot.delete_entry(id);
1158 worktree.snapshot = snapshot.clone();
1159 });
1160 Ok(())
1161 })
1162 }
1163}
1164
1165impl Snapshot {
1166 pub fn id(&self) -> WorktreeId {
1167 self.id
1168 }
1169
1170 pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
1171 self.entries_by_id.get(&entry_id, &()).is_some()
1172 }
1173
1174 pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
1175 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1176 let old_entry = self.entries_by_id.insert_or_replace(
1177 PathEntry {
1178 id: entry.id,
1179 path: entry.path.clone(),
1180 is_ignored: entry.is_ignored,
1181 scan_id: 0,
1182 },
1183 &(),
1184 );
1185 if let Some(old_entry) = old_entry {
1186 self.entries_by_path.remove(&PathKey(old_entry.path), &());
1187 }
1188 self.entries_by_path.insert_or_replace(entry.clone(), &());
1189 Ok(entry)
1190 }
1191
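    // Removes the entry with the given id, along with all of its descendants, from
    // both trees. Returns whether an entry was actually removed.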
1192 fn delete_entry(&mut self, entry_id: ProjectEntryId) -> bool {
1193 if let Some(removed_entry) = self.entries_by_id.remove(&entry_id, &()) {
1194 self.entries_by_path = {
1195 let mut cursor = self.entries_by_path.cursor();
1196 let mut new_entries_by_path =
1197 cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
1198 while let Some(entry) = cursor.item() {
1199 if entry.path.starts_with(&removed_entry.path) {
1200 self.entries_by_id.remove(&entry.id, &());
1201 cursor.next(&());
1202 } else {
1203 break;
1204 }
1205 }
1206 new_entries_by_path.push_tree(cursor.suffix(&()), &());
1207 new_entries_by_path
1208 };
1209
1210 true
1211 } else {
1212 false
1213 }
1214 }
1215
1216 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateWorktree) -> Result<()> {
1217 let mut entries_by_path_edits = Vec::new();
1218 let mut entries_by_id_edits = Vec::new();
1219 for entry_id in update.removed_entries {
1220 let entry = self
1221 .entry_for_id(ProjectEntryId::from_proto(entry_id))
1222 .ok_or_else(|| anyhow!("unknown entry {}", entry_id))?;
1223 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
1224 entries_by_id_edits.push(Edit::Remove(entry.id));
1225 }
1226
1227 for entry in update.updated_entries {
1228 let entry = Entry::try_from((&self.root_char_bag, entry))?;
1229 if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
1230 entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
1231 }
1232 entries_by_id_edits.push(Edit::Insert(PathEntry {
1233 id: entry.id,
1234 path: entry.path.clone(),
1235 is_ignored: entry.is_ignored,
1236 scan_id: 0,
1237 }));
1238 entries_by_path_edits.push(Edit::Insert(entry));
1239 }
1240
1241 self.entries_by_path.edit(entries_by_path_edits, &());
1242 self.entries_by_id.edit(entries_by_id_edits, &());
1243 self.scan_id = update.scan_id as usize;
1244 self.is_complete = update.is_last_update;
1245
1246 Ok(())
1247 }
1248
1249 pub fn file_count(&self) -> usize {
1250 self.entries_by_path.summary().file_count
1251 }
1252
1253 pub fn visible_file_count(&self) -> usize {
1254 self.entries_by_path.summary().visible_file_count
1255 }
1256
1257 fn traverse_from_offset(
1258 &self,
1259 include_dirs: bool,
1260 include_ignored: bool,
1261 start_offset: usize,
1262 ) -> Traversal {
1263 let mut cursor = self.entries_by_path.cursor();
1264 cursor.seek(
1265 &TraversalTarget::Count {
1266 count: start_offset,
1267 include_dirs,
1268 include_ignored,
1269 },
1270 Bias::Right,
1271 &(),
1272 );
1273 Traversal {
1274 cursor,
1275 include_dirs,
1276 include_ignored,
1277 }
1278 }
1279
1280 fn traverse_from_path(
1281 &self,
1282 include_dirs: bool,
1283 include_ignored: bool,
1284 path: &Path,
1285 ) -> Traversal {
1286 let mut cursor = self.entries_by_path.cursor();
1287 cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
1288 Traversal {
1289 cursor,
1290 include_dirs,
1291 include_ignored,
1292 }
1293 }
1294
1295 pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
1296 self.traverse_from_offset(false, include_ignored, start)
1297 }
1298
1299 pub fn entries(&self, include_ignored: bool) -> Traversal {
1300 self.traverse_from_offset(true, include_ignored, 0)
1301 }
1302
1303 pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
1304 let empty_path = Path::new("");
1305 self.entries_by_path
1306 .cursor::<()>()
1307 .filter(move |entry| entry.path.as_ref() != empty_path)
1308 .map(|entry| &entry.path)
1309 }
1310
1311 fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
1312 let mut cursor = self.entries_by_path.cursor();
1313 cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
1314 let traversal = Traversal {
1315 cursor,
1316 include_dirs: true,
1317 include_ignored: true,
1318 };
1319 ChildEntriesIter {
1320 traversal,
1321 parent_path,
1322 }
1323 }
1324
1325 pub fn root_entry(&self) -> Option<&Entry> {
1326 self.entry_for_path("")
1327 }
1328
1329 pub fn root_name(&self) -> &str {
1330 &self.root_name
1331 }
1332
1333 pub fn scan_id(&self) -> usize {
1334 self.scan_id
1335 }
1336
1337 pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
1338 let path = path.as_ref();
1339 self.traverse_from_path(true, true, path)
1340 .entry()
1341 .and_then(|entry| {
1342 if entry.path.as_ref() == path {
1343 Some(entry)
1344 } else {
1345 None
1346 }
1347 })
1348 }
1349
1350 pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
1351 let entry = self.entries_by_id.get(&id, &())?;
1352 self.entry_for_path(&entry.path)
1353 }
1354
1355 pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
1356 self.entry_for_path(path.as_ref()).map(|e| e.inode)
1357 }
1358}
1359
1360impl LocalSnapshot {
1361 pub fn abs_path(&self) -> &Arc<Path> {
1362 &self.abs_path
1363 }
1364
1365 pub fn extension_counts(&self) -> &HashMap<OsString, usize> {
1366 &self.extension_counts
1367 }
1368
1369 // Gives the most specific git repository for a given path
1370 pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
1371 self.git_repositories
1372 .iter()
            .rev() // `git_repositories` is ordered lexicographically
1374 .find(|repo| repo.manages(path))
1375 .cloned()
1376 }
1377
1378 pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
1379 // Git repositories cannot be nested, so we don't need to reverse the order
1380 self.git_repositories
1381 .iter_mut()
1382 .find(|repo| repo.in_dot_git(path))
1383 }
1384
1385 #[cfg(test)]
1386 pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
1387 let root_name = self.root_name.clone();
1388 proto::UpdateWorktree {
1389 project_id,
1390 worktree_id: self.id().to_proto(),
1391 root_name,
1392 updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
1393 removed_entries: Default::default(),
1394 scan_id: self.scan_id as u64,
1395 is_last_update: true,
1396 }
1397 }
1398
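    // Builds an `UpdateWorktree` message describing how this snapshot differs from
    // `other` (the previously sent snapshot) by merging the two id-ordered entry
    // lists: entries present only in `self` or carrying a different scan id are sent
    // as updated, while entries present only in `other` are sent as removed.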
1399 pub(crate) fn build_update(
1400 &self,
1401 other: &Self,
1402 project_id: u64,
1403 worktree_id: u64,
1404 include_ignored: bool,
1405 ) -> proto::UpdateWorktree {
1406 let mut updated_entries = Vec::new();
1407 let mut removed_entries = Vec::new();
1408 let mut self_entries = self
1409 .entries_by_id
1410 .cursor::<()>()
1411 .filter(|e| include_ignored || !e.is_ignored)
1412 .peekable();
1413 let mut other_entries = other
1414 .entries_by_id
1415 .cursor::<()>()
1416 .filter(|e| include_ignored || !e.is_ignored)
1417 .peekable();
1418 loop {
1419 match (self_entries.peek(), other_entries.peek()) {
1420 (Some(self_entry), Some(other_entry)) => {
1421 match Ord::cmp(&self_entry.id, &other_entry.id) {
1422 Ordering::Less => {
1423 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1424 updated_entries.push(entry);
1425 self_entries.next();
1426 }
1427 Ordering::Equal => {
1428 if self_entry.scan_id != other_entry.scan_id {
1429 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1430 updated_entries.push(entry);
1431 }
1432
1433 self_entries.next();
1434 other_entries.next();
1435 }
1436 Ordering::Greater => {
1437 removed_entries.push(other_entry.id.to_proto());
1438 other_entries.next();
1439 }
1440 }
1441 }
1442 (Some(self_entry), None) => {
1443 let entry = self.entry_for_id(self_entry.id).unwrap().into();
1444 updated_entries.push(entry);
1445 self_entries.next();
1446 }
1447 (None, Some(other_entry)) => {
1448 removed_entries.push(other_entry.id.to_proto());
1449 other_entries.next();
1450 }
1451 (None, None) => break,
1452 }
1453 }
1454
1455 proto::UpdateWorktree {
1456 project_id,
1457 worktree_id,
1458 root_name: self.root_name().to_string(),
1459 updated_entries,
1460 removed_entries,
1461 scan_id: self.scan_id as u64,
1462 is_last_update: true,
1463 }
1464 }
1465
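    // Inserts a single entry into the snapshot, parsing it as a .gitignore when
    // applicable, reusing an existing entry id where possible, and keeping the
    // extension counts up to date.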
1466 fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
1467 if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
1468 let abs_path = self.abs_path.join(&entry.path);
1469 match smol::block_on(build_gitignore(&abs_path, fs)) {
1470 Ok(ignore) => {
1471 self.ignores_by_parent_abs_path.insert(
1472 abs_path.parent().unwrap().into(),
1473 (Arc::new(ignore), self.scan_id),
1474 );
1475 }
1476 Err(error) => {
1477 log::error!(
1478 "error loading .gitignore file {:?} - {:?}",
1479 &entry.path,
1480 error
1481 );
1482 }
1483 }
1484 }
1485
1486 self.reuse_entry_id(&mut entry);
1487
1488 if entry.kind == EntryKind::PendingDir {
1489 if let Some(existing_entry) =
1490 self.entries_by_path.get(&PathKey(entry.path.clone()), &())
1491 {
1492 entry.kind = existing_entry.kind;
1493 }
1494 }
1495
1496 self.entries_by_path.insert_or_replace(entry.clone(), &());
1497 let scan_id = self.scan_id;
1498 let removed_entry = self.entries_by_id.insert_or_replace(
1499 PathEntry {
1500 id: entry.id,
1501 path: entry.path.clone(),
1502 is_ignored: entry.is_ignored,
1503 scan_id,
1504 },
1505 &(),
1506 );
1507
1508 if let Some(removed_entry) = removed_entry {
1509 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1510 }
1511 self.inc_extension_count(&entry.path, entry.is_ignored);
1512
1513 entry
1514 }
1515
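    // Records the children of a directory once it has been scanned: the parent entry
    // is promoted from `PendingDir` to `Dir`, an accompanying .gitignore is registered
    // if one was found, and a `.git` directory causes its repository to be opened and
    // tracked.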
1516 fn populate_dir(
1517 &mut self,
1518 parent_path: Arc<Path>,
1519 entries: impl IntoIterator<Item = Entry>,
1520 ignore: Option<Arc<Gitignore>>,
1521 fs: &dyn Fs,
1522 ) {
1523 let mut parent_entry = if let Some(parent_entry) =
1524 self.entries_by_path.get(&PathKey(parent_path.clone()), &())
1525 {
1526 parent_entry.clone()
1527 } else {
1528 log::warn!(
1529 "populating a directory {:?} that has been removed",
1530 parent_path
1531 );
1532 return;
1533 };
1534
1535 if let Some(ignore) = ignore {
1536 self.ignores_by_parent_abs_path.insert(
1537 self.abs_path.join(&parent_path).into(),
1538 (ignore, self.scan_id),
1539 );
1540 }
1541 if matches!(parent_entry.kind, EntryKind::PendingDir) {
1542 parent_entry.kind = EntryKind::Dir;
1543 } else {
1544 unreachable!();
1545 }
1546
1547 if parent_path.file_name() == Some(&DOT_GIT) {
1548 let abs_path = self.abs_path.join(&parent_path);
1549 let content_path: Arc<Path> = parent_path.parent().unwrap().into();
1550 if let Err(ix) = self
1551 .git_repositories
1552 .binary_search_by_key(&&content_path, |repo| &repo.content_path)
1553 {
1554 if let Some(repo) = fs.open_repo(abs_path.as_path()) {
1555 self.git_repositories.insert(
1556 ix,
1557 GitRepositoryEntry {
1558 repo,
1559 scan_id: 0,
1560 content_path,
1561 git_dir_path: parent_path,
1562 },
1563 );
1564 }
1565 }
1566 }
1567
1568 let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
1569 let mut entries_by_id_edits = Vec::new();
1570
1571 for mut entry in entries {
1572 self.reuse_entry_id(&mut entry);
1573 self.inc_extension_count(&entry.path, entry.is_ignored);
1574 entries_by_id_edits.push(Edit::Insert(PathEntry {
1575 id: entry.id,
1576 path: entry.path.clone(),
1577 is_ignored: entry.is_ignored,
1578 scan_id: self.scan_id,
1579 }));
1580 entries_by_path_edits.push(Edit::Insert(entry));
1581 }
1582
1583 self.entries_by_path.edit(entries_by_path_edits, &());
1584 let removed_entries = self.entries_by_id.edit(entries_by_id_edits, &());
1585
1586 for removed_entry in removed_entries {
1587 self.dec_extension_count(&removed_entry.path, removed_entry.is_ignored);
1588 }
1589 }
1590
1591 fn inc_extension_count(&mut self, path: &Path, ignored: bool) {
1592 if !ignored {
1593 if let Some(extension) = path.extension() {
1594 if let Some(count) = self.extension_counts.get_mut(extension) {
1595 *count += 1;
1596 } else {
1597 self.extension_counts.insert(extension.into(), 1);
1598 }
1599 }
1600 }
1601 }
1602
1603 fn dec_extension_count(&mut self, path: &Path, ignored: bool) {
1604 if !ignored {
1605 if let Some(extension) = path.extension() {
1606 if let Some(count) = self.extension_counts.get_mut(extension) {
1607 *count -= 1;
1608 }
1609 }
1610 }
1611 }
1612
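    // Preserves entry ids across rescans: prefer the id of a previously removed entry
    // with the same inode, otherwise the id of an existing entry at the same path.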
1613 fn reuse_entry_id(&mut self, entry: &mut Entry) {
1614 if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
1615 entry.id = removed_entry_id;
1616 } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
1617 entry.id = existing_entry.id;
1618 }
1619 }
1620
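    // Removes the entry at `path` and all of its descendants, remembering removed ids
    // by inode so they can be reused if the paths reappear, and, when the removed path
    // is a `.gitignore` file or a `.git` directory, refreshing the scan id recorded
    // for it.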
1621 fn remove_path(&mut self, path: &Path) {
1622 let mut new_entries;
1623 let removed_entries;
1624 {
1625 let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
1626 new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
1627 removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
1628 new_entries.push_tree(cursor.suffix(&()), &());
1629 }
1630 self.entries_by_path = new_entries;
1631
1632 let mut entries_by_id_edits = Vec::new();
1633 for entry in removed_entries.cursor::<()>() {
1634 let removed_entry_id = self
1635 .removed_entry_ids
1636 .entry(entry.inode)
1637 .or_insert(entry.id);
1638 *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
1639 entries_by_id_edits.push(Edit::Remove(entry.id));
1640 self.dec_extension_count(&entry.path, entry.is_ignored);
1641 }
1642 self.entries_by_id.edit(entries_by_id_edits, &());
1643
1644 if path.file_name() == Some(&GITIGNORE) {
1645 let abs_parent_path = self.abs_path.join(path.parent().unwrap());
1646 if let Some((_, scan_id)) = self
1647 .ignores_by_parent_abs_path
1648 .get_mut(abs_parent_path.as_path())
1649 {
1650 *scan_id = self.snapshot.scan_id;
1651 }
1652 } else if path.file_name() == Some(&DOT_GIT) {
1653 let parent_path = path.parent().unwrap();
1654 if let Ok(ix) = self
1655 .git_repositories
1656 .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
1657 {
1658 self.git_repositories[ix].scan_id = self.snapshot.scan_id;
1659 }
1660 }
1661 }
1662
1663 fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
1664 let mut inodes = TreeSet::default();
1665 for ancestor in path.ancestors().skip(1) {
1666 if let Some(entry) = self.entry_for_path(ancestor) {
1667 inodes.insert(entry.inode);
1668 }
1669 }
1670 inodes
1671 }
1672
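    // Builds the stack of gitignore rules that apply to `abs_path` by walking its
    // ancestors from the root downward, short-circuiting to `IgnoreStack::all()` as
    // soon as a containing directory is itself ignored.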
1673 fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
1674 let mut new_ignores = Vec::new();
1675 for ancestor in abs_path.ancestors().skip(1) {
1676 if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
1677 new_ignores.push((ancestor, Some(ignore.clone())));
1678 } else {
1679 new_ignores.push((ancestor, None));
1680 }
1681 }
1682
1683 let mut ignore_stack = IgnoreStack::none();
1684 for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
1685 if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
1686 ignore_stack = IgnoreStack::all();
1687 break;
1688 } else if let Some(ignore) = ignore {
1689 ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
1690 }
1691 }
1692
1693 if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
1694 ignore_stack = IgnoreStack::all();
1695 }
1696
1697 ignore_stack
1698 }
1699
1700 pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
1701 &self.git_repositories
1702 }
1703}
1704
1705impl GitRepositoryEntry {
    // Note that this path should be relative to the worktree root.
1707 pub(crate) fn manages(&self, path: &Path) -> bool {
1708 path.starts_with(self.content_path.as_ref())
1709 }
1710
    // Note that this path should be relative to the worktree root.
1712 pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
1713 path.starts_with(self.git_dir_path.as_ref())
1714 }
1715}
1716
1717async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
1718 let contents = fs.load(abs_path).await?;
1719 let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
1720 let mut builder = GitignoreBuilder::new(parent);
1721 for line in contents.lines() {
1722 builder.add_line(Some(abs_path.into()), line)?;
1723 }
1724 Ok(builder.build()?)
1725}
1726
1727impl WorktreeId {
1728 pub fn from_usize(handle_id: usize) -> Self {
1729 Self(handle_id)
1730 }
1731
1732 pub(crate) fn from_proto(id: u64) -> Self {
1733 Self(id as usize)
1734 }
1735
1736 pub fn to_proto(&self) -> u64 {
1737 self.0 as u64
1738 }
1739
1740 pub fn to_usize(&self) -> usize {
1741 self.0
1742 }
1743}
1744
1745impl fmt::Display for WorktreeId {
1746 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1747 self.0.fmt(f)
1748 }
1749}
1750
1751impl Deref for Worktree {
1752 type Target = Snapshot;
1753
1754 fn deref(&self) -> &Self::Target {
1755 match self {
1756 Worktree::Local(worktree) => &worktree.snapshot,
1757 Worktree::Remote(worktree) => &worktree.snapshot,
1758 }
1759 }
1760}
1761
1762impl Deref for LocalWorktree {
1763 type Target = LocalSnapshot;
1764
1765 fn deref(&self) -> &Self::Target {
1766 &self.snapshot
1767 }
1768}
1769
1770impl Deref for RemoteWorktree {
1771 type Target = Snapshot;
1772
1773 fn deref(&self) -> &Self::Target {
1774 &self.snapshot
1775 }
1776}
1777
1778impl fmt::Debug for LocalWorktree {
1779 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1780 self.snapshot.fmt(f)
1781 }
1782}
1783
1784impl fmt::Debug for Snapshot {
1785 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1786 struct EntriesById<'a>(&'a SumTree<PathEntry>);
1787 struct EntriesByPath<'a>(&'a SumTree<Entry>);
1788
1789 impl<'a> fmt::Debug for EntriesByPath<'a> {
1790 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1791 f.debug_map()
1792 .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
1793 .finish()
1794 }
1795 }
1796
1797 impl<'a> fmt::Debug for EntriesById<'a> {
1798 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1799 f.debug_list().entries(self.0.iter()).finish()
1800 }
1801 }
1802
1803 f.debug_struct("Snapshot")
1804 .field("id", &self.id)
1805 .field("root_name", &self.root_name)
1806 .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
1807 .field("entries_by_id", &EntriesById(&self.entries_by_id))
1808 .finish()
1809 }
1810}
1811
1812#[derive(Clone, PartialEq)]
1813pub struct File {
1814 pub worktree: ModelHandle<Worktree>,
1815 pub path: Arc<Path>,
1816 pub mtime: SystemTime,
1817 pub(crate) entry_id: Option<ProjectEntryId>,
1818 pub(crate) is_local: bool,
1819}
1820
1821impl language::File for File {
1822 fn as_local(&self) -> Option<&dyn language::LocalFile> {
1823 if self.is_local {
1824 Some(self)
1825 } else {
1826 None
1827 }
1828 }
1829
1830 fn mtime(&self) -> SystemTime {
1831 self.mtime
1832 }
1833
1834 fn path(&self) -> &Arc<Path> {
1835 &self.path
1836 }
1837
1838 fn full_path(&self, cx: &AppContext) -> PathBuf {
1839 let mut full_path = PathBuf::new();
1840 full_path.push(self.worktree.read(cx).root_name());
1841 if self.path.components().next().is_some() {
1842 full_path.push(&self.path);
1843 }
1844 full_path
1845 }
1846
1847 /// Returns the last component of this handle's absolute path. If this handle refers to the root
1848 /// of its worktree, then this method will return the name of the worktree itself.
1849 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
1850 self.path
1851 .file_name()
1852 .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
1853 }
1854
1855 fn is_deleted(&self) -> bool {
1856 self.entry_id.is_none()
1857 }
1858
1859 fn save(
1860 &self,
1861 buffer_id: u64,
1862 text: Rope,
1863 version: clock::Global,
1864 line_ending: LineEnding,
1865 cx: &mut MutableAppContext,
1866 ) -> Task<Result<(clock::Global, String, SystemTime)>> {
1867 self.worktree.update(cx, |worktree, cx| match worktree {
1868 Worktree::Local(worktree) => {
1869 let rpc = worktree.client.clone();
1870 let project_id = worktree.share.as_ref().map(|share| share.project_id);
1871 let fingerprint = text.fingerprint();
1872 let save = worktree.write_file(self.path.clone(), text, line_ending, cx);
1873 cx.background().spawn(async move {
1874 let entry = save.await?;
1875 if let Some(project_id) = project_id {
1876 rpc.send(proto::BufferSaved {
1877 project_id,
1878 buffer_id,
1879 version: serialize_version(&version),
1880 mtime: Some(entry.mtime.into()),
1881 fingerprint: fingerprint.clone(),
1882 })?;
1883 }
1884 Ok((version, fingerprint, entry.mtime))
1885 })
1886 }
1887 Worktree::Remote(worktree) => {
1888 let rpc = worktree.client.clone();
1889 let project_id = worktree.project_id;
1890 cx.foreground().spawn(async move {
1891 let response = rpc
1892 .request(proto::SaveBuffer {
1893 project_id,
1894 buffer_id,
1895 version: serialize_version(&version),
1896 })
1897 .await?;
1898 let version = deserialize_version(response.version);
1899 let mtime = response
1900 .mtime
1901 .ok_or_else(|| anyhow!("missing mtime"))?
1902 .into();
1903 Ok((version, response.fingerprint, mtime))
1904 })
1905 }
1906 })
1907 }
1908
1909 fn as_any(&self) -> &dyn Any {
1910 self
1911 }
1912
1913 fn to_proto(&self) -> rpc::proto::File {
1914 rpc::proto::File {
1915 worktree_id: self.worktree.id() as u64,
1916 entry_id: self.entry_id.map(|entry_id| entry_id.to_proto()),
1917 path: self.path.to_string_lossy().into(),
1918 mtime: Some(self.mtime.into()),
1919 }
1920 }
1921}
1922
1923impl language::LocalFile for File {
1924 fn abs_path(&self, cx: &AppContext) -> PathBuf {
1925 self.worktree
1926 .read(cx)
1927 .as_local()
1928 .unwrap()
1929 .abs_path
1930 .join(&self.path)
1931 }
1932
1933 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
1934 let worktree = self.worktree.read(cx).as_local().unwrap();
1935 let abs_path = worktree.absolutize(&self.path);
1936 let fs = worktree.fs.clone();
1937 cx.background()
1938 .spawn(async move { fs.load(&abs_path).await })
1939 }
1940
1941 fn buffer_reloaded(
1942 &self,
1943 buffer_id: u64,
1944 version: &clock::Global,
1945 fingerprint: String,
1946 line_ending: LineEnding,
1947 mtime: SystemTime,
1948 cx: &mut MutableAppContext,
1949 ) {
1950 let worktree = self.worktree.read(cx).as_local().unwrap();
1951 if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
1952 worktree
1953 .client
1954 .send(proto::BufferReloaded {
1955 project_id,
1956 buffer_id,
1957 version: serialize_version(version),
1958 mtime: Some(mtime.into()),
1959 fingerprint,
1960 line_ending: serialize_line_ending(line_ending) as i32,
1961 })
1962 .log_err();
1963 }
1964 }
1965}
1966
1967impl File {
1968 pub fn from_proto(
1969 proto: rpc::proto::File,
1970 worktree: ModelHandle<Worktree>,
1971 cx: &AppContext,
1972 ) -> Result<Self> {
1973 let worktree_id = worktree
1974 .read(cx)
1975 .as_remote()
1976 .ok_or_else(|| anyhow!("not remote"))?
1977 .id();
1978
1979 if worktree_id.to_proto() != proto.worktree_id {
1980 return Err(anyhow!("worktree id does not match file"));
1981 }
1982
1983 Ok(Self {
1984 worktree,
1985 path: Path::new(&proto.path).into(),
1986 mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
1987 entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
1988 is_local: false,
1989 })
1990 }
1991
1992 pub fn from_dyn(file: Option<&dyn language::File>) -> Option<&Self> {
1993 file.and_then(|f| f.as_any().downcast_ref())
1994 }
1995
1996 pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
1997 self.worktree.read(cx).id()
1998 }
1999
2000 pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
2001 self.entry_id
2002 }
2003}
2004
2005#[derive(Clone, Debug, PartialEq, Eq)]
2006pub struct Entry {
2007 pub id: ProjectEntryId,
2008 pub kind: EntryKind,
2009 pub path: Arc<Path>,
2010 pub inode: u64,
2011 pub mtime: SystemTime,
2012 pub is_symlink: bool,
2013 pub is_ignored: bool,
2014}
2015
2016#[derive(Clone, Copy, Debug, PartialEq, Eq)]
2017pub enum EntryKind {
2018 PendingDir,
2019 Dir,
2020 File(CharBag),
2021}
2022
2023impl Entry {
2024 fn new(
2025 path: Arc<Path>,
2026 metadata: &fs::Metadata,
2027 next_entry_id: &AtomicUsize,
2028 root_char_bag: CharBag,
2029 ) -> Self {
2030 Self {
2031 id: ProjectEntryId::new(next_entry_id),
2032 kind: if metadata.is_dir {
2033 EntryKind::PendingDir
2034 } else {
2035 EntryKind::File(char_bag_for_path(root_char_bag, &path))
2036 },
2037 path,
2038 inode: metadata.inode,
2039 mtime: metadata.mtime,
2040 is_symlink: metadata.is_symlink,
2041 is_ignored: false,
2042 }
2043 }
2044
2045 pub fn is_dir(&self) -> bool {
2046 matches!(self.kind, EntryKind::Dir | EntryKind::PendingDir)
2047 }
2048
2049 pub fn is_file(&self) -> bool {
2050 matches!(self.kind, EntryKind::File(_))
2051 }
2052}
2053
2054impl sum_tree::Item for Entry {
2055 type Summary = EntrySummary;
2056
2057 fn summary(&self) -> Self::Summary {
2058 let visible_count = if self.is_ignored { 0 } else { 1 };
2059 let file_count;
2060 let visible_file_count;
2061 if self.is_file() {
2062 file_count = 1;
2063 visible_file_count = visible_count;
2064 } else {
2065 file_count = 0;
2066 visible_file_count = 0;
2067 }
2068
2069 EntrySummary {
2070 max_path: self.path.clone(),
2071 count: 1,
2072 visible_count,
2073 file_count,
2074 visible_file_count,
2075 }
2076 }
2077}
2078
2079impl sum_tree::KeyedItem for Entry {
2080 type Key = PathKey;
2081
2082 fn key(&self) -> Self::Key {
2083 PathKey(self.path.clone())
2084 }
2085}
2086
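/// Summary data aggregated over a range of entries in the `entries_by_path`
/// tree: the rightmost path in the range, plus entry and file counts (total
/// and non-ignored), used when seeking by path or by index.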
2087#[derive(Clone, Debug)]
2088pub struct EntrySummary {
2089 max_path: Arc<Path>,
2090 count: usize,
2091 visible_count: usize,
2092 file_count: usize,
2093 visible_file_count: usize,
2094}
2095
2096impl Default for EntrySummary {
2097 fn default() -> Self {
2098 Self {
2099 max_path: Arc::from(Path::new("")),
2100 count: 0,
2101 visible_count: 0,
2102 file_count: 0,
2103 visible_file_count: 0,
2104 }
2105 }
2106}
2107
2108impl sum_tree::Summary for EntrySummary {
2109 type Context = ();
2110
2111 fn add_summary(&mut self, rhs: &Self, _: &()) {
2112 self.max_path = rhs.max_path.clone();
2113 self.count += rhs.count;
2114 self.visible_count += rhs.visible_count;
2115 self.file_count += rhs.file_count;
2116 self.visible_file_count += rhs.visible_file_count;
2117 }
2118}
2119
2120#[derive(Clone, Debug)]
2121struct PathEntry {
2122 id: ProjectEntryId,
2123 path: Arc<Path>,
2124 is_ignored: bool,
2125 scan_id: usize,
2126}
2127
2128impl sum_tree::Item for PathEntry {
2129 type Summary = PathEntrySummary;
2130
2131 fn summary(&self) -> Self::Summary {
2132 PathEntrySummary { max_id: self.id }
2133 }
2134}
2135
2136impl sum_tree::KeyedItem for PathEntry {
2137 type Key = ProjectEntryId;
2138
2139 fn key(&self) -> Self::Key {
2140 self.id
2141 }
2142}
2143
2144#[derive(Clone, Debug, Default)]
2145struct PathEntrySummary {
2146 max_id: ProjectEntryId,
2147}
2148
2149impl sum_tree::Summary for PathEntrySummary {
2150 type Context = ();
2151
2152 fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
2153 self.max_id = summary.max_id;
2154 }
2155}
2156
2157impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
2158 fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
2159 *self = summary.max_id;
2160 }
2161}
2162
2163#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
2164pub struct PathKey(Arc<Path>);
2165
2166impl Default for PathKey {
2167 fn default() -> Self {
2168 Self(Path::new("").into())
2169 }
2170}
2171
2172impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
2173 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2174 self.0 = summary.max_path.clone();
2175 }
2176}
2177
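/// Scans the worktree's directory tree on a background executor, keeping the
/// shared `LocalSnapshot` up to date and reporting progress to the worktree
/// via `ScanState` notifications.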
2178struct BackgroundScanner {
2179 fs: Arc<dyn Fs>,
2180 snapshot: Arc<Mutex<LocalSnapshot>>,
2181 notify: UnboundedSender<ScanState>,
2182 executor: Arc<executor::Background>,
2183}
2184
2185impl BackgroundScanner {
2186 fn new(
2187 snapshot: Arc<Mutex<LocalSnapshot>>,
2188 notify: UnboundedSender<ScanState>,
2189 fs: Arc<dyn Fs>,
2190 executor: Arc<executor::Background>,
2191 ) -> Self {
2192 Self {
2193 fs,
2194 snapshot,
2195 notify,
2196 executor,
2197 }
2198 }
2199
2200 fn abs_path(&self) -> Arc<Path> {
2201 self.snapshot.lock().abs_path.clone()
2202 }
2203
2204 fn snapshot(&self) -> LocalSnapshot {
2205 self.snapshot.lock().clone()
2206 }
2207
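    /// Drives the scanner: performs the initial recursive scan, then processes
    /// batches of file-system events, sending a `ScanState` notification
    /// before and after each unit of work. Returns when the event stream ends
    /// or the notification receiver is dropped.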
2208 async fn run(mut self, events_rx: impl Stream<Item = Vec<fsevent::Event>>) {
2209 if self.notify.unbounded_send(ScanState::Initializing).is_err() {
2210 return;
2211 }
2212
2213 if let Err(err) = self.scan_dirs().await {
2214 if self
2215 .notify
2216 .unbounded_send(ScanState::Err(Arc::new(err)))
2217 .is_err()
2218 {
2219 return;
2220 }
2221 }
2222
2223 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2224 return;
2225 }
2226
2227 futures::pin_mut!(events_rx);
2228
2229 while let Some(mut events) = events_rx.next().await {
2230 while let Poll::Ready(Some(additional_events)) = futures::poll!(events_rx.next()) {
2231 events.extend(additional_events);
2232 }
2233
2234 if self.notify.unbounded_send(ScanState::Updating).is_err() {
2235 break;
2236 }
2237
2238 if !self.process_events(events).await {
2239 break;
2240 }
2241
2242 if self.notify.unbounded_send(ScanState::Idle).is_err() {
2243 break;
2244 }
2245 }
2246 }
2247
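    /// Performs the initial scan of the worktree root: loads `.gitignore`
    /// files from ancestor directories, then fans directory scan jobs out
    /// across one worker task per CPU.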
2248 async fn scan_dirs(&mut self) -> Result<()> {
2249 let root_char_bag;
2250 let root_abs_path;
2251 let root_inode;
2252 let is_dir;
2253 let next_entry_id;
2254 {
2255 let snapshot = self.snapshot.lock();
2256 root_char_bag = snapshot.root_char_bag;
2257 root_abs_path = snapshot.abs_path.clone();
2258 root_inode = snapshot.root_entry().map(|e| e.inode);
2259 is_dir = snapshot.root_entry().map_or(false, |e| e.is_dir());
2260 next_entry_id = snapshot.next_entry_id.clone();
2261 };
2262
2263 // Populate ignores above the root.
2264 for ancestor in root_abs_path.ancestors().skip(1) {
2265 if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
2266 {
2267 self.snapshot
2268 .lock()
2269 .ignores_by_parent_abs_path
2270 .insert(ancestor.into(), (ignore.into(), 0));
2271 }
2272 }
2273
2274 let ignore_stack = {
2275 let mut snapshot = self.snapshot.lock();
2276 let ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
2277 if ignore_stack.is_all() {
2278 if let Some(mut root_entry) = snapshot.root_entry().cloned() {
2279 root_entry.is_ignored = true;
2280 snapshot.insert_entry(root_entry, self.fs.as_ref());
2281 }
2282 }
2283 ignore_stack
2284 };
2285
2286 if is_dir {
2287 let path: Arc<Path> = Arc::from(Path::new(""));
2288 let mut ancestor_inodes = TreeSet::default();
2289 if let Some(root_inode) = root_inode {
2290 ancestor_inodes.insert(root_inode);
2291 }
2292
2293 let (tx, rx) = channel::unbounded();
2294 self.executor
2295 .block(tx.send(ScanJob {
2296 abs_path: root_abs_path.to_path_buf(),
2297 path,
2298 ignore_stack,
2299 ancestor_inodes,
2300 scan_queue: tx.clone(),
2301 }))
2302 .unwrap();
2303 drop(tx);
2304
2305 self.executor
2306 .scoped(|scope| {
2307 for _ in 0..self.executor.num_cpus() {
2308 scope.spawn(async {
2309 while let Ok(job) = rx.recv().await {
2310 if let Err(err) = self
2311 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2312 .await
2313 {
2314 log::error!("error scanning {:?}: {}", job.abs_path, err);
2315 }
2316 }
2317 });
2318 }
2319 })
2320 .await;
2321 }
2322
2323 Ok(())
2324 }
2325
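    /// Scans a single directory: records an entry for each child, loads any
    /// `.gitignore` it contains, and queues jobs for subdirectories that do
    /// not introduce a symlink cycle (detected via `ancestor_inodes`).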
2326 async fn scan_dir(
2327 &self,
2328 root_char_bag: CharBag,
2329 next_entry_id: Arc<AtomicUsize>,
2330 job: &ScanJob,
2331 ) -> Result<()> {
2332 let mut new_entries: Vec<Entry> = Vec::new();
2333 let mut new_jobs: Vec<ScanJob> = Vec::new();
2334 let mut ignore_stack = job.ignore_stack.clone();
2335 let mut new_ignore = None;
2336
2337 let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
2338 while let Some(child_abs_path) = child_paths.next().await {
2339 let child_abs_path = match child_abs_path {
2340 Ok(child_abs_path) => child_abs_path,
2341 Err(error) => {
2342 log::error!("error processing entry {:?}", error);
2343 continue;
2344 }
2345 };
2346 let child_name = child_abs_path.file_name().unwrap();
2347 let child_path: Arc<Path> = job.path.join(child_name).into();
2348 let child_metadata = match self.fs.metadata(&child_abs_path).await {
2349 Ok(Some(metadata)) => metadata,
2350 Ok(None) => continue,
2351 Err(err) => {
2352 log::error!("error processing {:?}: {:?}", child_abs_path, err);
2353 continue;
2354 }
2355 };
2356
2357 // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
2358 if child_name == *GITIGNORE {
2359 match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
2360 Ok(ignore) => {
2361 let ignore = Arc::new(ignore);
2362 ignore_stack =
2363 ignore_stack.append(job.abs_path.as_path().into(), ignore.clone());
2364 new_ignore = Some(ignore);
2365 }
2366 Err(error) => {
2367 log::error!(
2368 "error loading .gitignore file {:?} - {:?}",
2369 child_name,
2370 error
2371 );
2372 }
2373 }
2374
                // Update the ignore status of any child entries we've already processed
                // to reflect the ignore file in the current directory. Because `.gitignore`
                // starts with a `.`, it tends to be one of the first children we process,
                // so there should rarely be many entries to update. Update the ignore
                // stack associated with any new jobs as well.
2379 let mut new_jobs = new_jobs.iter_mut();
2380 for entry in &mut new_entries {
2381 let entry_abs_path = self.abs_path().join(&entry.path);
2382 entry.is_ignored =
2383 ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
2384 if entry.is_dir() {
2385 new_jobs.next().unwrap().ignore_stack = if entry.is_ignored {
2386 IgnoreStack::all()
2387 } else {
2388 ignore_stack.clone()
2389 };
2390 }
2391 }
2392 }
2393
2394 let mut child_entry = Entry::new(
2395 child_path.clone(),
2396 &child_metadata,
2397 &next_entry_id,
2398 root_char_bag,
2399 );
2400
2401 if child_entry.is_dir() {
2402 let is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
2403 child_entry.is_ignored = is_ignored;
2404
2405 if !job.ancestor_inodes.contains(&child_entry.inode) {
2406 let mut ancestor_inodes = job.ancestor_inodes.clone();
2407 ancestor_inodes.insert(child_entry.inode);
2408 new_jobs.push(ScanJob {
2409 abs_path: child_abs_path,
2410 path: child_path,
2411 ignore_stack: if is_ignored {
2412 IgnoreStack::all()
2413 } else {
2414 ignore_stack.clone()
2415 },
2416 ancestor_inodes,
2417 scan_queue: job.scan_queue.clone(),
2418 });
2419 }
2420 } else {
2421 child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
2422 }
2423
2424 new_entries.push(child_entry);
2425 }
2426
2427 self.snapshot.lock().populate_dir(
2428 job.path.clone(),
2429 new_entries,
2430 new_ignore,
2431 self.fs.as_ref(),
2432 );
2433 for new_job in new_jobs {
2434 job.scan_queue.send(new_job).await.unwrap();
2435 }
2436
2437 Ok(())
2438 }
2439
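    /// Applies a batch of file-system events to the snapshot: removes stale
    /// entries, re-inserts entries for paths that still exist, rescans any
    /// newly created directories, then refreshes ignore statuses and git
    /// repositories. Returns `false` if the root path can no longer be
    /// canonicalized.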
2440 async fn process_events(&mut self, mut events: Vec<fsevent::Event>) -> bool {
2441 events.sort_unstable_by(|a, b| a.path.cmp(&b.path));
2442 events.dedup_by(|a, b| a.path.starts_with(&b.path));
2443
2444 let root_char_bag;
2445 let root_abs_path;
2446 let next_entry_id;
2447 {
2448 let snapshot = self.snapshot.lock();
2449 root_char_bag = snapshot.root_char_bag;
2450 root_abs_path = snapshot.abs_path.clone();
2451 next_entry_id = snapshot.next_entry_id.clone();
2452 }
2453
2454 let root_canonical_path = if let Ok(path) = self.fs.canonicalize(&root_abs_path).await {
2455 path
2456 } else {
2457 return false;
2458 };
2459 let metadata = futures::future::join_all(
2460 events
2461 .iter()
2462 .map(|event| self.fs.metadata(&event.path))
2463 .collect::<Vec<_>>(),
2464 )
2465 .await;
2466
        // Hold the snapshot lock while removing and re-inserting the entries
        // for each event. This way, the snapshot is not observable to the
        // foreground thread while this operation is in progress.
2470 let (scan_queue_tx, scan_queue_rx) = channel::unbounded();
2471 {
2472 let mut snapshot = self.snapshot.lock();
2473 snapshot.scan_id += 1;
2474 for event in &events {
2475 if let Ok(path) = event.path.strip_prefix(&root_canonical_path) {
2476 snapshot.remove_path(path);
2477 }
2478 }
2479
2480 for (event, metadata) in events.into_iter().zip(metadata.into_iter()) {
2481 let path: Arc<Path> = match event.path.strip_prefix(&root_canonical_path) {
2482 Ok(path) => Arc::from(path.to_path_buf()),
2483 Err(_) => {
2484 log::error!(
2485 "unexpected event {:?} for root path {:?}",
2486 event.path,
2487 root_canonical_path
2488 );
2489 continue;
2490 }
2491 };
2492 let abs_path = root_abs_path.join(&path);
2493
2494 match metadata {
2495 Ok(Some(metadata)) => {
2496 let ignore_stack =
2497 snapshot.ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
2498 let mut fs_entry = Entry::new(
2499 path.clone(),
2500 &metadata,
2501 snapshot.next_entry_id.as_ref(),
2502 snapshot.root_char_bag,
2503 );
2504 fs_entry.is_ignored = ignore_stack.is_all();
2505 snapshot.insert_entry(fs_entry, self.fs.as_ref());
2506
2507 let scan_id = snapshot.scan_id;
2508 if let Some(repo) = snapshot.in_dot_git(&path) {
2509 repo.repo.lock().reload_index();
2510 repo.scan_id = scan_id;
2511 }
2512
2513 let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
2514 if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
2515 ancestor_inodes.insert(metadata.inode);
2516 self.executor
2517 .block(scan_queue_tx.send(ScanJob {
2518 abs_path,
2519 path,
2520 ignore_stack,
2521 ancestor_inodes,
2522 scan_queue: scan_queue_tx.clone(),
2523 }))
2524 .unwrap();
2525 }
2526 }
2527 Ok(None) => {}
2528 Err(err) => {
2529 // TODO - create a special 'error' entry in the entries tree to mark this
2530 log::error!("error reading file on event {:?}", err);
2531 }
2532 }
2533 }
2534 drop(scan_queue_tx);
2535 }
2536
2537 // Scan any directories that were created as part of this event batch.
2538 self.executor
2539 .scoped(|scope| {
2540 for _ in 0..self.executor.num_cpus() {
2541 scope.spawn(async {
2542 while let Ok(job) = scan_queue_rx.recv().await {
2543 if let Err(err) = self
2544 .scan_dir(root_char_bag, next_entry_id.clone(), &job)
2545 .await
2546 {
2547 log::error!("error scanning {:?}: {}", job.abs_path, err);
2548 }
2549 }
2550 });
2551 }
2552 })
2553 .await;
2554
2555 // Attempt to detect renames only over a single batch of file-system events.
2556 self.snapshot.lock().removed_entry_ids.clear();
2557
2558 self.update_ignore_statuses().await;
2559 self.update_git_repositories();
2560 true
2561 }
2562
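    /// Recomputes ignore statuses beneath every `.gitignore` file that changed
    /// during the current scan, and forgets `.gitignore` files that no longer
    /// have a corresponding entry in the snapshot.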
2563 async fn update_ignore_statuses(&self) {
2564 let mut snapshot = self.snapshot();
2565
2566 let mut ignores_to_update = Vec::new();
2567 let mut ignores_to_delete = Vec::new();
2568 for (parent_abs_path, (_, scan_id)) in &snapshot.ignores_by_parent_abs_path {
2569 if let Ok(parent_path) = parent_abs_path.strip_prefix(&snapshot.abs_path) {
2570 if *scan_id == snapshot.scan_id && snapshot.entry_for_path(parent_path).is_some() {
2571 ignores_to_update.push(parent_abs_path.clone());
2572 }
2573
2574 let ignore_path = parent_path.join(&*GITIGNORE);
2575 if snapshot.entry_for_path(ignore_path).is_none() {
2576 ignores_to_delete.push(parent_abs_path.clone());
2577 }
2578 }
2579 }
2580
2581 for parent_abs_path in ignores_to_delete {
2582 snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
2583 self.snapshot
2584 .lock()
2585 .ignores_by_parent_abs_path
2586 .remove(&parent_abs_path);
2587 }
2588
2589 let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
2590 ignores_to_update.sort_unstable();
2591 let mut ignores_to_update = ignores_to_update.into_iter().peekable();
2592 while let Some(parent_abs_path) = ignores_to_update.next() {
2593 while ignores_to_update
2594 .peek()
2595 .map_or(false, |p| p.starts_with(&parent_abs_path))
2596 {
2597 ignores_to_update.next().unwrap();
2598 }
2599
2600 let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
2601 ignore_queue_tx
2602 .send(UpdateIgnoreStatusJob {
2603 abs_path: parent_abs_path,
2604 ignore_stack,
2605 ignore_queue: ignore_queue_tx.clone(),
2606 })
2607 .await
2608 .unwrap();
2609 }
2610 drop(ignore_queue_tx);
2611
2612 self.executor
2613 .scoped(|scope| {
2614 for _ in 0..self.executor.num_cpus() {
2615 scope.spawn(async {
2616 while let Ok(job) = ignore_queue_rx.recv().await {
2617 self.update_ignore_status(job, &snapshot).await;
2618 }
2619 });
2620 }
2621 })
2622 .await;
2623 }
2624
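    /// Drops any tracked git repositories whose `.git` paths no longer have an
    /// entry in the snapshot.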
2625 fn update_git_repositories(&self) {
2626 let mut snapshot = self.snapshot.lock();
2627 let mut git_repositories = mem::take(&mut snapshot.git_repositories);
2628 git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
2629 snapshot.git_repositories = git_repositories;
2630 }
2631
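    /// Recomputes ignore status for the direct children of `job.abs_path`,
    /// queueing follow-up jobs for subdirectories and editing the snapshot for
    /// any entries whose status changed.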
2632 async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
2633 let mut ignore_stack = job.ignore_stack;
2634 if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
2635 ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
2636 }
2637
2638 let mut entries_by_id_edits = Vec::new();
2639 let mut entries_by_path_edits = Vec::new();
2640 let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
2641 for mut entry in snapshot.child_entries(path).cloned() {
2642 let was_ignored = entry.is_ignored;
2643 let abs_path = self.abs_path().join(&entry.path);
2644 entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
2645 if entry.is_dir() {
2646 let child_ignore_stack = if entry.is_ignored {
2647 IgnoreStack::all()
2648 } else {
2649 ignore_stack.clone()
2650 };
2651 job.ignore_queue
2652 .send(UpdateIgnoreStatusJob {
2653 abs_path: abs_path.into(),
2654 ignore_stack: child_ignore_stack,
2655 ignore_queue: job.ignore_queue.clone(),
2656 })
2657 .await
2658 .unwrap();
2659 }
2660
2661 if entry.is_ignored != was_ignored {
2662 let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
2663 path_entry.scan_id = snapshot.scan_id;
2664 path_entry.is_ignored = entry.is_ignored;
2665 entries_by_id_edits.push(Edit::Insert(path_entry));
2666 entries_by_path_edits.push(Edit::Insert(entry));
2667 }
2668 }
2669
2670 let mut snapshot = self.snapshot.lock();
2671 snapshot.entries_by_path.edit(entries_by_path_edits, &());
2672 snapshot.entries_by_id.edit(entries_by_id_edits, &());
2673 }
2674}
2675
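/// Extends the root's character bag with the lowercased characters of `path`,
/// producing the bag used to fuzzy-match this entry.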
2676fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
2677 let mut result = root_char_bag;
2678 result.extend(
2679 path.to_string_lossy()
2680 .chars()
2681 .map(|c| c.to_ascii_lowercase()),
2682 );
2683 result
2684}
2685
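/// A directory queued for scanning, along with the ignore stack that applies
/// to it and the inodes of its ancestors (used to break symlink cycles).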
2686struct ScanJob {
2687 abs_path: PathBuf,
2688 path: Arc<Path>,
2689 ignore_stack: Arc<IgnoreStack>,
2690 scan_queue: Sender<ScanJob>,
2691 ancestor_inodes: TreeSet<u64>,
2692}
2693
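/// A directory whose child entries need their ignore status recomputed against
/// the given ignore stack.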
2694struct UpdateIgnoreStatusJob {
2695 abs_path: Arc<Path>,
2696 ignore_stack: Arc<IgnoreStack>,
2697 ignore_queue: Sender<UpdateIgnoreStatusJob>,
2698}
2699
2700pub trait WorktreeHandle {
2701 #[cfg(any(test, feature = "test-support"))]
2702 fn flush_fs_events<'a>(
2703 &self,
2704 cx: &'a gpui::TestAppContext,
2705 ) -> futures::future::LocalBoxFuture<'a, ()>;
2706}
2707
2708impl WorktreeHandle for ModelHandle<Worktree> {
    // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
    // occurred before the worktree was constructed. These events can cause the worktree to perform
    // extra directory scans and emit extra scan-state notifications.
2712 //
2713 // This function mutates the worktree's directory and waits for those mutations to be picked up,
2714 // to ensure that all redundant FS events have already been processed.
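    //
    // Typical usage in the tests below: mutate the file system directly, then
    // `tree.flush_fs_events(cx).await` before asserting on the worktree's entries.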
2715 #[cfg(any(test, feature = "test-support"))]
2716 fn flush_fs_events<'a>(
2717 &self,
2718 cx: &'a gpui::TestAppContext,
2719 ) -> futures::future::LocalBoxFuture<'a, ()> {
2720 use smol::future::FutureExt;
2721
2722 let filename = "fs-event-sentinel";
2723 let tree = self.clone();
2724 let (fs, root_path) = self.read_with(cx, |tree, _| {
2725 let tree = tree.as_local().unwrap();
2726 (tree.fs.clone(), tree.abs_path().clone())
2727 });
2728
2729 async move {
2730 fs.create_file(&root_path.join(filename), Default::default())
2731 .await
2732 .unwrap();
2733 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
2734 .await;
2735
2736 fs.remove_file(&root_path.join(filename), Default::default())
2737 .await
2738 .unwrap();
2739 tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
2740 .await;
2741
2742 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
2743 .await;
2744 }
2745 .boxed_local()
2746 }
2747}
2748
2749#[derive(Clone, Debug)]
2750struct TraversalProgress<'a> {
2751 max_path: &'a Path,
2752 count: usize,
2753 visible_count: usize,
2754 file_count: usize,
2755 visible_file_count: usize,
2756}
2757
2758impl<'a> TraversalProgress<'a> {
2759 fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
2760 match (include_ignored, include_dirs) {
2761 (true, true) => self.count,
2762 (true, false) => self.file_count,
2763 (false, true) => self.visible_count,
2764 (false, false) => self.visible_file_count,
2765 }
2766 }
2767}
2768
2769impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
2770 fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
2771 self.max_path = summary.max_path.as_ref();
2772 self.count += summary.count;
2773 self.visible_count += summary.visible_count;
2774 self.file_count += summary.file_count;
2775 self.visible_file_count += summary.visible_file_count;
2776 }
2777}
2778
2779impl<'a> Default for TraversalProgress<'a> {
2780 fn default() -> Self {
2781 Self {
2782 max_path: Path::new(""),
2783 count: 0,
2784 visible_count: 0,
2785 file_count: 0,
2786 visible_file_count: 0,
2787 }
2788 }
2789}
2790
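/// A cursor over a snapshot's entries in path order, optionally skipping
/// directories and/or ignored entries.
///
/// A minimal usage sketch (assuming a snapshot in scope, as in the tests
/// below):
///
/// ```ignore
/// // Visit every non-ignored entry in path order.
/// for entry in snapshot.entries(false) {
///     println!("{:?}", entry.path);
/// }
/// ```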
2791pub struct Traversal<'a> {
2792 cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
2793 include_ignored: bool,
2794 include_dirs: bool,
2795}
2796
2797impl<'a> Traversal<'a> {
2798 pub fn advance(&mut self) -> bool {
2799 self.advance_to_offset(self.offset() + 1)
2800 }
2801
2802 pub fn advance_to_offset(&mut self, offset: usize) -> bool {
2803 self.cursor.seek_forward(
2804 &TraversalTarget::Count {
2805 count: offset,
2806 include_dirs: self.include_dirs,
2807 include_ignored: self.include_ignored,
2808 },
2809 Bias::Right,
2810 &(),
2811 )
2812 }
2813
2814 pub fn advance_to_sibling(&mut self) -> bool {
2815 while let Some(entry) = self.cursor.item() {
2816 self.cursor.seek_forward(
2817 &TraversalTarget::PathSuccessor(&entry.path),
2818 Bias::Left,
2819 &(),
2820 );
2821 if let Some(entry) = self.cursor.item() {
2822 if (self.include_dirs || !entry.is_dir())
2823 && (self.include_ignored || !entry.is_ignored)
2824 {
2825 return true;
2826 }
2827 }
2828 }
2829 false
2830 }
2831
2832 pub fn entry(&self) -> Option<&'a Entry> {
2833 self.cursor.item()
2834 }
2835
2836 pub fn offset(&self) -> usize {
2837 self.cursor
2838 .start()
2839 .count(self.include_dirs, self.include_ignored)
2840 }
2841}
2842
2843impl<'a> Iterator for Traversal<'a> {
2844 type Item = &'a Entry;
2845
2846 fn next(&mut self) -> Option<Self::Item> {
2847 if let Some(item) = self.entry() {
2848 self.advance();
2849 Some(item)
2850 } else {
2851 None
2852 }
2853 }
2854}
2855
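/// A position a `Traversal` cursor can seek to: an exact path, the first path
/// that is not a descendant of a given path, or a count of (visible)
/// entries/files.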
2856#[derive(Debug)]
2857enum TraversalTarget<'a> {
2858 Path(&'a Path),
2859 PathSuccessor(&'a Path),
2860 Count {
2861 count: usize,
2862 include_ignored: bool,
2863 include_dirs: bool,
2864 },
2865}
2866
2867impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
2868 fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
2869 match self {
2870 TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
2871 TraversalTarget::PathSuccessor(path) => {
2872 if !cursor_location.max_path.starts_with(path) {
2873 Ordering::Equal
2874 } else {
2875 Ordering::Greater
2876 }
2877 }
2878 TraversalTarget::Count {
2879 count,
2880 include_dirs,
2881 include_ignored,
2882 } => Ord::cmp(
2883 count,
2884 &cursor_location.count(*include_dirs, *include_ignored),
2885 ),
2886 }
2887 }
2888}
2889
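/// Iterates over the immediate children of `parent_path` by advancing the
/// underlying traversal to each successive sibling.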
2890struct ChildEntriesIter<'a> {
2891 parent_path: &'a Path,
2892 traversal: Traversal<'a>,
2893}
2894
2895impl<'a> Iterator for ChildEntriesIter<'a> {
2896 type Item = &'a Entry;
2897
2898 fn next(&mut self) -> Option<Self::Item> {
2899 if let Some(item) = self.traversal.entry() {
2900 if item.path.starts_with(&self.parent_path) {
2901 self.traversal.advance_to_sibling();
2902 return Some(item);
2903 }
2904 }
2905 None
2906 }
2907}
2908
2909impl<'a> From<&'a Entry> for proto::Entry {
2910 fn from(entry: &'a Entry) -> Self {
2911 Self {
2912 id: entry.id.to_proto(),
2913 is_dir: entry.is_dir(),
2914 path: entry.path.as_os_str().as_bytes().to_vec(),
2915 inode: entry.inode,
2916 mtime: Some(entry.mtime.into()),
2917 is_symlink: entry.is_symlink,
2918 is_ignored: entry.is_ignored,
2919 }
2920 }
2921}
2922
2923impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
2924 type Error = anyhow::Error;
2925
2926 fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
2927 if let Some(mtime) = entry.mtime {
2928 let kind = if entry.is_dir {
2929 EntryKind::Dir
2930 } else {
2931 let mut char_bag = *root_char_bag;
2932 char_bag.extend(
2933 String::from_utf8_lossy(&entry.path)
2934 .chars()
2935 .map(|c| c.to_ascii_lowercase()),
2936 );
2937 EntryKind::File(char_bag)
2938 };
2939 let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
2940 Ok(Entry {
2941 id: ProjectEntryId::from_proto(entry.id),
2942 kind,
2943 path,
2944 inode: entry.inode,
2945 mtime: mtime.into(),
2946 is_symlink: entry.is_symlink,
2947 is_ignored: entry.is_ignored,
2948 })
2949 } else {
2950 Err(anyhow!(
2951 "missing mtime in remote worktree entry {:?}",
2952 entry.path
2953 ))
2954 }
2955 }
2956}
2957
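/// Sends an `UpdateWorktree` message to the server, splitting it into chunks
/// of at most `MAX_CHUNK_SIZE` entries per request.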
2958async fn send_worktree_update(client: &Arc<Client>, update: proto::UpdateWorktree) -> Result<()> {
2959 #[cfg(any(test, feature = "test-support"))]
2960 const MAX_CHUNK_SIZE: usize = 2;
2961 #[cfg(not(any(test, feature = "test-support")))]
2962 const MAX_CHUNK_SIZE: usize = 256;
2963
2964 for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
2965 client.request(update).await?;
2966 }
2967
2968 Ok(())
2969}
2970
2971#[cfg(test)]
2972mod tests {
2973 use super::*;
2974 use anyhow::Result;
2975 use client::test::FakeHttpClient;
2976 use fs::repository::FakeGitRepository;
2977 use fs::{FakeFs, RealFs};
2978 use gpui::{executor::Deterministic, TestAppContext};
2979 use rand::prelude::*;
2980 use serde_json::json;
2981 use std::{
2982 env,
2983 fmt::Write,
2984 time::{SystemTime, UNIX_EPOCH},
2985 };
2986
2987 use util::test::temp_tree;
2988
2989 #[gpui::test]
2990 async fn test_traversal(cx: &mut TestAppContext) {
2991 let fs = FakeFs::new(cx.background());
2992 fs.insert_tree(
2993 "/root",
2994 json!({
2995 ".gitignore": "a/b\n",
2996 "a": {
2997 "b": "",
2998 "c": "",
2999 }
3000 }),
3001 )
3002 .await;
3003
3004 let http_client = FakeHttpClient::with_404_response();
3005 let client = cx.read(|cx| Client::new(http_client, cx));
3006
3007 let tree = Worktree::local(
3008 client,
3009 Arc::from(Path::new("/root")),
3010 true,
3011 fs,
3012 Default::default(),
3013 &mut cx.to_async(),
3014 )
3015 .await
3016 .unwrap();
3017 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3018 .await;
3019
3020 tree.read_with(cx, |tree, _| {
3021 assert_eq!(
3022 tree.entries(false)
3023 .map(|entry| entry.path.as_ref())
3024 .collect::<Vec<_>>(),
3025 vec![
3026 Path::new(""),
3027 Path::new(".gitignore"),
3028 Path::new("a"),
3029 Path::new("a/c"),
3030 ]
3031 );
3032 assert_eq!(
3033 tree.entries(true)
3034 .map(|entry| entry.path.as_ref())
3035 .collect::<Vec<_>>(),
3036 vec![
3037 Path::new(""),
3038 Path::new(".gitignore"),
3039 Path::new("a"),
3040 Path::new("a/b"),
3041 Path::new("a/c"),
3042 ]
3043 );
3044 })
3045 }
3046
3047 #[gpui::test(iterations = 10)]
3048 async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
3049 let fs = FakeFs::new(cx.background());
3050 fs.insert_tree(
3051 "/root",
3052 json!({
3053 "lib": {
3054 "a": {
3055 "a.txt": ""
3056 },
3057 "b": {
3058 "b.txt": ""
3059 }
3060 }
3061 }),
3062 )
3063 .await;
3064 fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
3065 fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
3066
3067 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3068 let tree = Worktree::local(
3069 client,
3070 Arc::from(Path::new("/root")),
3071 true,
3072 fs.clone(),
3073 Default::default(),
3074 &mut cx.to_async(),
3075 )
3076 .await
3077 .unwrap();
3078
3079 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3080 .await;
3081
3082 tree.read_with(cx, |tree, _| {
3083 assert_eq!(
3084 tree.entries(false)
3085 .map(|entry| entry.path.as_ref())
3086 .collect::<Vec<_>>(),
3087 vec![
3088 Path::new(""),
3089 Path::new("lib"),
3090 Path::new("lib/a"),
3091 Path::new("lib/a/a.txt"),
3092 Path::new("lib/a/lib"),
3093 Path::new("lib/b"),
3094 Path::new("lib/b/b.txt"),
3095 Path::new("lib/b/lib"),
3096 ]
3097 );
3098 });
3099
3100 fs.rename(
3101 Path::new("/root/lib/a/lib"),
3102 Path::new("/root/lib/a/lib-2"),
3103 Default::default(),
3104 )
3105 .await
3106 .unwrap();
3107 executor.run_until_parked();
3108 tree.read_with(cx, |tree, _| {
3109 assert_eq!(
3110 tree.entries(false)
3111 .map(|entry| entry.path.as_ref())
3112 .collect::<Vec<_>>(),
3113 vec![
3114 Path::new(""),
3115 Path::new("lib"),
3116 Path::new("lib/a"),
3117 Path::new("lib/a/a.txt"),
3118 Path::new("lib/a/lib-2"),
3119 Path::new("lib/b"),
3120 Path::new("lib/b/b.txt"),
3121 Path::new("lib/b/lib"),
3122 ]
3123 );
3124 });
3125 }
3126
3127 #[gpui::test]
3128 async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
3129 let parent_dir = temp_tree(json!({
3130 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
3131 "tree": {
3132 ".git": {},
3133 ".gitignore": "ignored-dir\n",
3134 "tracked-dir": {
3135 "tracked-file1": "",
3136 "ancestor-ignored-file1": "",
3137 },
3138 "ignored-dir": {
3139 "ignored-file1": ""
3140 }
3141 }
3142 }));
3143 let dir = parent_dir.path().join("tree");
3144
3145 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3146
3147 let tree = Worktree::local(
3148 client,
3149 dir.as_path(),
3150 true,
3151 Arc::new(RealFs),
3152 Default::default(),
3153 &mut cx.to_async(),
3154 )
3155 .await
3156 .unwrap();
3157 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3158 .await;
3159 tree.flush_fs_events(cx).await;
3160 cx.read(|cx| {
3161 let tree = tree.read(cx);
3162 assert!(
3163 !tree
3164 .entry_for_path("tracked-dir/tracked-file1")
3165 .unwrap()
3166 .is_ignored
3167 );
3168 assert!(
3169 tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
3170 .unwrap()
3171 .is_ignored
3172 );
3173 assert!(
3174 tree.entry_for_path("ignored-dir/ignored-file1")
3175 .unwrap()
3176 .is_ignored
3177 );
3178 });
3179
3180 std::fs::write(dir.join("tracked-dir/tracked-file2"), "").unwrap();
3181 std::fs::write(dir.join("tracked-dir/ancestor-ignored-file2"), "").unwrap();
3182 std::fs::write(dir.join("ignored-dir/ignored-file2"), "").unwrap();
3183 tree.flush_fs_events(cx).await;
3184 cx.read(|cx| {
3185 let tree = tree.read(cx);
3186 assert!(
3187 !tree
3188 .entry_for_path("tracked-dir/tracked-file2")
3189 .unwrap()
3190 .is_ignored
3191 );
3192 assert!(
3193 tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
3194 .unwrap()
3195 .is_ignored
3196 );
3197 assert!(
3198 tree.entry_for_path("ignored-dir/ignored-file2")
3199 .unwrap()
3200 .is_ignored
3201 );
3202 assert!(tree.entry_for_path(".git").unwrap().is_ignored);
3203 });
3204 }
3205
3206 #[gpui::test]
3207 async fn test_git_repository_for_path(cx: &mut TestAppContext) {
3208 let root = temp_tree(json!({
3209 "dir1": {
3210 ".git": {},
3211 "deps": {
3212 "dep1": {
3213 ".git": {},
3214 "src": {
3215 "a.txt": ""
3216 }
3217 }
3218 },
3219 "src": {
3220 "b.txt": ""
3221 }
3222 },
3223 "c.txt": "",
3224
3225 }));
3226
3227 let http_client = FakeHttpClient::with_404_response();
3228 let client = cx.read(|cx| Client::new(http_client, cx));
3229 let tree = Worktree::local(
3230 client,
3231 root.path(),
3232 true,
3233 Arc::new(RealFs),
3234 Default::default(),
3235 &mut cx.to_async(),
3236 )
3237 .await
3238 .unwrap();
3239
3240 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3241 .await;
3242 tree.flush_fs_events(cx).await;
3243
3244 tree.read_with(cx, |tree, _cx| {
3245 let tree = tree.as_local().unwrap();
3246
3247 assert!(tree.repo_for("c.txt".as_ref()).is_none());
3248
3249 let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
3250 assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
3251 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
3252
3253 let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
3254 assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
3255 assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
3256 });
3257
3258 let original_scan_id = tree.read_with(cx, |tree, _cx| {
3259 let tree = tree.as_local().unwrap();
3260 tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
3261 });
3262
3263 std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
3264 tree.flush_fs_events(cx).await;
3265
3266 tree.read_with(cx, |tree, _cx| {
3267 let tree = tree.as_local().unwrap();
3268 let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
3269 assert_ne!(
3270 original_scan_id, new_scan_id,
3271 "original {original_scan_id}, new {new_scan_id}"
3272 );
3273 });
3274
3275 std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
3276 tree.flush_fs_events(cx).await;
3277
3278 tree.read_with(cx, |tree, _cx| {
3279 let tree = tree.as_local().unwrap();
3280
3281 assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
3282 });
3283 }
3284
3285 #[test]
3286 fn test_changed_repos() {
3287 fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
3288 GitRepositoryEntry {
3289 repo: Arc::new(Mutex::new(FakeGitRepository::default())),
3290 scan_id,
3291 content_path: git_dir_path.as_ref().parent().unwrap().into(),
3292 git_dir_path: git_dir_path.as_ref().into(),
3293 }
3294 }
3295
3296 let prev_repos: Vec<GitRepositoryEntry> = vec![
3297 fake_entry("/.git", 0),
3298 fake_entry("/a/.git", 0),
3299 fake_entry("/a/b/.git", 0),
3300 ];
3301
3302 let new_repos: Vec<GitRepositoryEntry> = vec![
3303 fake_entry("/a/.git", 1),
3304 fake_entry("/a/b/.git", 0),
3305 fake_entry("/a/c/.git", 0),
3306 ];
3307
3308 let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
3309
3310 // Deletion retained
3311 assert!(res
3312 .iter()
3313 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
3314 .is_some());
3315
3316 // Update retained
3317 assert!(res
3318 .iter()
3319 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
3320 .is_some());
3321
3322 // Addition retained
3323 assert!(res
3324 .iter()
3325 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
3326 .is_some());
3327
        // No change, not retained
3329 assert!(res
3330 .iter()
3331 .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
3332 .is_none());
3333 }
3334
3335 #[gpui::test]
3336 async fn test_write_file(cx: &mut TestAppContext) {
3337 let dir = temp_tree(json!({
3338 ".git": {},
3339 ".gitignore": "ignored-dir\n",
3340 "tracked-dir": {},
3341 "ignored-dir": {}
3342 }));
3343
3344 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3345
3346 let tree = Worktree::local(
3347 client,
3348 dir.path(),
3349 true,
3350 Arc::new(RealFs),
3351 Default::default(),
3352 &mut cx.to_async(),
3353 )
3354 .await
3355 .unwrap();
3356 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3357 .await;
3358 tree.flush_fs_events(cx).await;
3359
3360 tree.update(cx, |tree, cx| {
3361 tree.as_local().unwrap().write_file(
3362 Path::new("tracked-dir/file.txt"),
3363 "hello".into(),
3364 Default::default(),
3365 cx,
3366 )
3367 })
3368 .await
3369 .unwrap();
3370 tree.update(cx, |tree, cx| {
3371 tree.as_local().unwrap().write_file(
3372 Path::new("ignored-dir/file.txt"),
3373 "world".into(),
3374 Default::default(),
3375 cx,
3376 )
3377 })
3378 .await
3379 .unwrap();
3380
3381 tree.read_with(cx, |tree, _| {
3382 let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
3383 let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
3384 assert!(!tracked.is_ignored);
3385 assert!(ignored.is_ignored);
3386 });
3387 }
3388
3389 #[gpui::test(iterations = 30)]
3390 async fn test_create_directory(cx: &mut TestAppContext) {
3391 let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
3392
3393 let fs = FakeFs::new(cx.background());
3394 fs.insert_tree(
3395 "/a",
3396 json!({
3397 "b": {},
3398 "c": {},
3399 "d": {},
3400 }),
3401 )
3402 .await;
3403
3404 let tree = Worktree::local(
3405 client,
3406 "/a".as_ref(),
3407 true,
3408 fs,
3409 Default::default(),
3410 &mut cx.to_async(),
3411 )
3412 .await
3413 .unwrap();
3414
3415 let entry = tree
3416 .update(cx, |tree, cx| {
3417 tree.as_local_mut()
3418 .unwrap()
3419 .create_entry("a/e".as_ref(), true, cx)
3420 })
3421 .await
3422 .unwrap();
3423 assert!(entry.is_dir());
3424
3425 cx.foreground().run_until_parked();
3426 tree.read_with(cx, |tree, _| {
3427 assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
3428 });
3429 }
3430
3431 #[gpui::test(iterations = 100)]
3432 fn test_random(mut rng: StdRng) {
3433 let operations = env::var("OPERATIONS")
3434 .map(|o| o.parse().unwrap())
3435 .unwrap_or(40);
3436 let initial_entries = env::var("INITIAL_ENTRIES")
3437 .map(|o| o.parse().unwrap())
3438 .unwrap_or(20);
3439
3440 let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
3441 for _ in 0..initial_entries {
3442 randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
3443 }
3444 log::info!("Generated initial tree");
3445
3446 let (notify_tx, _notify_rx) = mpsc::unbounded();
3447 let fs = Arc::new(RealFs);
3448 let next_entry_id = Arc::new(AtomicUsize::new(0));
3449 let mut initial_snapshot = LocalSnapshot {
3450 abs_path: root_dir.path().into(),
3451 removed_entry_ids: Default::default(),
3452 ignores_by_parent_abs_path: Default::default(),
3453 git_repositories: Default::default(),
3454 next_entry_id: next_entry_id.clone(),
3455 snapshot: Snapshot {
3456 id: WorktreeId::from_usize(0),
3457 entries_by_path: Default::default(),
3458 entries_by_id: Default::default(),
3459 root_name: Default::default(),
3460 root_char_bag: Default::default(),
3461 scan_id: 0,
3462 is_complete: true,
3463 },
3464 extension_counts: Default::default(),
3465 };
3466 initial_snapshot.insert_entry(
3467 Entry::new(
3468 Path::new("").into(),
3469 &smol::block_on(fs.metadata(root_dir.path()))
3470 .unwrap()
3471 .unwrap(),
3472 &next_entry_id,
3473 Default::default(),
3474 ),
3475 fs.as_ref(),
3476 );
3477 let mut scanner = BackgroundScanner::new(
3478 Arc::new(Mutex::new(initial_snapshot.clone())),
3479 notify_tx,
3480 fs.clone(),
3481 Arc::new(gpui::executor::Background::new()),
3482 );
3483 smol::block_on(scanner.scan_dirs()).unwrap();
3484 scanner.snapshot().check_invariants();
3485
3486 let mut events = Vec::new();
3487 let mut snapshots = Vec::new();
3488 let mut mutations_len = operations;
3489 while mutations_len > 1 {
3490 if !events.is_empty() && rng.gen_bool(0.4) {
3491 let len = rng.gen_range(0..=events.len());
3492 let to_deliver = events.drain(0..len).collect::<Vec<_>>();
3493 log::info!("Delivering events: {:#?}", to_deliver);
3494 smol::block_on(scanner.process_events(to_deliver));
3495 scanner.snapshot().check_invariants();
3496 } else {
3497 events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
3498 mutations_len -= 1;
3499 }
3500
3501 if rng.gen_bool(0.2) {
3502 snapshots.push(scanner.snapshot());
3503 }
3504 }
3505 log::info!("Quiescing: {:#?}", events);
3506 smol::block_on(scanner.process_events(events));
3507 scanner.snapshot().check_invariants();
3508
3509 let (notify_tx, _notify_rx) = mpsc::unbounded();
3510 let mut new_scanner = BackgroundScanner::new(
3511 Arc::new(Mutex::new(initial_snapshot)),
3512 notify_tx,
3513 scanner.fs.clone(),
3514 scanner.executor.clone(),
3515 );
3516 smol::block_on(new_scanner.scan_dirs()).unwrap();
3517 assert_eq!(
3518 scanner.snapshot().to_vec(true),
3519 new_scanner.snapshot().to_vec(true)
3520 );
3521
3522 for mut prev_snapshot in snapshots {
3523 let include_ignored = rng.gen::<bool>();
3524 if !include_ignored {
3525 let mut entries_by_path_edits = Vec::new();
3526 let mut entries_by_id_edits = Vec::new();
3527 for entry in prev_snapshot
3528 .entries_by_id
3529 .cursor::<()>()
3530 .filter(|e| e.is_ignored)
3531 {
3532 entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
3533 entries_by_id_edits.push(Edit::Remove(entry.id));
3534 }
3535
3536 prev_snapshot
3537 .entries_by_path
3538 .edit(entries_by_path_edits, &());
3539 prev_snapshot.entries_by_id.edit(entries_by_id_edits, &());
3540 }
3541
3542 let update = scanner
3543 .snapshot()
3544 .build_update(&prev_snapshot, 0, 0, include_ignored);
3545 prev_snapshot.apply_remote_update(update).unwrap();
3546 assert_eq!(
3547 prev_snapshot.to_vec(true),
3548 scanner.snapshot().to_vec(include_ignored)
3549 );
3550 }
3551 }
3552
3553 fn randomly_mutate_tree(
3554 root_path: &Path,
3555 insertion_probability: f64,
3556 rng: &mut impl Rng,
3557 ) -> Result<Vec<fsevent::Event>> {
3558 let root_path = root_path.canonicalize().unwrap();
3559 let (dirs, files) = read_dir_recursive(root_path.clone());
3560
3561 let mut events = Vec::new();
3562 let mut record_event = |path: PathBuf| {
3563 events.push(fsevent::Event {
3564 event_id: SystemTime::now()
3565 .duration_since(UNIX_EPOCH)
3566 .unwrap()
3567 .as_secs(),
3568 flags: fsevent::StreamFlags::empty(),
3569 path,
3570 });
3571 };
3572
3573 if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
3574 let path = dirs.choose(rng).unwrap();
3575 let new_path = path.join(gen_name(rng));
3576
3577 if rng.gen() {
3578 log::info!("Creating dir {:?}", new_path.strip_prefix(root_path)?);
3579 std::fs::create_dir(&new_path)?;
3580 } else {
3581 log::info!("Creating file {:?}", new_path.strip_prefix(root_path)?);
3582 std::fs::write(&new_path, "")?;
3583 }
3584 record_event(new_path);
3585 } else if rng.gen_bool(0.05) {
3586 let ignore_dir_path = dirs.choose(rng).unwrap();
3587 let ignore_path = ignore_dir_path.join(&*GITIGNORE);
3588
3589 let (subdirs, subfiles) = read_dir_recursive(ignore_dir_path.clone());
3590 let files_to_ignore = {
3591 let len = rng.gen_range(0..=subfiles.len());
3592 subfiles.choose_multiple(rng, len)
3593 };
3594 let dirs_to_ignore = {
3595 let len = rng.gen_range(0..subdirs.len());
3596 subdirs.choose_multiple(rng, len)
3597 };
3598
3599 let mut ignore_contents = String::new();
3600 for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
3601 writeln!(
3602 ignore_contents,
3603 "{}",
3604 path_to_ignore
3605 .strip_prefix(&ignore_dir_path)?
3606 .to_str()
3607 .unwrap()
3608 )
3609 .unwrap();
3610 }
3611 log::info!(
3612 "Creating {:?} with contents:\n{}",
3613 ignore_path.strip_prefix(&root_path)?,
3614 ignore_contents
3615 );
3616 std::fs::write(&ignore_path, ignore_contents).unwrap();
3617 record_event(ignore_path);
3618 } else {
3619 let old_path = {
3620 let file_path = files.choose(rng);
3621 let dir_path = dirs[1..].choose(rng);
3622 file_path.into_iter().chain(dir_path).choose(rng).unwrap()
3623 };
3624
3625 let is_rename = rng.gen();
3626 if is_rename {
3627 let new_path_parent = dirs
3628 .iter()
3629 .filter(|d| !d.starts_with(old_path))
3630 .choose(rng)
3631 .unwrap();
3632
3633 let overwrite_existing_dir =
3634 !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
3635 let new_path = if overwrite_existing_dir {
3636 std::fs::remove_dir_all(&new_path_parent).ok();
3637 new_path_parent.to_path_buf()
3638 } else {
3639 new_path_parent.join(gen_name(rng))
3640 };
3641
3642 log::info!(
3643 "Renaming {:?} to {}{:?}",
3644 old_path.strip_prefix(&root_path)?,
3645 if overwrite_existing_dir {
3646 "overwrite "
3647 } else {
3648 ""
3649 },
3650 new_path.strip_prefix(&root_path)?
3651 );
3652 std::fs::rename(&old_path, &new_path)?;
3653 record_event(old_path.clone());
3654 record_event(new_path);
3655 } else if old_path.is_dir() {
3656 let (dirs, files) = read_dir_recursive(old_path.clone());
3657
3658 log::info!("Deleting dir {:?}", old_path.strip_prefix(&root_path)?);
3659 std::fs::remove_dir_all(&old_path).unwrap();
3660 for file in files {
3661 record_event(file);
3662 }
3663 for dir in dirs {
3664 record_event(dir);
3665 }
3666 } else {
3667 log::info!("Deleting file {:?}", old_path.strip_prefix(&root_path)?);
3668 std::fs::remove_file(old_path).unwrap();
3669 record_event(old_path.clone());
3670 }
3671 }
3672
3673 Ok(events)
3674 }
3675
3676 fn read_dir_recursive(path: PathBuf) -> (Vec<PathBuf>, Vec<PathBuf>) {
3677 let child_entries = std::fs::read_dir(&path).unwrap();
3678 let mut dirs = vec![path];
3679 let mut files = Vec::new();
3680 for child_entry in child_entries {
3681 let child_path = child_entry.unwrap().path();
3682 if child_path.is_dir() {
3683 let (child_dirs, child_files) = read_dir_recursive(child_path);
3684 dirs.extend(child_dirs);
3685 files.extend(child_files);
3686 } else {
3687 files.push(child_path);
3688 }
3689 }
3690 (dirs, files)
3691 }
3692
3693 fn gen_name(rng: &mut impl Rng) -> String {
3694 (0..6)
3695 .map(|_| rng.sample(rand::distributions::Alphanumeric))
3696 .map(char::from)
3697 .collect()
3698 }
3699
3700 impl LocalSnapshot {
3701 fn check_invariants(&self) {
3702 let mut files = self.files(true, 0);
3703 let mut visible_files = self.files(false, 0);
3704 for entry in self.entries_by_path.cursor::<()>() {
3705 if entry.is_file() {
3706 assert_eq!(files.next().unwrap().inode, entry.inode);
3707 if !entry.is_ignored {
3708 assert_eq!(visible_files.next().unwrap().inode, entry.inode);
3709 }
3710 }
3711 }
3712 assert!(files.next().is_none());
3713 assert!(visible_files.next().is_none());
3714
3715 let mut bfs_paths = Vec::new();
3716 let mut stack = vec![Path::new("")];
3717 while let Some(path) = stack.pop() {
3718 bfs_paths.push(path);
3719 let ix = stack.len();
3720 for child_entry in self.child_entries(path) {
3721 stack.insert(ix, &child_entry.path);
3722 }
3723 }
3724
3725 let dfs_paths_via_iter = self
3726 .entries_by_path
3727 .cursor::<()>()
3728 .map(|e| e.path.as_ref())
3729 .collect::<Vec<_>>();
3730 assert_eq!(bfs_paths, dfs_paths_via_iter);
3731
3732 let dfs_paths_via_traversal = self
3733 .entries(true)
3734 .map(|e| e.path.as_ref())
3735 .collect::<Vec<_>>();
3736 assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
3737
3738 for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
3739 let ignore_parent_path =
3740 ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
3741 assert!(self.entry_for_path(&ignore_parent_path).is_some());
3742 assert!(self
3743 .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
3744 .is_some());
3745 }
3746
3747 // Ensure extension counts are correct.
3748 let mut expected_extension_counts = HashMap::default();
3749 for extension in self.entries(false).filter_map(|e| e.path.extension()) {
3750 *expected_extension_counts
3751 .entry(extension.into())
3752 .or_insert(0) += 1;
3753 }
3754 assert_eq!(self.extension_counts, expected_extension_counts);
3755 }
3756
3757 fn to_vec(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
3758 let mut paths = Vec::new();
3759 for entry in self.entries_by_path.cursor::<()>() {
3760 if include_ignored || !entry.is_ignored {
3761 paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
3762 }
3763 }
3764 paths.sort_by(|a, b| a.0.cmp(b.0));
3765 paths
3766 }
3767 }
3768}